aboutsummaryrefslogtreecommitdiff
path: root/thirdparty/ryml/test
diff options
context:
space:
mode:
authorStefan Boberg <[email protected]>2025-11-07 14:49:13 +0100
committerGitHub Enterprise <[email protected]>2025-11-07 14:49:13 +0100
commit24e43a913f29ac3b314354e8ce5175f135bcc64f (patch)
treeca442937ceeb63461012b33a4576e9835099f106 /thirdparty/ryml/test
parentget oplog attachments (#622) (diff)
downloadzen-24e43a913f29ac3b314354e8ce5175f135bcc64f.tar.xz
zen-24e43a913f29ac3b314354e8ce5175f135bcc64f.zip
switch to xmake for package management (#611)
This change removes our dependency on vcpkg for package management, in favour of bringing some code in-tree in the `thirdparty` folder as well as using the xmake built-in package management feature. For the latter, all the package definitions are maintained in the zen repo itself, in the `repo` folder. It should now also be easier to build the project as it will no longer depend on having the right version of vcpkg installed, which has been a common problem for new people coming into the codebase. Now you should only need xmake to build. * Bumps xmake requirement on github runners to 2.9.9 to resolve an issue where xmake on Windows invokes cmake with `v144` toolchain which does not exist * BLAKE3 is now in-tree at `thirdparty/blake3` * cpr is now in-tree at `thirdparty/cpr` * cxxopts is now in-tree at `thirdparty/cxxopts` * fmt is now in-tree at `thirdparty/fmt` * robin-map is now in-tree at `thirdparty/robin-map` * ryml is now in-tree at `thirdparty/ryml` * sol2 is now in-tree at `thirdparty/sol2` * spdlog is now in-tree at `thirdparty/spdlog` * utfcpp is now in-tree at `thirdparty/utfcpp` * xmake package repo definitions are in `repo` * implemented support for sanitizers. ASAN is supported on windows, TSAN, UBSAN, MSAN etc are supported on Linux/MacOS though I have not yet tested it extensively on MacOS * the zencore encryption implementation also now supports using mbedTLS which is used on MacOS, though for now we still use openssl on Linux * crashpad * bumps libcurl to 8.11.0 (from 8.8.0) which should address a rare build upload bug
Diffstat (limited to 'thirdparty/ryml/test')
-rw-r--r--thirdparty/ryml/test/callbacks_tester.hpp77
-rw-r--r--thirdparty/ryml/test/test_basic.cpp304
-rw-r--r--thirdparty/ryml/test/test_block_folded.cpp1574
-rw-r--r--thirdparty/ryml/test/test_block_literal.cpp1261
-rw-r--r--thirdparty/ryml/test/test_callbacks.cpp356
-rw-r--r--thirdparty/ryml/test/test_case.cpp898
-rw-r--r--thirdparty/ryml/test/test_case.hpp533
-rw-r--r--thirdparty/ryml/test/test_double_quoted.cpp610
-rw-r--r--thirdparty/ryml/test/test_emit.cpp491
-rw-r--r--thirdparty/ryml/test/test_empty_file.cpp79
-rw-r--r--thirdparty/ryml/test/test_empty_map.cpp43
-rw-r--r--thirdparty/ryml/test/test_empty_scalar.cpp353
-rw-r--r--thirdparty/ryml/test/test_empty_seq.cpp40
-rw-r--r--thirdparty/ryml/test/test_explicit_key.cpp419
-rw-r--r--thirdparty/ryml/test/test_generic_map.cpp89
-rw-r--r--thirdparty/ryml/test/test_generic_seq.cpp47
-rw-r--r--thirdparty/ryml/test/test_github_issues.cpp590
-rw-r--r--thirdparty/ryml/test/test_group.cpp732
-rw-r--r--thirdparty/ryml/test/test_group.hpp210
-rw-r--r--thirdparty/ryml/test/test_indentation.cpp340
-rw-r--r--thirdparty/ryml/test/test_json.cpp516
-rw-r--r--thirdparty/ryml/test/test_location.cpp720
-rw-r--r--thirdparty/ryml/test/test_map_of_seq.cpp201
-rw-r--r--thirdparty/ryml/test/test_merge.cpp225
-rw-r--r--thirdparty/ryml/test/test_nested_mapx2.cpp73
-rw-r--r--thirdparty/ryml/test/test_nested_mapx3.cpp103
-rw-r--r--thirdparty/ryml/test/test_nested_mapx4.cpp190
-rw-r--r--thirdparty/ryml/test/test_nested_seqx2.cpp133
-rw-r--r--thirdparty/ryml/test/test_nested_seqx3.cpp187
-rw-r--r--thirdparty/ryml/test/test_nested_seqx4.cpp124
-rw-r--r--thirdparty/ryml/test/test_noderef.cpp813
-rw-r--r--thirdparty/ryml/test/test_null_val.cpp519
-rw-r--r--thirdparty/ryml/test/test_number.cpp217
-rw-r--r--thirdparty/ryml/test/test_parser.cpp566
-rw-r--r--thirdparty/ryml/test/test_plain_scalar.cpp800
-rw-r--r--thirdparty/ryml/test/test_preprocess.cpp53
-rw-r--r--thirdparty/ryml/test/test_scalar_names.cpp94
-rw-r--r--thirdparty/ryml/test/test_seq_of_map.cpp348
-rw-r--r--thirdparty/ryml/test/test_serialize.cpp499
-rw-r--r--thirdparty/ryml/test/test_simple_anchor.cpp1405
-rw-r--r--thirdparty/ryml/test/test_simple_doc.cpp526
-rw-r--r--thirdparty/ryml/test/test_simple_map.cpp1050
-rw-r--r--thirdparty/ryml/test/test_simple_seq.cpp695
-rw-r--r--thirdparty/ryml/test/test_simple_set.cpp144
-rw-r--r--thirdparty/ryml/test/test_single_quoted.cpp356
-rw-r--r--thirdparty/ryml/test/test_singleheader/libryml_singleheader.cpp3
-rw-r--r--thirdparty/ryml/test/test_stack.cpp857
-rw-r--r--thirdparty/ryml/test/test_style.cpp616
-rw-r--r--thirdparty/ryml/test/test_suite.cpp612
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_common.hpp44
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_events.cpp607
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_events.hpp45
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_events_emitter.cpp289
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_parts.cpp220
-rw-r--r--thirdparty/ryml/test/test_suite/test_suite_parts.hpp28
-rw-r--r--thirdparty/ryml/test/test_tag_property.cpp1149
-rw-r--r--thirdparty/ryml/test/test_tree.cpp3924
-rw-r--r--thirdparty/ryml/test/test_yaml_events.cpp467
58 files changed, 28464 insertions, 0 deletions
diff --git a/thirdparty/ryml/test/callbacks_tester.hpp b/thirdparty/ryml/test/callbacks_tester.hpp
new file mode 100644
index 000000000..5286e0c34
--- /dev/null
+++ b/thirdparty/ryml/test/callbacks_tester.hpp
@@ -0,0 +1,77 @@
+#ifndef C4_TEST_CALLBACKS_TESTER_HPP_
+#define C4_TEST_CALLBACKS_TESTER_HPP_
+
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/common.hpp"
+#endif
+#include <vector>
+#include <iostream>
+
+namespace c4 {
+namespace yml {
+
+struct CallbacksTester
+{
+ std::vector<char> memory_pool;
+ const char *id;
+ size_t num_allocs, alloc_size;
+ size_t num_deallocs, dealloc_size;
+
+ CallbacksTester(const char *id_="notset", size_t sz=10u * 1024u) // 10KB
+ : memory_pool(sz)
+ , id(id_)
+ , num_allocs()
+ , alloc_size()
+ , num_deallocs()
+ , dealloc_size()
+ {
+ }
+
+ // checking
+ ~CallbacksTester()
+ {
+ check();
+ }
+
+ void check()
+ {
+ std::cout << "size: alloc=" << alloc_size << " dealloc=" << dealloc_size << std::endl;
+ std::cout << "count: #allocs=" << num_allocs << " #deallocs=" << num_deallocs << std::endl;
+ RYML_CHECK(num_allocs == num_deallocs);
+ RYML_CHECK(alloc_size == dealloc_size);
+ }
+
+ Callbacks callbacks() const
+ {
+ Callbacks cb = get_callbacks();
+ cb.m_user_data = (void*) this;
+ cb.m_allocate = [](size_t len, void *, void *data){ return ((CallbacksTester*) data)->allocate(len); };
+ cb.m_free = [](void *mem, size_t len, void *data){ return ((CallbacksTester*) data)->free(mem, len); };
+ return cb;
+ }
+
+ void *allocate(size_t len)
+ {
+ std::cout << "alloc[" << num_allocs << "]=" << len << "B\n";
+ void *ptr = &memory_pool[alloc_size];
+ alloc_size += len;
+ ++num_allocs;
+ RYML_CHECK(alloc_size < memory_pool.size());
+ return ptr;
+ }
+
+ void free(void *mem, size_t len)
+ {
+ RYML_CHECK((char*)mem >= &memory_pool.front() && (char*)mem < &memory_pool.back());
+ RYML_CHECK((char*)mem+len >= &memory_pool.front() && (char*)mem+len <= &memory_pool.back());
+ std::cout << "free[" << num_deallocs << "]=" << len << "B\n";
+ dealloc_size += len;
+ ++num_deallocs;
+ // no need to free here
+ }
+};
+
+} // namespace yml
+} // namespace c4
+
+#endif /* C4_TEST_CALLBACKS_TESTER_HPP_ */
diff --git a/thirdparty/ryml/test/test_basic.cpp b/thirdparty/ryml/test/test_basic.cpp
new file mode 100644
index 000000000..7a10c073b
--- /dev/null
+++ b/thirdparty/ryml/test/test_basic.cpp
@@ -0,0 +1,304 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+
+#include "./test_case.hpp"
+
+#include <gtest/gtest.h>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4389) // signed/unsigned mismatch
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+#endif
+
+namespace c4 {
+namespace yml {
+
+TEST(general, parsing)
+{
+ auto tree = parse_in_arena("{foo: 1}");
+
+ char cmpbuf[128] = {0};
+ substr cmp(cmpbuf);
+ size_t ret;
+
+ ret = cat(cmp, tree["foo"].val());
+ EXPECT_EQ(cmp.first(ret), "1");
+
+ ret = cat(cmp, tree["foo"].key());
+ EXPECT_EQ(cmp.first(ret), "foo");
+}
+
+TEST(general, emitting)
+{
+ std::string cmpbuf;
+
+ Tree tree;
+ auto r = tree.rootref();
+
+ r |= MAP; // this is needed to make the root a map
+
+ r["foo"] = "1"; // ryml works only with strings.
+ // Note that the tree will be __pointing__ at the
+ // strings "foo" and "1" used here. You need
+ // to make sure they have at least the same
+ // lifetime as the tree.
+
+ auto s = r["seq"]; // does not change the tree until s is written to.
+ s |= SEQ;
+ r["seq"].append_child() = "bar0"; // value of this child is now __pointing__ at "bar0"
+ r["seq"].append_child() = "bar1";
+ r["seq"].append_child() = "bar2";
+
+ //print_tree(tree);
+
+ // emit to stdout (can also emit to FILE* or ryml::span)
+ emitrs_yaml(tree, &cmpbuf);
+ const char* exp = R"(foo: 1
+seq:
+ - bar0
+ - bar1
+ - bar2
+)";
+ EXPECT_EQ(cmpbuf, exp);
+
+ // serializing: using operator<< instead of operator=
+ // will make the tree serialize the value into a char
+ // arena inside the tree. This arena can be reserved at will.
+ int ch3 = 33, ch4 = 44;
+ s.append_child() << ch3;
+ s.append_child() << ch4;
+
+ {
+ std::string tmp = "child5";
+ s.append_child() << tmp; // requires #include <c4/yml/std/string.hpp>
+ // now tmp can go safely out of scope, as it was
+ // serialized to the tree's internal string arena
+ // Note the include highlighted above is required so that ryml
+ // knows how to turn an std::string into a c4::csubstr/c4::substr.
+ }
+
+ emitrs_yaml(tree, &cmpbuf);
+ exp = R"(foo: 1
+seq:
+ - bar0
+ - bar1
+ - bar2
+ - 33
+ - 44
+ - child5
+)";
+ EXPECT_EQ(cmpbuf, exp);
+
+ // to serialize keys:
+ int k=66;
+ r.append_child() << key(k) << 7;
+
+ emitrs_yaml(tree, &cmpbuf);
+ exp = R"(foo: 1
+seq:
+ - bar0
+ - bar1
+ - bar2
+ - 33
+ - 44
+ - child5
+66: 7
+)";
+ EXPECT_EQ(cmpbuf, exp);
+}
+
+TEST(general, map_to_root)
+{
+ std::string cmpbuf; const char *exp;
+ std::map<std::string, int> m({{"bar", 2}, {"foo", 1}});
+ Tree t;
+ t.rootref() << m;
+
+ emitrs_yaml(t, &cmpbuf);
+ exp = R"(bar: 2
+foo: 1
+)";
+ EXPECT_EQ(cmpbuf, exp);
+
+ t["foo"] << 10;
+ t["bar"] << 20;
+
+ m.clear();
+ t.rootref() >> m;
+
+ EXPECT_EQ(m["foo"], 10);
+ EXPECT_EQ(m["bar"], 20);
+}
+
+TEST(general, print_tree)
+{
+ const char yaml[] = R"(
+a:
+ b: bval
+ c:
+ d:
+ - e
+ - d
+ - f: fval
+ g: gval
+ h:
+ -
+ x: a
+ y: b
+ -
+ z: c
+ u:
+)";
+ Tree t = parse_in_arena(yaml);
+ print_tree(t); // to make sure this is covered too
+}
+
+TEST(general, numbers)
+{
+ const char yaml[] = R"(- -1
+- -1.0
+- +1.0
+- 1e-2
+- 1e+2
+)";
+ Tree t = parse_in_arena(yaml);
+ auto s = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(s, std::string(yaml));
+}
+
+// github issue 29: https://github.com/biojppm/rapidyaml/issues/29
+TEST(general, newlines_on_maps_nested_in_seqs)
+{
+ const char yaml[] = R"(enemy:
+- actors:
+ - {name: Enemy_Bokoblin_Junior, value: 4.0}
+ - {name: Enemy_Bokoblin_Middle, value: 16.0}
+ - {name: Enemy_Bokoblin_Senior, value: 32.0}
+ - {name: Enemy_Bokoblin_Dark, value: 48.0}
+ species: BokoblinSeries
+)";
+ std::string expected = R"(enemy:
+ - actors:
+ - name: Enemy_Bokoblin_Junior
+ value: 4.0
+ - name: Enemy_Bokoblin_Middle
+ value: 16.0
+ - name: Enemy_Bokoblin_Senior
+ value: 32.0
+ - name: Enemy_Bokoblin_Dark
+ value: 48.0
+ species: BokoblinSeries
+)";
+ Tree t = parse_in_arena(yaml);
+ auto s = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(expected, s);
+}
+
+
+TEST(general, test_suite_RZT7)
+{
+ csubstr yaml = R"(
+---
+Time: 2001-11-23 15:01:42 -5
+User: ed
+Warning:
+ This is an error message
+ for the log file
+---
+Time: 2001-11-23 15:02:31 -5
+User: ed
+Warning:
+ A slightly different error
+ message.
+---
+Date: 2001-11-23 15:03:17 -5
+User: ed
+Fatal:
+ Unknown variable "bar"
+Stack:
+ - file: TopClass.py
+ line: 23
+ code: |
+ x = MoreObject("345\n")
+ - file: MoreClass.py
+ line: 58
+ code: |-
+ foo = bar
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ConstNodeRef doc0 = t.rootref()[0];
+ EXPECT_EQ(doc0["Time"].val(), csubstr("2001-11-23 15:01:42 -5"));
+ EXPECT_EQ(doc0["User"].val(), csubstr("ed"));
+ EXPECT_EQ(doc0["Warning"].val(), csubstr("This is an error message for the log file"));
+ ConstNodeRef doc1 = t.rootref()[1];
+ EXPECT_EQ(doc1["Time"].val(), csubstr("2001-11-23 15:02:31 -5"));
+ EXPECT_EQ(doc1["User"].val(), csubstr("ed"));
+ EXPECT_EQ(doc1["Warning"].val(), csubstr("A slightly different error message."));
+ ConstNodeRef doc2 = t.rootref()[2];
+ EXPECT_EQ(doc2["Date"].val(), csubstr("2001-11-23 15:03:17 -5"));
+ EXPECT_EQ(doc2["User"].val(), csubstr("ed"));
+ EXPECT_EQ(doc2["Fatal"].val(), csubstr("Unknown variable \"bar\""));
+ EXPECT_EQ(doc2["Stack"][0]["file"].val(), csubstr("TopClass.py"));
+ EXPECT_EQ(doc2["Stack"][0]["line"].val(), csubstr("23"));
+ EXPECT_EQ(doc2["Stack"][0]["code"].val(), csubstr("x = MoreObject(\"345\\n\")\n"));
+ EXPECT_EQ(doc2["Stack"][1]["file"].val(), csubstr("MoreClass.py"));
+ EXPECT_EQ(doc2["Stack"][1]["line"].val(), csubstr("58"));
+ EXPECT_EQ(doc2["Stack"][1]["code"].val(), csubstr("foo = bar"));
+ });
+}
+
+
+TEST(general, github_issue_124)
+{
+ // All these inputs are basically the same.
+ // However, the comment was found to confuse the parser in #124.
+ csubstr yaml[] = {
+ "a:\n - b\nc: d",
+ "a:\n - b\n\n# ignore me:\nc: d",
+ "a:\n - b\n\n # ignore me:\nc: d",
+ "a:\n - b\n\n # ignore me:\nc: d",
+ "a:\n - b\n\n#:\nc: d", // also try with just a ':' in the comment
+ "a:\n - b\n\n# :\nc: d",
+ "a:\n - b\n\n#\nc: d", // also try with empty comment
+ };
+ for(csubstr inp : yaml)
+ {
+ SCOPED_TRACE(inp);
+ Tree t = parse_in_arena(inp);
+ std::string s = emitrs_yaml<std::string>(t);
+ // The re-emitted output should not contain the comment.
+ EXPECT_EQ(c4::to_csubstr(s), "a:\n - b\nc: d\n");
+ }
+}
+
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_block_folded.cpp b/thirdparty/ryml/test/test_block_folded.cpp
new file mode 100644
index 000000000..9d579c5a1
--- /dev/null
+++ b/thirdparty/ryml/test/test_block_folded.cpp
@@ -0,0 +1,1574 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(block_folded, basic)
+{
+ {
+ Tree t = parse_in_arena(R"(>
+hello
+there
+
+got it
+
+
+really
+)");
+ EXPECT_EQ(t.rootref().val(), csubstr("hello there\ngot it\n\nreally\n"));
+ }
+}
+
+TEST(block_folded, empty_block)
+{
+ {
+ Tree t = parse_in_arena(R"(- >
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(- >-
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(- >+
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(
+- >
+
+- >-
+
+- >+
+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr("\n"));
+ }
+ {
+ Tree t = parse_in_arena(R"(
+- >
+
+- >-
+
+- >+
+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr("\n"));
+ }
+ {
+ Tree t = parse_in_arena(R"(
+- >
+- >-
+- >+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr(""));
+ }
+}
+
+TEST(block_folded, empty_block0)
+{
+ Tree t = parse_in_arena(R"(- >
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ t = parse_in_arena(R"(- >-
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ t = parse_in_arena(R"(- >+
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+}
+
+TEST(block_folded, empty_block1)
+{
+ const Tree t = parse_in_arena(R"(
+- >-
+ a
+- >-
+
+- >-
+
+- >-
+
+
+
+- >-
+
+
+
+)");
+ EXPECT_EQ(t[0].val(), csubstr("a"));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr(""));
+ EXPECT_EQ(t[3].val(), csubstr(""));
+ EXPECT_EQ(t[4].val(), csubstr(""));
+}
+
+TEST(block_folded, empty_block_as_container_member)
+{
+ // this was ok
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ d: |
+ c: >
+ e:
+)", [](Tree const &t){
+ EXPECT_TRUE(t["map"].has_key());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_EQ(t["map"].num_children(), 5u);
+ for(const auto &child : t["map"].children())
+ {
+ EXPECT_EQ(child.val(), "");
+ if(child.key() != "e")
+ {
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+ // this was ok
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ d: |
+ c: >
+)", [](Tree const &t){
+ EXPECT_TRUE(t["map"].has_key());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_EQ(t["map"].num_children(), 4u);
+ for(const auto &child : t["map"].children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ });
+ // this was not ok! the block literal before next is extended: to
+ // include the YAML for next!
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ d: |
+ c: >
+next:
+ a: ""
+ b: ''
+ d: |
+ c: >
+)", [](Tree const &t){
+ for(const char *name : {"map", "next"})
+ {
+ ASSERT_TRUE(t.rootref().has_child(to_csubstr(name))) << "name=" << name;
+ ConstNodeRef node = t[to_csubstr(name)];
+ EXPECT_TRUE(node.has_key());
+ EXPECT_TRUE(node.is_map());
+ EXPECT_TRUE(node.is_map());
+ ASSERT_EQ(node.num_children(), 4u);
+ for(const auto &child : node.children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+ test_check_emit_check(R"(
+seq:
+ - ""
+ - ''
+ - |
+ - >
+next:
+ - ""
+ - ''
+ - |
+ - >
+)", [](Tree const &t){
+ for(const char *name : {"seq", "next"})
+ {
+ ASSERT_TRUE(t.rootref().has_child(to_csubstr(name))) << "name=" << name;
+ ConstNodeRef node = t[to_csubstr(name)];
+ EXPECT_TRUE(node.has_key());
+ EXPECT_TRUE(node.is_seq());
+ ASSERT_EQ(node.num_children(), 4u);
+ for(const auto &child : node.children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+}
+
+TEST(block_folded, issue152_not_indented)
+{
+ const Tree t = parse_in_arena(R"(
+ok:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ - parses - yes
+ok_parses: yes
+err:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+err_parses: no
+err2:
+ - >
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+err2_parses: no
+err3:
+ - >-
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+err3_parses: no
+)");
+ EXPECT_EQ(t["ok" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(t["err" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(t["err2"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(t["err3"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432"));
+}
+
+TEST(block_folded, issue152_indented_once)
+{
+ const Tree t = parse_in_arena(R"(
+indented_once:
+ ok:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ - parses - yes
+ ok_parses: yes
+ err:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err_parses: no
+ err2:
+ - >
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err2_parses: no
+ err3:
+ - >-
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err3_parses: no
+)");
+ ConstNodeRef n = t["indented_once"];
+ EXPECT_EQ(n["ok" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err2"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err3"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432"));
+}
+
+TEST(block_folded, issue152_indented_twice)
+{
+ const Tree t = parse_in_arena(R"(
+indented_once:
+ indented_twice:
+ ok:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ - parses - yes
+ ok_parses: yes
+ err:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err_parses: no
+ err2:
+ - >
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err2_parses: no
+ err3:
+ - >-
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err3_parses: no
+)");
+ ConstNodeRef n = t["indented_once"]["indented_twice"];
+ EXPECT_EQ(n["ok" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err2"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err3"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432"));
+}
+
+TEST(block_folded, issue152_indented_thrice)
+{
+ const Tree t = parse_in_arena(R"(
+indented_once:
+ indented_twice:
+ indented_thrice:
+ ok:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ - parses - yes
+ ok_parses: yes
+ err:
+ - |
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err_parses: no
+ err2:
+ - >
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err2_parses: no
+ err3:
+ - >-
+ exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
+ err3_parses: no
+)");
+ ConstNodeRef n = t["indented_once"]["indented_twice"]["indented_thrice"];
+ EXPECT_EQ(n["ok" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err" ][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err2"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432\n"));
+ EXPECT_EQ(n["err3"][0].val(), csubstr("exec pg_isready -U \"dog\" -d \"dbname=dog\" -h 127.0.0.1 -p 5432"));
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(block_folded, test_suite_4QFQ)
+{
+ csubstr yaml = R"(
+- |1
+ child2
+- |3
+ child2
+- ' child2
+
+'
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t[0].val(), csubstr(" child2\n"));
+ EXPECT_EQ(t[1].val(), csubstr(" child2\n"));
+ EXPECT_EQ(t[2].val(), csubstr(" child2\n"));
+ });
+}
+
+TEST(block_folded, test_suite_4QFQ_pt2)
+{
+ csubstr yaml = R"(---
+- |
+ child0
+- >
+
+
+ # child1
+- |1
+ child2
+- >
+ child3
+---
+foo:
+ - |
+ child0
+ - >
+
+
+ # child1
+ - |2
+ child2
+ - >
+ child3
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ConstNodeRef doc = t.rootref().child(0);
+ ASSERT_TRUE(doc.is_seq());
+ ASSERT_EQ(doc.num_children(), 4u);
+ EXPECT_EQ(doc[0].val(), csubstr("child0\n"));
+ EXPECT_EQ(doc[1].val(), csubstr("\n\n# child1\n"));
+ EXPECT_EQ(doc[2].val(), csubstr(" child2\n"));
+ EXPECT_EQ(doc[3].val(), csubstr("child3\n"));
+ doc = t.rootref().child(1);
+ ASSERT_TRUE(doc.is_map());
+ ASSERT_EQ(doc["foo"].num_children(), 4u);
+ EXPECT_EQ(doc["foo"][0].val(), csubstr("child0\n"));
+ EXPECT_EQ(doc["foo"][1].val(), csubstr("\n\n# child1\n"));
+ EXPECT_EQ(doc["foo"][2].val(), csubstr(" child2\n"));
+ EXPECT_EQ(doc["foo"][3].val(), csubstr("child3\n"));
+ });
+}
+
+TEST(block_folded, test_suite_6VJK)
+{
+ csubstr yaml = R"(- >
+ Sammy Sosa completed another
+ fine season with great stats.
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+ What a year!
+- >
+ Sammy Sosa completed another
+ fine season with great stats.
+ 63 Home Runs
+ 0.288 Batting Average
+ What a year!
+- >
+ Sammy Sosa completed another
+ fine season with great stats.
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+ What a year!
+- >
+ Sammy Sosa completed another
+ fine season with great stats.
+
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+
+ What a year!
+- >
+ Sammy Sosa completed another
+ fine season with great stats.
+
+
+
+ 63 Home Runs
+ 0.288 Batting Average
+
+
+
+ What a year!
+- >-
+ No folding needed
+- >
+ No folding needed)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t[0].val(), csubstr("Sammy Sosa completed another fine season with great stats.\n63 Home Runs 0.288 Batting Average\nWhat a year!\n"));
+ EXPECT_EQ(t[1].val(), csubstr("Sammy Sosa completed another fine season with great stats.\n 63 Home Runs\n 0.288 Batting Average\nWhat a year!\n"));
+ EXPECT_EQ(t[2].val(), csubstr("Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n"));
+ EXPECT_EQ(t[3].val(), csubstr("Sammy Sosa completed another fine season with great stats.\n\n\n 63 Home Runs\n 0.288 Batting Average\n\n\nWhat a year!\n"));
+ EXPECT_EQ(t[4].val(), csubstr("Sammy Sosa completed another fine season with great stats.\n\n\n\n 63 Home Runs\n 0.288 Batting Average\n\n\n\nWhat a year!\n"));
+ EXPECT_EQ(t[5].val(), csubstr("No folding needed"));
+ EXPECT_EQ(t[6].val(), csubstr("No folding needed\n"));
+ });
+}
+
+TEST(block_folded, test_suite_7T8X)
+{
+ csubstr yaml = R"(>
+
+ folded
+ line
+
+ next
+ line
+ * bullet
+
+ * list
+ * lines
+
+ last
+ line
+
+# Comment
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(t.rootref().val(), "\nfolded line\nnext line\n * bullet\n\n * list\n * lines\n\nlast line\n");
+}
+
+TEST(block_folded, test_suite_A6F9)
+{
+ csubstr yaml = R"(
+strip: |-
+ text
+clip: |
+ text
+keep: |+
+ text
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t["strip"].val(), "text");
+ EXPECT_EQ(t["clip"].val(), "text\n");
+ EXPECT_EQ(t["keep"].val(), "text\n");
+ });
+}
+
+TEST(block_folded, test_suite_B3HG)
+{
+ csubstr yaml = R"(
+--- >
+ folded
+ text
+
+
+--- >
+ folded
+ text
+--- >
+ folded text
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t.docref(0).val(), csubstr("folded text\n"));
+ EXPECT_EQ(t.docref(1).val(), csubstr("folded text\n"));
+ EXPECT_EQ(t.docref(2).val(), csubstr("folded text\n"));
+ });
+}
+
+TEST(block_folded, test_suite_D83L)
+{
+ csubstr yaml = R"(
+- |2-
+ explicit indent and chomp
+- |-2
+ chomp and explicit indent
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_TRUE(t.rootref().is_seq());
+ EXPECT_EQ(t[0].val(), csubstr("explicit indent and chomp"));
+ EXPECT_EQ(t[1].val(), csubstr("chomp and explicit indent"));
+ });
+}
+
+TEST(block_folded, test_suite_DWX9)
+{
+ csubstr yaml = R"(
+|
+
+
+ literal
+
+
+ text
+
+ # Comment
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t.rootref().val(), csubstr("\n\nliteral\n \n\ntext\n"));
+ });
+}
+
+TEST(block_folded, test_suite_F6MC)
+{
+ csubstr yaml = R"(
+a: >2
+ more indented
+ regular
+b: >2
+
+
+ more indented
+ regular
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t["a"].val(), csubstr(" more indented\nregular\n"));
+ EXPECT_EQ(t["b"].val(), csubstr("\n\n more indented\nregular\n"));
+ });
+}
+
+TEST(block_folded, test_suite_K858)
+{
+ csubstr yaml = R"(---
+# strip
+- >-
+
+# clip
+- >
+
+# keep
+- |+
+
+---
+strip: >-
+
+clip: >
+
+keep: |+
+
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_EQ(t.docref(0).num_children(), 3u);
+ EXPECT_EQ(t.docref(0)[0].val(), csubstr{});
+ EXPECT_EQ(t.docref(0)[1].val(), csubstr{});
+ EXPECT_EQ(t.docref(0)[2].val(), csubstr("\n"));
+ ASSERT_TRUE(t.docref(1).has_child("strip"));
+ ASSERT_TRUE(t.docref(1).has_child("keep"));
+ ASSERT_TRUE(t.docref(1).has_child("clip"));
+ EXPECT_EQ(t.docref(1)["strip"].val(), csubstr{});
+ EXPECT_EQ(t.docref(1)["clip"].val(), csubstr{});
+ EXPECT_EQ(t.docref(1)["keep"].val(), csubstr("\n"));
+ });
+}
+
+
+TEST(block_folded, test_suite_MJS9)
+{
+ csubstr yaml = R"(
+- >
+ foo
+
+ bar
+
+ baz
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t[0].val(), csubstr("foo \n\n\t bar\n\nbaz\n")); // "foo \n\n \t bar\n\nbaz\n"
+ });
+}
+
+TEST(block_folded, test_suite_P2AD)
+{
+ csubstr yaml = R"(
+- | # Empty header↓
+ literal
+- >1 # Indentation indicator↓
+ folded
+- |+ # Chomping indicator↓
+ keep
+
+- >1- # Both indicators↓
+ strip
+- >-1 # Both indicators↓
+ strip
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_seq());
+ ASSERT_EQ(t.rootref().num_children(), 5u);
+ EXPECT_EQ(t[0].val(), csubstr("literal\n"));
+ EXPECT_EQ(t[1].val(), csubstr(" folded\n"));
+ EXPECT_EQ(t[2].val(), csubstr("keep\n\n"));
+ EXPECT_EQ(t[3].val(), csubstr(" strip"));
+ EXPECT_EQ(t[4].val(), csubstr(" strip"));
+ });
+}
+
+
+TEST(block_folded, test_suite_R4YG)
+{
+ csubstr yaml = R"(
+- |
+ detected0
+- >
+
+
+ # detected1
+- |1
+ explicit2
+- >
+
+ detected3
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_seq());
+ ASSERT_EQ(t.rootref().num_children(), 4u);
+ EXPECT_EQ(t[0].val(), csubstr("detected0\n"));
+ EXPECT_EQ(t[1].val(), csubstr("\n\n# detected1\n"));
+ EXPECT_EQ(t[2].val(), csubstr(" explicit2\n"));
+ EXPECT_EQ(t[3].val(), csubstr("\t\ndetected3\n"));
+ });
+}
+
+
+TEST(block_folded, test_suite_T26H)
+{
+ csubstr yaml = R"(
+--- |
+
+
+ literal
+
+
+ text
+
+ # Comment
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_TRUE(t.rootref().first_child().is_doc());
+ EXPECT_EQ(t.rootref().first_child().val(), csubstr("\n\nliteral\n \n\ntext\n"));
+ });
+}
+
+
+TEST(block_folded, test_suite_T5N4)
+{
+ csubstr yaml = R"(
+--- |
+ literal
+ text
+
+
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_TRUE(t.rootref().first_child().is_doc());
+ EXPECT_EQ(t.rootref().first_child().val(), csubstr("literal\n\ttext\n"));
+ });
+}
+
+
+TEST(block_folded, test_suite_W4TN)
+{
+ csubstr yaml = R"(
+--- |
+ %!PS-Adobe-2.0
+...
+--- >
+ %!PS-Adobe-2.0
+...
+--- |
+%!PS-Adobe-2.0
+...
+--- >
+%!PS-Adobe-2.0
+...
+---
+# Empty
+...
+--- |
+ %!PS-Adobe-2.0
+--- >
+ %!PS-Adobe-2.0
+--- |
+%!PS-Adobe-2.0
+--- >
+%!PS-Adobe-2.0
+---
+# empty
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ConstNodeRef r = t.rootref();
+ ASSERT_TRUE(r.is_stream());
+ ASSERT_EQ(r.num_children(), 10u);
+ ASSERT_TRUE(r.doc(0).is_doc());
+ ASSERT_TRUE(r.doc(0).is_val());
+ EXPECT_EQ(r.doc(0).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(1).is_doc());
+ ASSERT_TRUE(r.doc(1).is_val());
+ EXPECT_EQ(r.doc(1).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(2).is_doc());
+ ASSERT_TRUE(r.doc(2).is_val());
+ EXPECT_EQ(r.doc(2).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(3).is_doc());
+ ASSERT_TRUE(r.doc(3).is_val());
+ EXPECT_EQ(r.doc(3).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(4).is_doc());
+ ASSERT_TRUE(r.doc(4).is_val());
+ EXPECT_EQ(r.doc(4).val(), csubstr{});
+ ASSERT_TRUE(r.doc(5).is_doc());
+ ASSERT_TRUE(r.doc(5).is_val());
+ EXPECT_EQ(r.doc(5).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(6).is_doc());
+ ASSERT_TRUE(r.doc(6).is_val());
+ EXPECT_EQ(r.doc(6).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(7).is_doc());
+ ASSERT_TRUE(r.doc(7).is_val());
+ EXPECT_EQ(r.doc(7).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(8).is_doc());
+ ASSERT_TRUE(r.doc(8).is_val());
+ EXPECT_EQ(r.doc(8).val(), csubstr("%!PS-Adobe-2.0\n"));
+ ASSERT_TRUE(r.doc(4).is_doc());
+ ASSERT_TRUE(r.doc(4).is_val());
+ EXPECT_EQ(r.doc(4).val(), csubstr{});
+ });
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(BLOCK_FOLDED)
+{
+//
+ADD_CASE_TO_GROUP("indentation requirements",
+R"(---
+>
+hello
+there
+---
+>
+ hello
+ there
+---
+>
+ hello
+ there
+---
+>
+ciao
+qua
+---
+>
+ ciao
+ qua
+---
+>
+ ciao
+ qua
+---
+- >
+ hello
+ there
+- >
+ ciao
+ qua
+---
+foo: >
+ hello
+ there
+bar: >
+ ciao
+ qua
+)",
+N(STREAM, L{
+ N(DOCVAL|QV, "hello there\n"),
+ N(DOCVAL|QV, "hello there\n"),
+ N(DOCVAL|QV, "hello there\n"),
+ N(DOCVAL|QV, "ciao qua\n"),
+ N(DOCVAL|QV, "ciao qua\n"),
+ N(DOCVAL|QV, "ciao qua\n"),
+ N(SEQ|DOC, L{N(QV, "hello there\n"), N(QV, "ciao qua\n")}),
+ N(MAP|DOC, L{N(QV, "foo", "hello there\n"), N(QV, "bar", "ciao qua\n")}),
+ }));
+
+ADD_CASE_TO_GROUP("indentation requirements err seq", EXPECT_PARSE_ERROR,
+R"(- >
+hello
+there
+- >
+ciao
+qua
+)",
+N(L{N(QV, "hello there"), N(QV, "ciao qua\n")}));
+
+ADD_CASE_TO_GROUP("indentation requirements err map", EXPECT_PARSE_ERROR,
+R"(foo: >
+hello
+there
+bar: >
+ciao
+qua
+)",
+N(L{N(QV, "foo", "hello there\n"), N(QV, "bar" "ciao qua\n")}));
+
+ADD_CASE_TO_GROUP("indentation requirements err level", EXPECT_PARSE_ERROR,
+R"(--- >2
+ hello
+ there
+)",
+N(NOTYPE));
+
+ADD_CASE_TO_GROUP("foo without space after",
+R"(>
+ foo
+)",
+N(DOCVAL|QV, "foo\n"));
+
+ADD_CASE_TO_GROUP("foo with space after",
+R"(>
+ foo
+
+)",
+N(DOCVAL|QV, "foo\n"));
+
+ADD_CASE_TO_GROUP("simple with indents",
+R"(>
+ foo
+
+ bar
+)",
+N(DOCVAL|QV, "foo\n \n bar\n"));
+
+
+ADD_CASE_TO_GROUP("7T8X",
+R"(- >
+
+ folded
+ line
+
+ next
+ line
+ * bullet
+
+ * list
+ * lines
+
+ last
+ line
+
+# Comment
+
+##### this is the original scalar:
+- >
+
+ folded
+ line
+
+ next
+ line
+ * bullet
+
+ * list
+ * lines
+
+ last
+ line
+
+# Comment
+
+##### without any indentation
+- >
+
+ folded
+ line
+
+ next
+ line
+ * bullet
+
+ * list
+ * lines
+
+ last
+ line
+
+# Comment
+)",
+ L{
+ N(QV, "\nfolded line\nnext line\n * bullet\n\n * list\n * lines\n\nlast line\n"),
+ N(QV, "\nfolded line\nnext line\n * bullet\n\n * list\n * lines\n\nlast line\n"),
+ N(QV, "\nfolded line\nnext line\n * bullet\n\n * list\n * lines\n\nlast line\n"),
+ }
+);
+
+
+ADD_CASE_TO_GROUP("block folded as seq val, implicit indentation 2",
+R"(
+- >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+- another val
+)",
+ L{
+ N(QV, "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 2",
+R"(
+example: >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 2, chomp=keep",
+R"(
+example: >+
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n\n\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 2, chomp=strip",
+R"(
+example: >-
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end."),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 2",
+R"(
+example: >2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 2, chomp=keep",
+R"(
+example: >+2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+example2: >2+
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n\n\n"),
+ N(QV, "example2", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n\n\n"),
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 2, chomp=strip",
+R"(
+example: >-2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+example2: >2-
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end."),
+ N(QV, "example2", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end."),
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 3",
+R"(
+example: >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 3",
+R"(
+example: >3
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 4",
+R"(
+example: >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 4",
+R"(
+example: >4
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, implicit indentation 9",
+R"(
+example: >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block folded as map val, explicit indentation 9",
+R"(
+example: >9
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+
+ADD_CASE_TO_GROUP("block folded as map entry",
+R"(
+data: >
+ Wrapped text
+ will be folded
+ into a single
+ paragraph
+
+ Blank lines denote
+ paragraph breaks
+)",
+ N(L{N(KEYVAL|VALQUO, "data", "Wrapped text will be folded into a single paragraph\nBlank lines denote paragraph breaks\n")})
+);
+
+ADD_CASE_TO_GROUP("block folded, no chomp, no indentation",
+R"(example: >
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+another: text
+)",
+ N(L{
+ N(KEYVAL|VALQUO, "example", "Several lines of text, with some \"quotes\" of various 'types', and also a blank line:\nplus another line at the end.\n"),
+ N("another", "text"),
+ })
+);
+
+ADD_CASE_TO_GROUP("block folded with tab and spaces",
+R"(>
+ )",
+ N(DOCVAL|VALQUO, "\t \n")
+ );
+
+
+ADD_CASE_TO_GROUP("block folded with empty docval 1",
+R"(>)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 2",
+R"(>
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 3",
+R"(>
+ )",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 4",
+R"(>
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 5",
+R"(>
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 8",
+R"(>
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 9",
+R"(>
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 10",
+R"(>
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 11",
+R"(>
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 12",
+R"(>
+
+
+
+
+
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with empty docval 13",
+R"(>
+
+
+
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 0",
+R"(>
+ asd)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 1",
+R"(>
+ asd
+)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 2",
+R"(>
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 3",
+R"(>
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 4",
+R"(>
+ asd
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 5",
+R"(>
+ asd
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 5.1",
+R"(>
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 5.2",
+R"(>
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 5.3",
+R"(>
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n\n\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 6",
+R"(>
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 7",
+R"(>
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 8",
+R"(>
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 9",
+R"(>
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 10",
+R"(>
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n\t \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 11",
+R"(>
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \t \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 12",
+R"(>
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n\t \n")
+ );
+
+ADD_CASE_TO_GROUP("block folded with docval no newlines at end 13",
+R"(>
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \t \n")
+ );
+
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 0",
+R"(>+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 1",
+R"(>+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 1.1",
+R"(>+
+ )",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 1.2",
+R"(>+
+ asd)",
+ N(DOCVAL|VALQUO, "asd")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 1.3",
+R"(>+
+ asd
+)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 1.4",
+R"(>+
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 2",
+R"(>+
+
+)",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 2.1",
+R"(>+
+
+ )",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 3",
+R"(>+
+
+
+)",
+ N(DOCVAL|VALQUO, "\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 4",
+R"(>+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "\n\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, keep, empty docval trailing 5",
+R"(>+
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "\n\n\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block folded, empty block vals in seq 0",
+R"(- >+
+
+- >+
+ )",
+N(L{N(QV, "\n"), N(QV, ""),}));
+
+ADD_CASE_TO_GROUP("block folded, empty block vals in seq 1",
+R"(- >+
+
+- >+
+
+)",
+N(L{N(QV, "\n"), N(QV, "\n"),}));
+
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_block_literal.cpp b/thirdparty/ryml/test/test_block_literal.cpp
new file mode 100644
index 000000000..fe8e352be
--- /dev/null
+++ b/thirdparty/ryml/test/test_block_literal.cpp
@@ -0,0 +1,1261 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(block_literal, empty_block)
+{
+ {
+ Tree t = parse_in_arena(R"(- |
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(- |-
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(- |+
+)");
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ }
+ {
+ Tree t = parse_in_arena(R"(# no indentation: fails!
+- |
+
+- |-
+
+- |+
+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr("\n"));
+ }
+ {
+ Tree t = parse_in_arena(R"(
+- |
+
+- |-
+
+- |+
+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr("\n"));
+ }
+ {
+ Tree t = parse_in_arena(R"(
+- |
+- |-
+- |+
+)");
+ EXPECT_FALSE(t.empty());
+ EXPECT_EQ(t[0].val(), csubstr(""));
+ EXPECT_EQ(t[1].val(), csubstr(""));
+ EXPECT_EQ(t[2].val(), csubstr(""));
+ }
+}
+
+
+TEST(block_literal, empty_block_as_container_member)
+{
+ // this was ok
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ c: >
+ d: |
+ e:
+)", [](Tree const &t){
+ EXPECT_TRUE(t["map"].has_key());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_EQ(t["map"].num_children(), 5u);
+ for(const auto &child : t["map"].children())
+ {
+ EXPECT_EQ(child.val(), "");
+ if(child.key() != "e")
+ {
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+ // this was ok
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ c: >
+ d: |
+)", [](Tree const &t){
+ EXPECT_TRUE(t["map"].has_key());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_TRUE(t["map"].is_map());
+ EXPECT_EQ(t["map"].num_children(), 4u);
+ for(const auto &child : t["map"].children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ });
+ // this was not ok! the block literal before next is extended: to
+ // include the YAML for next!
+ test_check_emit_check(R"(
+map:
+ a: ""
+ b: ''
+ c: >
+ d: |
+next:
+ a: ""
+ b: ''
+ c: >
+ d: |
+)", [](Tree const &t){
+ for(const char *name : {"map", "next"})
+ {
+ ASSERT_TRUE(t.rootref().has_child(to_csubstr(name))) << "name=" << name;
+ ConstNodeRef node = t[to_csubstr(name)];
+ EXPECT_TRUE(node.has_key());
+ EXPECT_TRUE(node.is_map());
+ ASSERT_EQ(node.num_children(), 4u);
+ for(const auto &child : node.children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+ test_check_emit_check(R"(
+seq:
+ - ""
+ - ''
+ - >
+ - |
+next:
+ - ""
+ - ''
+ - >
+ - |
+)", [](Tree const &t){
+ for(const char *name : {"seq", "next"})
+ {
+ ASSERT_TRUE(t.rootref().has_child(to_csubstr(name))) << "name=" << name;
+ ConstNodeRef node = t[to_csubstr(name)];
+ EXPECT_TRUE(node.has_key());
+ EXPECT_TRUE(node.is_seq());
+ ASSERT_EQ(node.num_children(), 4u);
+ for(const auto &child : node.children())
+ {
+ EXPECT_EQ(child.val(), "");
+ EXPECT_TRUE(child.type().is_val_quoted());
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+ });
+}
+
+
+TEST(block_literal, emit_does_not_add_lines_to_multi_at_end_1)
+{
+ Tree t = parse_in_arena("[]");
+ NodeRef r = t.rootref();
+ r.append_child() = "\n\n";
+ r.append_child() = "\n\n";
+ r.append_child() = "last";
+ std::string out = emitrs_yaml<std::string>(t);
+ t.clear();
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t[0].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[1].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[2].val(), csubstr("last"));
+ out = emitrs_yaml<std::string>(t);
+ t.clear();
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t[0].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[1].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[2].val(), csubstr("last"));
+ out = emitrs_yaml<std::string>(t);
+ t.clear();
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t[0].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[1].val(), csubstr("\n\n"));
+ EXPECT_EQ(t[2].val(), csubstr("last"));
+ EXPECT_EQ(csubstr("ab\n\n \n").trimr(" \t\n"), csubstr("ab"));
+}
+
+TEST(block_literal, emit_does_not_add_lines_to_multi_at_end_2)
+{
+ Tree t = parse_in_arena(R"(--- |+
+ ab
+
+
+)");
+ EXPECT_EQ(t.docref(0).val(), csubstr("ab\n\n \n"));
+ std::string expected = R"(--- |
+ ab
+
+
+
+)";
+ std::string out = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(out, expected);
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t.docref(0).val(), csubstr("ab\n\n \n"));
+ out = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(out, expected);
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t.docref(0).val(), csubstr("ab\n\n \n"));
+ out = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(out, expected);
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t.docref(0).val(), csubstr("ab\n\n \n"));
+}
+
+TEST(block_literal, emit_does_not_add_lines_to_multi_at_end_3)
+{
+ std::string yaml = R"(
+- |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+
+- |+
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+- last
+)";
+ std::string expected = R"(- |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+- |+
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+- last
+)";
+ Tree t = parse_in_arena(to_csubstr(yaml));
+ EXPECT_EQ(t[0].val(), "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n");
+ EXPECT_EQ(t[1].val(), "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n\n");
+ std::string out = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(out, expected);
+ t = parse_in_arena(to_csubstr(out));
+ EXPECT_EQ(t[0].val(), "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n");
+ EXPECT_EQ(t[1].val(), "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n\n");
+ out = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(out, expected);
+}
+
+TEST(block_literal, carriage_return)
+{
+ std::string yaml = "with: |\r\n"
+" text\r\n"
+" lines\r\n"
+"without: |\n"
+" text\n"
+" lines\n";
+ Tree t = parse_in_arena(to_csubstr(yaml));
+ EXPECT_EQ(t["with"].val(), "text\n \tlines\n");
+ EXPECT_EQ(t["without"].val(), "text\n \tlines\n");
+ auto emitted = emitrs_yaml<std::string>(t);
+ #ifdef RYML_DBG
+ __c4presc(emitted.data(), emitted.size());
+ #endif
+ Tree r = parse_in_arena(to_csubstr(emitted));
+ EXPECT_EQ(t["with"].val(), "text\n \tlines\n");
+ EXPECT_EQ(t["without"].val(), "text\n \tlines\n");
+}
+
+#ifdef JAVAI
+TEST(block_literal, errors_on_tab_indents)
+{
+ Tree tree;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_arena("foo: |4\n this is foo\n now with tab-\n \t \tmust not work\n", &tree);
+ });
+}
+#endif
+
+TEST(block_literal, test_suite_L24T_00)
+{
+ // this is double quoted, but will be emitted as a block literal
+ csubstr yaml = R"(foo: "x\n \n"
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t["foo"].val(), csubstr("x\n \n"));
+ });
+}
+
+TEST(block_literal, error_on_bad_spec)
+{
+ Tree t;
+ ExpectError::do_check(&t, [&t]{
+ t = parse_in_arena("- |012abc\n must have errors above\n");
+ });
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(BLOCK_LITERAL)
+{
+//
+ADD_CASE_TO_GROUP("indentation requirements",
+R"(---
+|
+hello
+there
+---
+|
+ hello
+ there
+---
+|
+ hello
+ there
+---
+|
+ciao
+qua
+---
+|
+ ciao
+ qua
+---
+|
+ ciao
+ qua
+---
+- |
+ hello
+ there
+- |
+ ciao
+ qua
+---
+foo: |
+ hello
+ there
+bar: |
+ ciao
+ qua
+)",
+N(STREAM, L{
+ N(DOCVAL|QV, "hello\nthere\n"),
+ N(DOCVAL|QV, "hello\nthere\n"),
+ N(DOCVAL|QV, "hello\nthere\n"),
+ N(DOCVAL|QV, "ciao\nqua\n"),
+ N(DOCVAL|QV, "ciao\nqua\n"),
+ N(DOCVAL|QV, "ciao\nqua\n"),
+ N(SEQ|DOC, L{N(QV, "hello\nthere\n"), N(QV, "ciao\nqua\n")}),
+ N(MAP|DOC, L{N(QV, "foo", "hello\nthere\n"), N(QV, "bar", "ciao\nqua\n")}),
+ }));
+
+ADD_CASE_TO_GROUP("indentation requirements err seq", EXPECT_PARSE_ERROR,
+R"(- |
+hello
+there
+- |
+ciao
+qua
+)",
+N(L{N(QV, "hello\nthere\n"), N(QV, "ciao\nqua\n")}));
+
+ADD_CASE_TO_GROUP("indentation requirements err map", EXPECT_PARSE_ERROR,
+R"(foo: |
+hello
+there
+bar: |
+ciao
+qua
+)",
+N(L{N(QV, "foo", "hello\nthere\n"), N(QV, "bar" "ciao\nqua\n")}));
+
+ADD_CASE_TO_GROUP("indentation requirements err level", EXPECT_PARSE_ERROR,
+R"(--- |2
+ hello
+ there
+)",
+N(NOTYPE));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_02",
+"--- |1-",
+N(STREAM, L{N(DOCVAL|VALQUO, {})}));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_03",
+"--- |1+",
+N(STREAM, L{N(DOCVAL|VALQUO, {})}));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_xx",
+"--- |+",
+N(STREAM, L{N(DOCVAL|VALQUO, {})}));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_02_1",
+"|1-",
+N(DOCVAL|VALQUO, {}));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_03_1",
+"|1+",
+N(DOCVAL|VALQUO, {}));
+
+ADD_CASE_TO_GROUP("empty, specs only 2G84_xx_1",
+"|+",
+N(DOCVAL|VALQUO, {}));
+
+ADD_CASE_TO_GROUP("block literal as map entry",
+R"(
+data: |
+ There once was a short man from Ealing
+ Who got on a bus to Darjeeling
+ It said on the door
+ "Please don't spit on the floor"
+ So he carefully spat on the ceiling
+)",
+ N(MAP, {
+ N(KEYVAL|VALQUO, "data", "There once was a short man from Ealing\nWho got on a bus to Darjeeling\n It said on the door\n \"Please don't spit on the floor\"\nSo he carefully spat on the ceiling\n")
+ })
+);
+
+ADD_CASE_TO_GROUP("block literal and two scalars",
+R"(
+example: >
+ HTML goes into YAML without modification
+message: |
+ <blockquote style="font: italic 12pt Times">
+ <p>"Three is always greater than two,
+ even for large values of two"</p>
+ <p>--Author Unknown</p>
+ </blockquote>
+date: 2007-06-01
+)",
+ N(MAP, L{
+ N(KEYVAL|VALQUO, "example", "HTML goes into YAML without modification\n"),
+ N(KEYVAL|VALQUO, "message", R"(<blockquote style="font: italic 12pt Times">
+<p>"Three is always greater than two,
+ even for large values of two"</p>
+<p>--Author Unknown</p>
+</blockquote>
+)"),
+ N("date", "2007-06-01"),
+ })
+);
+
+ADD_CASE_TO_GROUP("block literal no chomp, no indentation",
+R"(example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+another: text
+)",
+ N(MAP, L{
+ N(KEYVAL|VALQUO, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "text"),
+ })
+);
+
+ADD_CASE_TO_GROUP("block literal as seq val, implicit indentation 2",
+R"(
+- |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+- another val
+)",
+ L{
+ N(QV, "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as seq val, implicit indentation 2, chomp=keep",
+R"(
+- |+
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+- another val
+)",
+ L{
+ N(QV, "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n\n\n"),
+ N("another val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as seq val, implicit indentation 2, chomp=strip",
+R"(
+- |-
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+- another val
+)",
+ L{
+ N(QV, "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end."),
+ N("another val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as seq val at eof, implicit indentation 2",
+R"(
+- |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as seq val at eof, implicit indentation 4",
+R"(
+- |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, implicit indentation 2",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 2",
+R"(
+example: |2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 2, chomp=keep",
+R"(
+example: |+2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n\n\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 2, chomp=strip",
+R"(
+example: |-2
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end."),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, implicit indentation 3",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 3",
+R"(
+example: |3
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, implicit indentation 4",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 4",
+R"(
+example: |4
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val at eof, implicit indentation 2",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val at eof, implicit indentation 4",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, implicit indentation 9",
+R"(
+example: |
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal as map val, explicit indentation 9",
+R"(
+example: |9
+ Several lines of text,
+ with some "quotes" of various 'types',
+ and also a blank line:
+
+ plus another line at the end.
+
+
+another: val
+)",
+ L{
+ N(QV, "example", "Several lines of text,\nwith some \"quotes\" of various 'types',\nand also a blank line:\n\nplus another line at the end.\n"),
+ N("another", "val")
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal with empty unindented lines, without quotes",
+ R"(tpl:
+ src: |
+ #include <{{hdr.filename}}>
+
+ {{src.gencode}}
+)",
+ L{
+ N("tpl", L{N(QV, "src", "#include <{{hdr.filename}}>\n\n{{src.gencode}}\n")})
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal with empty unindented lines, with double quotes",
+ R"(tpl:
+ src: |
+ #include "{{hdr.filename}}"
+
+ {{src.gencode}}
+)",
+ L{
+ N("tpl", L{N(QV, "src", "#include \"{{hdr.filename}}\"\n\n{{src.gencode}}\n")})
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal with empty unindented lines, with single quotes",
+ R"(tpl:
+ src: |
+ #include '{{hdr.filename}}'
+
+ {{src.gencode}}
+)",
+ L{
+ N("tpl", L{N(QV, "src", "#include '{{hdr.filename}}'\n\n{{src.gencode}}\n")})
+ }
+);
+
+ADD_CASE_TO_GROUP("block literal with same indentation level 0",
+R"(
+aaa: |2
+ xxx
+bbb: |
+ yyy
+)",
+ L{N(QV, "aaa", "xxx\n"), N(QV, "bbb", "yyy\n")}
+ );
+
+ADD_CASE_TO_GROUP("block literal with same indentation level 1",
+R"(
+- aaa: |2
+ xxx
+ bbb: |
+ yyy
+)",
+ L{N(L{N(QV, "aaa", "xxx\n"), N(QV, "bbb", "yyy\n")})}
+ );
+
+ADD_CASE_TO_GROUP("block literal with tab and spaces",
+R"(|
+ )",
+ N(DOCVAL|VALQUO, "\t \n")
+ );
+
+
+ADD_CASE_TO_GROUP("block literal with empty docval 1",
+R"(|)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 2",
+R"(|
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 3",
+R"(|
+ )",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 4",
+R"(|
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 5",
+R"(|
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 8",
+R"(|
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 9",
+R"(|
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 10",
+R"(|
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 11",
+R"(|
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 12",
+R"(|
+
+
+
+
+
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 13",
+R"(|
+
+
+
+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.0",
+R"(- |+
+)",
+ N(SEQ, L{N(VALQUO, "")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.0.1",
+R"(- |+
+ )",
+ N(SEQ, L{N(VALQUO, "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.0.2",
+R"(- |+
+ )",
+ N(SEQ, L{N(VALQUO, "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.1",
+R"(foo: |+
+)",
+ N(MAP, L{N(VALQUO, "foo", "")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.1.1",
+R"(foo: |+
+ )",
+ N(MAP, L{N(VALQUO, "foo", "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.1.2",
+R"(foo: |+
+ )",
+ N(MAP, L{N(VALQUO, "foo", "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.2",
+R"(|+
+)",
+ N(DOCVAL|VALQUO, "")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.2.1",
+R"(|+
+ )",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 14.2.2",
+R"(|+
+ )",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.0",
+R"(- |+
+
+)",
+ N(SEQ, L{N(VALQUO, "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.0.1",
+R"(- |+
+
+ )",
+ N(SEQ, L{N(VALQUO, "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.1",
+R"(foo: |+
+
+)",
+ N(MAP, L{N(VALQUO, "foo", "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.1.1",
+R"(foo: |+
+
+ )",
+ N(MAP, L{N(VALQUO, "foo", "\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.2",
+R"(|+
+
+)",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 15.2.1",
+R"(|+
+
+ )",
+ N(DOCVAL|VALQUO, "\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 16",
+R"(|+
+
+
+)",
+ N(DOCVAL|VALQUO, "\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 16.1",
+R"(foo: |+
+
+
+)",
+ N(MAP, L{N(VALQUO, "foo", "\n\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 16.2",
+R"(- |+
+
+
+)",
+ N(SEQ, L{N(VALQUO, "\n\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 17",
+R"(|+
+
+
+
+)",
+ N(DOCVAL|VALQUO, "\n\n\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 17.1",
+R"(foo: |+
+
+
+
+)",
+ N(MAP, L{N(VALQUO, "foo", "\n\n\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with empty docval 17.2",
+R"(- |+
+
+
+
+)",
+ N(SEQ, L{N(VALQUO, "\n\n\n")})
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 0",
+R"(|
+ asd)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 1",
+R"(|
+ asd
+)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 1.1",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 1.2",
+R"(|+
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 2",
+R"(|
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 3",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 4",
+R"(|
+ asd
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 5",
+R"(|
+ asd
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 5.1",
+R"(|
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 5.2",
+R"(|
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 5.3",
+R"(|
+ asd
+
+
+
+
+
+ )",
+ N(DOCVAL|VALQUO, "asd\n\n\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 6",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 7",
+R"(|
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 8",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 9",
+R"(|
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 10",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n\t \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 11",
+R"(|
+ asd
+ )",
+ N(DOCVAL|VALQUO, "asd\n \t \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 12",
+R"(|
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n\t \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal with docval no newlines at end 13",
+R"(|
+ asd
+
+)",
+ N(DOCVAL|VALQUO, "asd\n \t \n")
+ );
+
+ADD_CASE_TO_GROUP("block literal, empty block vals in seq 0",
+R"(- |+
+
+- |+
+ )",
+N(L{N(QV, "\n"), N(QV, "\n"),}));
+
+ADD_CASE_TO_GROUP("block literal, empty block vals in seq 1",
+R"(- |+
+
+- |+
+
+)",
+N(L{N(QV, "\n"), N(QV, "\n"),}));
+
+}
+
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_callbacks.cpp b/thirdparty/ryml/test/test_callbacks.cpp
new file mode 100644
index 000000000..6f4bbf0ed
--- /dev/null
+++ b/thirdparty/ryml/test/test_callbacks.cpp
@@ -0,0 +1,356 @@
+#include "./test_case.hpp"
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/common.hpp"
+#endif
+#include <stdexcept>
+
+
+namespace c4 {
+namespace yml {
+
+static_assert(std::is_same<std::underlying_type<decltype(c4::yml::npos)>::type, size_t>::value, "invalid type");
+static_assert(std::is_same<std::underlying_type<decltype(c4::yml::NONE)>::type, size_t>::value, "invalid type");
+static_assert(size_t(c4::yml::npos) == ((size_t)-1), "invalid value"); // some debuggers show the wrong value...
+static_assert(size_t(c4::yml::NONE) == ((size_t)-1), "invalid value"); // some debuggers show the wrong value...
+
+std::string stored_msg;
+Location stored_location;
+void * stored_mem;
+size_t stored_length;
+
+void test_error_impl(const char* msg, size_t length, Location loc, void * /*user_data*/)
+{
+ stored_msg = std::string(msg, length);
+ stored_location = loc;
+}
+
+void* test_allocate_impl(size_t length, void * /*hint*/, void * /*user_data*/)
+{
+ void *mem = ::malloc(length);
+ stored_length = length;
+ stored_mem = mem;
+ if(mem == nullptr)
+ {
+ const char msg[] = "could not allocate memory";
+ test_error_impl(msg, sizeof(msg)-1, {}, nullptr);
+ }
+ return mem;
+}
+
+void test_free_impl(void *mem, size_t length, void * /*user_data*/)
+{
+ stored_mem = mem;
+ stored_length = length;
+ ::free(mem);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+TEST(Callbacks, ctor)
+{
+ {
+ Callbacks cb;
+ EXPECT_NE(cb.m_allocate, &test_allocate_impl);
+ EXPECT_NE(cb.m_free, &test_free_impl);
+ EXPECT_NE(cb.m_error, &test_error_impl);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, nullptr);
+ EXPECT_NE(cb.m_allocate, nullptr);
+ EXPECT_NE(cb.m_free, nullptr);
+ EXPECT_NE(cb.m_error, nullptr);
+ #else
+ EXPECT_EQ(cb.m_user_data, nullptr);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, nullptr);
+ #endif
+ }
+ {
+ Callbacks cb((void*)0xff, nullptr, nullptr, nullptr);
+ EXPECT_NE(cb.m_allocate, &test_allocate_impl);
+ EXPECT_NE(cb.m_free, &test_free_impl);
+ EXPECT_NE(cb.m_error, &test_error_impl);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_NE(cb.m_allocate, nullptr);
+ EXPECT_NE(cb.m_free, nullptr);
+ EXPECT_NE(cb.m_error, nullptr);
+ #else
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, nullptr);
+ #endif
+ }
+ {
+ Callbacks cb((void*)0xff, &test_allocate_impl, nullptr, nullptr);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_EQ(cb.m_allocate, &test_allocate_impl);
+ EXPECT_NE(cb.m_free, nullptr);
+ EXPECT_NE(cb.m_error, nullptr);
+ #else
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, nullptr);
+ #endif
+ }
+ {
+ Callbacks cb((void*)0xff, nullptr, &test_free_impl, nullptr);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_NE(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, &test_free_impl);
+ EXPECT_NE(cb.m_error, nullptr);
+ #else
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, &test_free_impl);
+ EXPECT_EQ(cb.m_error, nullptr);
+ #endif
+ }
+ {
+ Callbacks cb((void*)0xff, nullptr, nullptr, &test_error_impl);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_NE(cb.m_allocate, nullptr);
+ EXPECT_NE(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, &test_error_impl);
+ #else
+ EXPECT_EQ(cb.m_user_data, (void*)0xff);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, test_error_impl);
+ #endif
+ }
+}
+
+TEST(Callbacks, get)
+{
+ Callbacks cb = get_callbacks();
+ EXPECT_NE(cb.m_allocate, &test_allocate_impl);
+ EXPECT_NE(cb.m_free, &test_free_impl);
+ EXPECT_NE(cb.m_error, &test_error_impl);
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ EXPECT_EQ(cb.m_user_data, nullptr);
+ EXPECT_NE(cb.m_allocate, nullptr);
+ EXPECT_NE(cb.m_free, nullptr);
+ EXPECT_NE(cb.m_error, nullptr);
+ #else
+ EXPECT_EQ(cb.m_user_data, nullptr);
+ EXPECT_EQ(cb.m_allocate, nullptr);
+ EXPECT_EQ(cb.m_free, nullptr);
+ EXPECT_EQ(cb.m_error, nullptr);
+ #endif
+}
+
+TEST(Callbacks, set)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cb((void*)0xff, &test_allocate_impl, &test_free_impl, &test_error_impl);
+
+ set_callbacks(cb);
+ Callbacks after = get_callbacks();
+ EXPECT_EQ(cb.m_user_data, after.m_user_data);
+ EXPECT_EQ(cb.m_allocate, after.m_allocate);
+ EXPECT_EQ(cb.m_free, after.m_free);
+ EXPECT_EQ(cb.m_error, after.m_error);
+
+ set_callbacks(before);
+ after = get_callbacks();
+ EXPECT_EQ(before.m_user_data, after.m_user_data);
+ EXPECT_EQ(before.m_allocate, after.m_allocate);
+ EXPECT_EQ(before.m_free, after.m_free);
+ EXPECT_EQ(before.m_error, after.m_error);
+
+ set_callbacks(cb);
+ after = get_callbacks();
+ EXPECT_EQ(cb.m_user_data, after.m_user_data);
+ EXPECT_EQ(cb.m_allocate, after.m_allocate);
+ EXPECT_EQ(cb.m_free, after.m_free);
+ EXPECT_EQ(cb.m_error, after.m_error);
+}
+
+TEST(Callbacks, reset)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cb((void*)0xff, &test_allocate_impl, &test_free_impl, &test_error_impl);
+
+ set_callbacks(cb);
+ Callbacks after = get_callbacks();
+ EXPECT_EQ(cb.m_user_data, after.m_user_data);
+ EXPECT_EQ(cb.m_allocate, after.m_allocate);
+ EXPECT_EQ(cb.m_free, after.m_free);
+ EXPECT_EQ(cb.m_error, after.m_error);
+
+ reset_callbacks();
+ EXPECT_EQ(before.m_user_data, after.m_user_data);
+ EXPECT_EQ(before.m_allocate, after.m_allocate);
+ EXPECT_EQ(before.m_free, after.m_free);
+ EXPECT_EQ(before.m_error, after.m_error);
+}
+
+TEST(Callbacks, eq)
+{
+ Callbacks before = get_callbacks();
+ Callbacks bf2 = get_callbacks();
+ Callbacks cb((void*)0xff, &test_allocate_impl, &test_free_impl, &test_error_impl);
+
+ EXPECT_EQ(bf2, before);
+ EXPECT_TRUE(bf2 == before);
+ EXPECT_FALSE(!(bf2 == before));
+ EXPECT_TRUE(!(cb == before));
+}
+
+TEST(Callbacks, ne)
+{
+ Callbacks before = get_callbacks();
+ Callbacks bf2 = get_callbacks();
+ Callbacks cb((void*)0xff, &test_allocate_impl, &test_free_impl, &test_error_impl);
+
+ EXPECT_NE(cb, before);
+ EXPECT_TRUE(cb != before);
+ EXPECT_TRUE(!(bf2 != before));
+ EXPECT_FALSE(!(cb != before));
+}
+
+TEST(Callbacks, cmp_user_data)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cp = before;
+ EXPECT_EQ(cp, before);
+ cp.m_user_data = (void*)(((char*)before.m_user_data) + 100u);
+ EXPECT_NE(cp, before);
+}
+
+TEST(Callbacks, cmp_allocate)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cp = before;
+ EXPECT_NE(cp.m_allocate, nullptr);
+ EXPECT_EQ(cp, before);
+ cp.m_allocate = nullptr;
+ EXPECT_NE(cp, before);
+}
+
+TEST(Callbacks, cmp_free)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cp = before;
+ EXPECT_NE(cp.m_free, nullptr);
+ EXPECT_EQ(cp, before);
+ cp.m_free = nullptr;
+ EXPECT_NE(cp, before);
+}
+
+TEST(Callbacks, cmp_error)
+{
+ Callbacks before = get_callbacks();
+ Callbacks cp = before;
+ EXPECT_NE(cp.m_error, nullptr);
+ EXPECT_EQ(cp, before);
+ cp.m_error = nullptr;
+ EXPECT_NE(cp, before);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(allocate_and_free, basic)
+{
+ EXPECT_NE(get_callbacks().m_allocate, &test_allocate_impl);
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+ Callbacks cb(nullptr, &test_allocate_impl, &test_free_impl, nullptr);
+ set_callbacks(cb);
+ void *mem = get_callbacks().m_allocate(32, /*hint*/0, get_callbacks().m_user_data);
+ EXPECT_EQ(stored_mem, mem);
+ EXPECT_EQ(stored_length, 32u);
+ stored_mem = nullptr;
+ stored_length = 0;
+ get_callbacks().m_free(mem, 32u, get_callbacks().m_user_data);
+ EXPECT_EQ(stored_mem, mem);
+ EXPECT_EQ(stored_length, 32u);
+}
+
+TEST(error, basic)
+{
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+ Callbacks cb(nullptr, nullptr, nullptr, &test_error_impl);
+ set_callbacks(cb);
+ // message
+ EXPECT_EQ(get_callbacks().m_error, &test_error_impl);
+ c4::yml::error("some message 123"); // calls test_error_impl, which sets stored_msg and stored_location
+ EXPECT_EQ(stored_msg, "some message 123");
+ EXPECT_EQ(stored_location.name, "");
+ EXPECT_EQ(stored_location.offset, 0u);
+ EXPECT_EQ(stored_location.line, 0u);
+ EXPECT_EQ(stored_location.col, 0u);
+ // location
+ c4::yml::error("some message 456", Location("file.yml", 433u, 123u, 4u));
+ EXPECT_EQ(stored_msg, "some message 456");
+ EXPECT_EQ(stored_location.name, "file.yml");
+ EXPECT_EQ(stored_location.offset, 433u);
+ EXPECT_EQ(stored_location.line, 123u);
+ EXPECT_EQ(stored_location.col, 4u);
+ reset_callbacks();
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+}
+
+TEST(RYML_CHECK, basic)
+{
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+ Callbacks cb(nullptr, nullptr, nullptr, &test_error_impl);
+ set_callbacks(cb);
+ size_t the_line = __LINE__; RYML_CHECK(false); // keep both statements in the same line
+ EXPECT_EQ(stored_msg, "check failed: false");
+ EXPECT_EQ(stored_location.name, __FILE__);
+ EXPECT_EQ(stored_location.offset, 0u);
+ EXPECT_EQ(stored_location.line, the_line);
+ EXPECT_EQ(stored_location.col, 0u);
+ reset_callbacks();
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+}
+
+
+TEST(RYML_ASSERT, basic)
+{
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+ Callbacks cb(nullptr, nullptr, nullptr, &test_error_impl);
+ set_callbacks(cb);
+ stored_msg = "";
+ stored_location = {};
+ size_t the_line = __LINE__; RYML_ASSERT(false); // keep both statements in the same line
+ #if RYML_USE_ASSERT
+ EXPECT_EQ(stored_msg, "check failed: false");
+ EXPECT_EQ(stored_location.name, __FILE__);
+ EXPECT_EQ(stored_location.offset, 0u);
+ EXPECT_EQ(stored_location.line, the_line);
+ EXPECT_EQ(stored_location.col, 0u);
+ #else
+ C4_UNUSED(the_line);
+ EXPECT_EQ(stored_msg, "");
+ EXPECT_EQ(stored_location.name, nullptr);
+ EXPECT_EQ(stored_location.offset, 0u);
+ EXPECT_EQ(stored_location.line, 0u);
+ EXPECT_EQ(stored_location.col, 0u);
+ #endif
+ reset_callbacks();
+ EXPECT_NE(get_callbacks().m_error, &test_error_impl);
+}
+
+
+// FIXME this is here merely to avoid a linker error
+Case const* get_case(csubstr)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_case.cpp b/thirdparty/ryml/test/test_case.cpp
new file mode 100644
index 000000000..a850d242b
--- /dev/null
+++ b/thirdparty/ryml/test/test_case.cpp
@@ -0,0 +1,898 @@
+#include "./test_case.hpp"
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/common.hpp"
+#include "c4/format.hpp"
+#include "c4/span.hpp"
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/detail/print.hpp"
+#include "c4/yml/detail/checks.hpp"
+#endif
+
+#include <gtest/gtest.h>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+#elif defined(__clang__)
+# pragma clang diagnostic push
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wuseless-cast"
+# if __GNUC__ >= 6
+# pragma GCC diagnostic ignored "-Wnull-dereference"
+# endif
+#endif
+
+namespace c4 {
+namespace yml {
+
+
+size_t _num_leaves(Tree const& t, size_t node)
+{
+ size_t count = 0;
+ for(size_t ch = t.first_child(node); ch != NONE; ch = t.next_sibling(ch))
+ count += _num_leaves(t, ch);
+ return count;
+}
+
+
+void test_compare(Tree const& actual, Tree const& expected)
+{
+ ASSERT_EQ(actual.empty(), expected.empty());
+ if(actual.empty() || expected.empty())
+ return;
+ EXPECT_EQ(actual.size(), expected.size());
+ EXPECT_EQ(_num_leaves(actual, actual.root_id()), _num_leaves(expected, expected.root_id()));
+ test_compare(actual, actual.root_id(), expected, expected.root_id(), 0);
+}
+
+
+void test_compare(Tree const& actual, size_t node_actual,
+ Tree const& expected, size_t node_expected,
+ size_t level)
+{
+ #define _MORE_INFO "actual=" << node_actual << " vs expected=" << node_expected
+
+ ASSERT_NE(node_actual, (size_t)NONE);
+ ASSERT_NE(node_expected, (size_t)NONE);
+ ASSERT_LT(node_actual, actual.capacity());
+ ASSERT_LT(node_expected, expected.capacity());
+
+ EXPECT_EQ((type_bits)(actual.type(node_actual)&_TYMASK), (type_bits)(expected.type(node_expected)&_TYMASK)) << _MORE_INFO;
+
+ EXPECT_EQ(actual.has_key(node_actual), expected.has_key(node_expected)) << _MORE_INFO;
+ if(actual.has_key(node_actual) && expected.has_key(node_expected))
+ {
+ EXPECT_EQ(actual.key(node_actual), expected.key(node_expected)) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.has_val(node_actual), expected.has_val(node_expected)) << _MORE_INFO;
+ if(actual.has_val(node_actual) && expected.has_val(node_expected))
+ {
+ EXPECT_EQ(actual.val(node_actual), expected.val(node_expected)) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.has_key_tag(node_actual), expected.has_key_tag(node_expected)) << _MORE_INFO;
+ if(actual.has_key_tag(node_actual) && expected.has_key_tag(node_expected))
+ {
+ EXPECT_EQ(actual.key_tag(node_actual), expected.key_tag(node_expected)) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.has_val_tag(node_actual), expected.has_val_tag(node_expected)) << _MORE_INFO;
+ if(actual.has_val_tag(node_actual) && expected.has_val_tag(node_expected))
+ {
+ auto filtered = [](csubstr tag) {
+ if(tag.begins_with("!<!") && tag.ends_with('>'))
+ return tag.offs(3, 1);
+ return tag;
+ };
+ csubstr actual_tag = filtered(actual.val_tag(node_actual));
+ csubstr expected_tag = filtered(actual.val_tag(node_expected));
+ EXPECT_EQ(actual_tag, expected_tag) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.has_key_anchor(node_actual), expected.has_key_anchor(node_expected)) << _MORE_INFO;
+ if(actual.has_key_anchor(node_actual) && expected.has_key_anchor(node_expected))
+ {
+ EXPECT_EQ(actual.key_anchor(node_actual), expected.key_anchor(node_expected)) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.has_val_anchor(node_actual), expected.has_val_anchor(node_expected)) << _MORE_INFO;
+ if(actual.has_val_anchor(node_actual) && expected.has_val_anchor(node_expected))
+ {
+ EXPECT_EQ(actual.val_anchor(node_actual), expected.val_anchor(node_expected)) << _MORE_INFO;
+ }
+
+ EXPECT_EQ(actual.num_children(node_actual), expected.num_children(node_expected)) << _MORE_INFO;
+ for(size_t ia = actual.first_child(node_actual), ib = expected.first_child(node_expected);
+ ia != NONE && ib != NONE;
+ ia = actual.next_sibling(ia), ib = expected.next_sibling(ib))
+ {
+ test_compare(actual, ia, expected, ib, level+1);
+ }
+
+ #undef _MORE_INFO
+}
+
+void test_arena_not_shared(Tree const& a, Tree const& b)
+{
+ for(NodeData const* n = a.m_buf, *e = a.m_buf + a.m_cap; n != e; ++n)
+ {
+ EXPECT_FALSE(b.in_arena(n->m_key.scalar)) << n - a.m_buf;
+ EXPECT_FALSE(b.in_arena(n->m_key.tag )) << n - a.m_buf;
+ EXPECT_FALSE(b.in_arena(n->m_key.anchor)) << n - a.m_buf;
+ EXPECT_FALSE(b.in_arena(n->m_val.scalar)) << n - a.m_buf;
+ EXPECT_FALSE(b.in_arena(n->m_val.tag )) << n - a.m_buf;
+ EXPECT_FALSE(b.in_arena(n->m_val.anchor)) << n - a.m_buf;
+ }
+ for(NodeData const* n = b.m_buf, *e = b.m_buf + b.m_cap; n != e; ++n)
+ {
+ EXPECT_FALSE(a.in_arena(n->m_key.scalar)) << n - b.m_buf;
+ EXPECT_FALSE(a.in_arena(n->m_key.tag )) << n - b.m_buf;
+ EXPECT_FALSE(a.in_arena(n->m_key.anchor)) << n - b.m_buf;
+ EXPECT_FALSE(a.in_arena(n->m_val.scalar)) << n - b.m_buf;
+ EXPECT_FALSE(a.in_arena(n->m_val.tag )) << n - b.m_buf;
+ EXPECT_FALSE(a.in_arena(n->m_val.anchor)) << n - b.m_buf;
+ }
+ for(TagDirective const& td : a.m_tag_directives)
+ {
+ EXPECT_FALSE(b.in_arena(td.handle));
+ EXPECT_FALSE(b.in_arena(td.prefix));
+ }
+ for(TagDirective const& td : b.m_tag_directives)
+ {
+ EXPECT_FALSE(a.in_arena(td.handle));
+ EXPECT_FALSE(a.in_arena(td.prefix));
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// ensure coverage of the default callback report
+#ifndef RYML_NO_DEFAULT_CALLBACKS
+extern void report_error_impl(const char* msg, size_t len, Location loc, FILE *file);
+#endif
+
+std::string format_error(const char* msg, size_t len, Location loc)
+{
+ // ensure coverage of the default callback report
+ #ifndef RYML_NO_DEFAULT_CALLBACKS
+ report_error_impl(msg, len, loc, nullptr);
+ #endif
+ if(!loc) return msg;
+ std::string out;
+ if(!loc.name.empty()) c4::formatrs(append, &out, "{}:", loc.name);
+ c4::formatrs(append, &out, "{}:{}:", loc.line, loc.col);
+ if(loc.offset) c4::formatrs(append, &out, " (@{}B):", loc.offset);
+ c4::formatrs(append, &out, "{}:", csubstr(msg, len));
+ return out;
+}
+
+struct ExpectedError : public std::runtime_error
+{
+ Location error_location;
+ ExpectedError(const char* msg, size_t len, Location loc)
+ : std::runtime_error(format_error(msg, len, loc))
+ , error_location(loc)
+ {
+ }
+};
+
+
+//-----------------------------------------------------------------------------
+
+ExpectError::ExpectError(Tree *tree, Location loc)
+ : m_got_an_error(false)
+ , m_tree(tree)
+ , m_glob_prev(get_callbacks())
+ , m_tree_prev(tree ? tree->callbacks() : Callbacks{})
+ , expected_location(loc)
+{
+ auto err = [](const char* msg, size_t len, Location errloc, void *this_) {
+ ((ExpectError*)this_)->m_got_an_error = true;
+ throw ExpectedError(msg, len, errloc);
+ };
+ #ifdef RYML_NO_DEFAULT_CALLBACKS
+ c4::yml::Callbacks tcb((void*)this, nullptr, nullptr, err);
+ c4::yml::Callbacks gcb((void*)this, nullptr, nullptr, err);
+ #else
+ c4::yml::Callbacks tcb((void*)this, tree ? m_tree_prev.m_allocate : nullptr, tree ? m_tree_prev.m_free : nullptr, err);
+ c4::yml::Callbacks gcb((void*)this, m_glob_prev.m_allocate, m_glob_prev.m_free, err);
+ #endif
+ if(tree)
+ tree->callbacks(tcb);
+ set_callbacks(gcb);
+}
+
+ExpectError::~ExpectError()
+{
+ if(m_tree)
+ m_tree->callbacks(m_tree_prev);
+ set_callbacks(m_tree_prev);
+}
+
+void ExpectError::do_check(Tree *tree, std::function<void()> fn, Location expected_location)
+{
+ auto context = ExpectError(tree, expected_location);
+ try
+ {
+ fn();
+ }
+ catch(ExpectedError const& e)
+ {
+ #if defined(RYML_DBG)
+ std::cout << "---------------\n";
+ std::cout << "got an expected error:\n" << e.what() << "\n";
+ std::cout << "---------------\n";
+ #endif
+ if(context.expected_location)
+ {
+ EXPECT_EQ(static_cast<bool>(context.expected_location),
+ static_cast<bool>(e.error_location));
+ EXPECT_EQ(e.error_location.line, context.expected_location.line);
+ EXPECT_EQ(e.error_location.col, context.expected_location.col);
+ if(context.expected_location.offset)
+ {
+ EXPECT_EQ(e.error_location.offset, context.expected_location.offset);
+ }
+ }
+ };
+ EXPECT_TRUE(context.m_got_an_error);
+}
+
+void ExpectError::check_assertion(Tree *tree, std::function<void()> fn, Location expected_location)
+{
+ #if RYML_USE_ASSERT
+ ExpectError::do_check(tree, fn, expected_location);
+ #else
+ C4_UNUSED(tree);
+ C4_UNUSED(fn);
+ C4_UNUSED(expected_location);
+ #endif
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+using N = CaseNode;
+using L = CaseNode::iseqmap;
+
+TEST(CaseNode, setting_up)
+{
+ L tl1 = {DOC, DOC};
+ L tl2 = {(DOC), (DOC)};
+
+ ASSERT_EQ(tl1.size(), tl2.size());
+ N const& d1 = *tl1.begin();
+ N const& d2 = *(tl1.begin() + 1);
+ ASSERT_EQ(d1.reccount(), d2.reccount());
+ ASSERT_EQ((type_bits)d1.type, (type_bits)DOC);
+ ASSERT_EQ((type_bits)d2.type, (type_bits)DOC);
+
+ N n1(tl1);
+ N n2(tl2);
+ ASSERT_EQ(n1.reccount(), n2.reccount());
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+NodeType_e CaseNode::_guess() const
+{
+ NodeType t;
+ C4_ASSERT(!val.empty() != !children.empty() || (val.empty() && children.empty()));
+ if(children.empty())
+ {
+ C4_ASSERT(parent);
+ if(key.empty())
+ {
+ t = VAL;
+ }
+ else
+ {
+ t = KEYVAL;
+ }
+ }
+ else
+ {
+ NodeType_e has_key = key.empty() ? NOTYPE : KEY;
+ auto const& ch = children.front();
+ if(ch.key.empty())
+ {
+ t = (has_key|SEQ);
+ }
+ else
+ {
+ t = (has_key|MAP);
+ }
+ }
+ if( ! key_tag.empty())
+ {
+ C4_ASSERT( ! key.empty());
+ t.add(KEYTAG);
+ }
+ if( ! val_tag.empty())
+ {
+ C4_ASSERT( ! val.empty() || ! children.empty());
+ t.add(VALTAG);
+ }
+ if( ! key_anchor.str.empty())
+ {
+ t.add(key_anchor.type);
+ }
+ if( ! val_anchor.str.empty())
+ {
+ t.add(val_anchor.type);
+ }
+ return t;
+}
+
+
+//-----------------------------------------------------------------------------
+void CaseNode::compare_child(yml::ConstNodeRef const& n, size_t pos) const
+{
+ EXPECT_TRUE(pos < n.num_children());
+ EXPECT_TRUE(pos < children.size());
+
+ if(pos >= n.num_children() || pos >= children.size()) return;
+
+ ASSERT_GT(n.num_children(), pos);
+ auto const& expectedch = children[pos];
+
+ if(type & MAP)
+ {
+ auto actualch = n.find_child(expectedch.key);
+ if(actualch != nullptr)
+ {
+ // there may be duplicate keys.
+ if(actualch.id() != n[pos].id())
+ actualch = n[pos];
+ //EXPECT_EQ(fch, n[ch.key]);
+ EXPECT_EQ(actualch.get(), n[pos].get());
+ //EXPECT_EQ(n[pos], n[ch.key]);
+ EXPECT_EQ(n[expectedch.key].key(), expectedch.key);
+ }
+ else
+ {
+ printf("error: node should have child %.*s: ", (int)expectedch.key.len, expectedch.key.str);
+ print_path(n);
+ printf("\n");
+ print_node(n);
+ GTEST_FAIL();
+ }
+ }
+
+ if(type & SEQ)
+ {
+ EXPECT_FALSE(n[pos].has_key());
+ EXPECT_EQ(n[pos].get()->m_key.scalar, children[pos].key);
+ auto actualch = n.child(pos);
+ EXPECT_EQ(actualch.get(), n[pos].get());
+ }
+
+ if(expectedch.type & KEY)
+ {
+ auto actualfch = n[pos];
+ EXPECT_TRUE(actualfch.has_key()) << "id=" << actualfch.id();
+ if(actualfch.has_key())
+ {
+ EXPECT_EQ(actualfch.key(), expectedch.key) << "id=" << actualfch.id();
+ }
+
+ if( ! expectedch.key_tag.empty())
+ {
+ EXPECT_TRUE(actualfch.has_key_tag()) << "id=" << actualfch.id();
+ if(actualfch.has_key_tag())
+ {
+ EXPECT_EQ(actualfch.key_tag(), expectedch.key_tag) << "id=" << actualfch.id();
+ }
+ }
+ }
+
+ if(expectedch.type & VAL)
+ {
+ auto actualch = n[pos];
+ EXPECT_TRUE(actualch.has_val()) << "id=" << actualch.id();
+ if(actualch.has_val())
+ {
+ EXPECT_EQ(actualch.val(), expectedch.val) << "id=" << actualch.id();
+ }
+
+ if( ! expectedch.val_tag.empty())
+ {
+ EXPECT_TRUE(actualch.has_val_tag()) << "id=" << actualch.id();
+ if(actualch.has_val_tag())
+ {
+ EXPECT_EQ(actualch.val_tag(), expectedch.val_tag) << "id=" << actualch.id();
+ }
+ }
+ }
+}
+
+// Compare `actual` (a node of a parsed tree) against this expected CaseNode.
+// Verifies type bits (optionally masking quote-style flags), key/val scalars,
+// and child count; then walks the children through three different traversal
+// APIs (index-based, children() range, siblings() range) so that all ryml
+// iteration paths are exercised; finally recurses into each child in order.
+void CaseNode::compare(yml::ConstNodeRef const& actual, bool ignore_quote) const
+{
+    if(ignore_quote)
+    {
+        // mask the quote-style bits so quoted and plain scalars compare equal
+        const auto actual_type = actual.get()->m_type & ~(VALQUO | KEYQUO);
+        const auto expected_type = type & ~(VALQUO | KEYQUO);
+        EXPECT_EQ(expected_type, actual_type) << "id=" << actual.id();
+    }
+    else
+    {
+        EXPECT_EQ((int)actual.get()->m_type, (int)type) << "id=" << actual.id(); // the type() method masks the type, and thus tag flags are omitted on its return value
+    }
+
+    EXPECT_EQ(actual.num_children(), children.size()) << "id=" << actual.id();
+
+    if(actual.has_key())
+    {
+        EXPECT_EQ(actual.key(), key) << "id=" << actual.id();
+    }
+
+    if(actual.has_val())
+    {
+        EXPECT_EQ(actual.val(), val) << "id=" << actual.id();
+    }
+
+    // check that the children are in the same order
+    {
+        EXPECT_EQ(children.size(), actual.num_children()) << "id=" << actual.id();
+
+        size_t ic = 0;
+        for(auto const &expectedch : children)
+        {
+            SCOPED_TRACE("comparing: iteration based on the ref children");
+            (void)expectedch; // unused
+            compare_child(actual, ic++);
+        }
+
+        ic = 0;
+        for(auto const actualch : actual.children())
+        {
+            SCOPED_TRACE("comparing: iteration based on the yml::Node children");
+            (void)actualch; // unused
+            compare_child(actual, ic++);
+        }
+
+        if(actual.first_child() != nullptr)
+        {
+            ic = 0;
+            for(auto const ch : actual.first_child().siblings())
+            {
+                SCOPED_TRACE("comparing: iteration based on the yml::Node siblings");
+                (void)ch; // unused
+                compare_child(actual, ic++);
+            }
+        }
+    }
+
+    // recurse: compare each expected child against the corresponding actual child
+    for(size_t i = 0, ei = actual.num_children(), j = 0, ej = children.size(); i < ei && j < ej; ++i, ++j)
+    {
+        children[j].compare(actual[i], ignore_quote);
+    }
+}
+
+// Rebuild the subtree rooted at *n so that it mirrors this CaseNode: copies
+// the type bits (including anchor/ref flags), key/val scalars, tags and
+// anchors, then appends and recurses into the children. The target node must
+// not already have children.
+void CaseNode::recreate(yml::NodeRef *n) const
+{
+    C4_ASSERT( ! n->has_children());
+    auto *nd = n->get();
+    nd->m_type = type|key_anchor.type|val_anchor.type;
+    nd->m_key.scalar = key;
+    nd->m_key.tag = (key_tag);
+    nd->m_key.anchor = key_anchor.str;
+    nd->m_val.scalar = val;
+    nd->m_val.tag = (val_tag);
+    nd->m_val.anchor = val_anchor.str;
+    auto &tree = *n->tree();
+    size_t nid = n->id(); // don't use node from now on
+    for(auto const& ch : children)
+    {
+        // NOTE(review): appending may relocate tree storage, so the code
+        // works with ids rather than the raw node pointer from here on
+        size_t id = tree.append_child(nid);
+        NodeRef chn(n->tree(), id);
+        ch.recreate(&chn);
+    }
+}
+
+
+//-----------------------------------------------------------------------------
+
+// Print the path from the root down to node n (segments like "/key" for
+// keyed nodes and "/<pos>" for seq children). Two passes over the ancestor
+// chain: the first computes the total formatted length, the second fills the
+// buffer right-to-left while walking from n up through its parents.
+void print_path(ConstNodeRef const& n)
+{
+    size_t len = 0;
+    char buf[1024];
+    ConstNodeRef p = n;
+    // pass 1: accumulate the length of every path segment
+    while(p != nullptr)
+    {
+        if(p.has_key())
+        {
+            len += 1 + p.key().len;
+        }
+        else
+        {
+            // snprintf is used only for its return value (the segment length)
+            int ret = snprintf(buf, sizeof(buf), "/%zd", p.has_parent() ? p.parent().child_pos(p) : 0);
+            RYML_ASSERT(ret >= 0);
+            len += static_cast<size_t>(ret);
+        }
+        p = p.parent();
+    };
+    C4_ASSERT(len < sizeof(buf));
+    size_t pos = len;
+    p = n;
+    // pass 2: write the segments backwards, ending at buf[len]
+    while(p.valid() && p != nullptr)
+    {
+        if(p.has_key())
+        {
+            size_t tl = p.key().len;
+            // NOTE(review): snprintf with size tl writes at most tl-1 chars
+            // plus a NUL, so the segment's last byte is truncated and the '/'
+            // separator is never emitted — confirm whether this is intended
+            int ret = snprintf(buf + pos - tl, tl, "%.*s", (int)tl, p.key().str);
+            RYML_ASSERT(ret >= 0);
+            pos -= static_cast<size_t>(ret);
+        }
+        else if(p.has_parent())
+        {
+            // NOTE(review): this overwrites the buffer cursor `pos` with the
+            // child position before formatting — looks suspicious; verify
+            // against the cpp that positional segments print correctly
+            pos = p.parent().child_pos(p);
+            int ret = snprintf(buf, 0, "/%zd", pos);
+            RYML_ASSERT(ret >= 0);
+            size_t tl = static_cast<size_t>(ret);
+            RYML_ASSERT(pos >= tl);
+            ret = snprintf(buf + static_cast<size_t>(pos - tl), tl, "/%zd", pos);
+            RYML_ASSERT(ret >= 0);
+            pos -= static_cast<size_t>(ret);
+        }
+        p = p.parent();
+    };
+    printf("%.*s", (int)len, buf);
+}
+
+
+
+// Print a one-line description of an expected CaseNode: address, [ROOT]
+// marker, type string, key/val scalars with their tags and anchors, the
+// sibling count, and (for containers) the child count. `level` sets the
+// indentation depth (two spaces per level).
+void print_node(CaseNode const& p, int level)
+{
+    printf("%*s%p", (2*level), "", (void*)&p);
+    if( ! p.parent)
+    {
+        printf(" [ROOT]");
+    }
+    printf(" %s:", NodeType::type_str(p.type));
+    if(p.has_key())
+    {
+        if(p.has_key_anchor())
+        {
+            csubstr ka = p.key_anchor.str;
+            printf(" &%.*s", (int)ka.len, ka.str);
+        }
+        if(p.key_tag.empty())
+        {
+            csubstr v = p.key;
+            printf(" '%.*s'", (int)v.len, v.str);
+        }
+        else
+        {
+            // tagged key: print "tag key" inside the quotes
+            csubstr vt = p.key_tag;
+            csubstr v = p.key;
+            printf(" '%.*s %.*s'", (int)vt.len, vt.str, (int)v.len, v.str);
+        }
+    }
+    if(p.has_val())
+    {
+        if(p.val_tag.empty())
+        {
+            csubstr v = p.val;
+            printf(" '%.*s'", (int)v.len, v.str);
+        }
+        else
+        {
+            // tagged val: print "tag val" inside the quotes
+            csubstr vt = p.val_tag;
+            csubstr v = p.val;
+            printf(" '%.*s %.*s'", (int)vt.len, vt.str, (int)v.len, v.str);
+        }
+    }
+    else
+    {
+        // no value, but a val tag may still be present (eg tagged containers)
+        if( ! p.val_tag.empty())
+        {
+            csubstr vt = p.val_tag;
+            printf(" %.*s", (int)vt.len, vt.str);
+        }
+    }
+    if(p.has_val_anchor())
+    {
+        auto &a = p.val_anchor.str;
+        printf(" valanchor='&%.*s'", (int)a.len, a.str);
+    }
+    printf(" (%zd sibs)", p.parent ? p.parent->children.size() : 0);
+    if(p.is_container())
+    {
+        printf(" %zd children:", p.children.size());
+    }
+    printf("\n");
+}
+
+
+// Recursively print an actual (parsed) tree, one line per node, indented by depth.
+void print_tree(ConstNodeRef const& p, int level)
+{
+    print_node(p, level);
+    for(ConstNodeRef ch : p.children())
+    {
+        print_tree(ch, level+1);
+    }
+}
+
+// Recursively print an expected CaseNode tree, one line per node, indented by depth.
+void print_tree(CaseNode const& p, int level)
+{
+    print_node(p, level);
+    for(auto const& ch : p.children)
+        print_tree(ch, level+1);
+}
+
+// Print a whole expected CaseNode tree, framed by separators, with the total
+// recursive node count at the end.
+void print_tree(CaseNode const& t)
+{
+    printf("--------------------------------------\n");
+    print_tree(t, 0);
+    printf("#nodes: %zd\n", t.reccount());
+    printf("--------------------------------------\n");
+}
+
+// Check structural invariants on a single node and, recursively, on all of
+// its descendants: root/key/val constraints, container-vs-scalar exclusivity,
+// parent/sibling reciprocity, seq/map child key constraints, and mutual
+// exclusion between anchors and references.
+void test_invariants(ConstNodeRef const& n)
+{
+    // appended to every gtest failure message to identify the failing node
+    #define _MORE_INFO << "id=" << n.id()
+
+    if(n.is_root())
+    {
+        EXPECT_FALSE(n.has_other_siblings()) _MORE_INFO;
+    }
+    // keys or vals cannot be root
+    if(n.has_key() || n.is_val() || n.is_keyval())
+    {
+        EXPECT_TRUE(!n.is_root() || (n.is_doc() && !n.has_key())) _MORE_INFO;
+    }
+    // vals cannot be containers
+    if( ! n.empty() && ! n.is_doc())
+    {
+        EXPECT_NE(n.has_val(), n.is_container()) _MORE_INFO;
+    }
+    if(n.has_children())
+    {
+        EXPECT_TRUE(n.is_container()) _MORE_INFO;
+        EXPECT_FALSE(n.is_val()) _MORE_INFO;
+    }
+    // check parent & sibling reciprocity
+    for(ConstNodeRef s : n.siblings())
+    {
+        EXPECT_TRUE(n.has_sibling(s)) _MORE_INFO;
+        EXPECT_TRUE(s.has_sibling(n)) _MORE_INFO;
+        EXPECT_EQ(s.parent().get(), n.parent().get()) _MORE_INFO;
+    }
+    if(n.parent() != nullptr)
+    {
+        EXPECT_EQ(n.parent().num_children() > 1, n.has_other_siblings()) _MORE_INFO;
+        EXPECT_TRUE(n.parent().has_child(n)) _MORE_INFO;
+        EXPECT_EQ(n.parent().num_children(), n.num_siblings()) _MORE_INFO;
+        // doc parent must be a seq and a stream
+        if(n.is_doc())
+        {
+            EXPECT_TRUE(n.parent().is_seq()) _MORE_INFO;
+            EXPECT_TRUE(n.parent().is_stream()) _MORE_INFO;
+        }
+    }
+    else
+    {
+        // only the root has no parent
+        EXPECT_TRUE(n.is_root()) _MORE_INFO;
+    }
+    if(n.is_seq())
+    {
+        // seq children must not have keys
+        EXPECT_TRUE(n.is_container()) _MORE_INFO;
+        EXPECT_FALSE(n.is_map()) _MORE_INFO;
+        for(ConstNodeRef ch : n.children())
+        {
+            EXPECT_FALSE(ch.is_keyval()) _MORE_INFO;
+            EXPECT_FALSE(ch.has_key()) _MORE_INFO;
+        }
+    }
+    if(n.is_map())
+    {
+        // map children must all have keys
+        EXPECT_TRUE(n.is_container()) _MORE_INFO;
+        EXPECT_FALSE(n.is_seq()) _MORE_INFO;
+        for(ConstNodeRef ch : n.children())
+        {
+            EXPECT_TRUE(ch.has_key()) _MORE_INFO;
+        }
+    }
+    // anchors and refs are mutually exclusive, and never empty
+    if(n.has_key_anchor())
+    {
+        EXPECT_FALSE(n.key_anchor().empty()) _MORE_INFO;
+        EXPECT_FALSE(n.is_key_ref()) _MORE_INFO;
+    }
+    if(n.has_val_anchor())
+    {
+        EXPECT_FALSE(n.val_anchor().empty()) _MORE_INFO;
+        EXPECT_FALSE(n.is_val_ref()) _MORE_INFO;
+    }
+    if(n.is_key_ref())
+    {
+        EXPECT_FALSE(n.key_ref().empty()) _MORE_INFO;
+        EXPECT_FALSE(n.has_key_anchor()) _MORE_INFO;
+    }
+    if(n.is_val_ref())
+    {
+        EXPECT_FALSE(n.val_ref().empty()) _MORE_INFO;
+        EXPECT_FALSE(n.has_val_anchor()) _MORE_INFO;
+    }
+    // ... add more tests here
+
+    // now recurse into the children
+    for(ConstNodeRef ch : n.children())
+    {
+        test_invariants(ch);
+    }
+
+    #undef _MORE_INFO
+}
+
+// Check the tree's linked-list invariants around node n: a node whose
+// prev/next sibling index is NONE must be its parent's first/last child, and
+// a parentless node must be the root with no siblings. Recurses into the
+// children and returns the total number of nodes in the subtree (incl. n).
+size_t test_tree_invariants(ConstNodeRef const& n)
+{
+    auto parent = n.parent();
+
+    if(n.get()->m_prev_sibling == NONE)
+    {
+        if(parent != nullptr)
+        {
+            EXPECT_EQ(parent.first_child().get(), n.get());
+            EXPECT_EQ(parent.first_child().id(), n.id());
+        }
+    }
+
+    if(n.get()->m_next_sibling == NONE)
+    {
+        if(parent != nullptr)
+        {
+            EXPECT_EQ(parent.last_child().get(), n.get());
+            EXPECT_EQ(parent.last_child().id(), n.id());
+        }
+    }
+
+    if(parent == nullptr)
+    {
+        EXPECT_TRUE(n.is_root());
+        EXPECT_EQ(n.prev_sibling().get(), nullptr);
+        EXPECT_EQ(n.next_sibling().get(), nullptr);
+    }
+
+    // count this node, then every node in the children's subtrees
+    size_t count = 1, num = 0;
+    for(ConstNodeRef ch : n.children())
+    {
+        EXPECT_NE(ch.id(), n.id()); // a node can never be its own child
+        count += test_tree_invariants(ch);
+        ++num;
+    }
+
+    EXPECT_EQ(num, n.num_children());
+
+    return count;
+}
+
+// Check whole-tree invariants: size/capacity/slack accounting for both the
+// node pool and the string arena, then the per-node invariants starting at
+// the root. If anything failed so far, the tree is printed to aid debugging.
+// The region under `#if 0 == 1` is disabled legacy code kept for reference.
+void test_invariants(Tree const& t)
+{
+
+    ASSERT_LE(t.size(), t.capacity());
+    EXPECT_EQ(t.size() + t.slack(), t.capacity());
+
+    ASSERT_LE(t.arena_size(), t.arena_capacity());
+    ASSERT_LE(t.arena_slack(), t.arena_capacity());
+    EXPECT_EQ(t.arena_size() + t.arena_slack(), t.arena_capacity());
+
+    if(t.empty())
+        return;
+
+    // the recursive node count must match the tree's reported size
+    size_t count = test_tree_invariants(t.rootref());
+    EXPECT_EQ(count, t.size());
+
+    check_invariants(t);
+    test_invariants(t.rootref());
+
+    // dump the tree when any expectation above failed, to ease diagnosis
+    if(!testing::UnitTest::GetInstance()->current_test_info()->result()->Passed())
+    {
+        print_tree(t);
+    }
+
+    return;
+#if 0 == 1
+    for(size_t i = 0; i < t.m_size; ++i)
+    {
+        auto n = t.get(i);
+        if(n->m_prev_sibling == NONE)
+        {
+            EXPECT_TRUE(i == t.m_head || i == t.m_free_head);
+        }
+        if(n->m_next_sibling == NONE)
+        {
+            EXPECT_TRUE(i == t.m_tail || i == t.m_free_tail);
+        }
+    }
+
+    std::vector<bool> touched(t.capacity());
+
+    for(size_t i = t.m_head; i != NONE; i = t.get(i)->m_next_sibling)
+        touched[i] = true;
+
+    size_t size = 0;
+    for(bool v : touched)
+        size += v;
+
+    EXPECT_EQ(size, t.size());
+
+    touched.clear();
+    touched.resize(t.capacity());
+
+    for(size_t i = t.m_free_head; i != NONE; i = t.get(i)->m_next_sibling)
+    {
+        touched[i] = true;
+    }
+
+    size_t slack = 0;
+    for(auto v : touched)
+    {
+        slack += v;
+    }
+
+    EXPECT_EQ(slack, t.slack());
+    EXPECT_EQ(size+slack, t.capacity());
+
+    // there are more checks to be done
+#endif
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// Return the (lazily-built) CaseData for the named test case. Results are
+// cached in a function-local map, so each case's source buffers are prepared
+// only once; later calls return the cached entry.
+CaseData* get_data(csubstr name)
+{
+    static std::map<csubstr, CaseData> m;
+
+    auto it = m.find(name);
+    CaseData *cd;
+    if(it == m.end())
+    {
+        cd = &m[name];
+        Case const* c = get_case(name);
+        RYML_CHECK(c->src.find("\n\r") == csubstr::npos);
+        {
+            // unix style: strip any CR characters from the source
+            std::string tmp;
+            replace_all("\r", "", c->src, &tmp);
+            cd->unix_style.src_buf.assign(tmp.begin(), tmp.end());
+            cd->unix_style.src = to_substr(cd->unix_style.src_buf);
+            cd->unix_style_json.src_buf.assign(tmp.begin(), tmp.end());
+            // fix: point the json variant at its own buffer. It previously
+            // aliased unix_style.src_buf, which would leave the json substr
+            // dangling if the aliased buffer were ever modified/reallocated.
+            cd->unix_style_json.src = to_substr(cd->unix_style_json.src_buf);
+        }
+        {
+            // windows style: expand every LF into CRLF
+            std::string tmp;
+            replace_all("\n", "\r\n", cd->unix_style.src, &tmp);
+            cd->windows_style.src_buf.assign(tmp.begin(), tmp.end());
+            cd->windows_style.src = to_substr(cd->windows_style.src_buf);
+            cd->windows_style_json.src_buf.assign(tmp.begin(), tmp.end());
+            // fix: same aliasing problem as above, for the windows buffers
+            cd->windows_style_json.src = to_substr(cd->windows_style_json.src_buf);
+        }
+    }
+    else
+    {
+        cd = &it->second;
+    }
+    return cd;
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_case.hpp b/thirdparty/ryml/test/test_case.hpp
new file mode 100644
index 000000000..7ddc0a96d
--- /dev/null
+++ b/thirdparty/ryml/test/test_case.hpp
@@ -0,0 +1,533 @@
+#ifndef _TEST_CASE_HPP_
+#define _TEST_CASE_HPP_
+
+#ifdef RYML_SINGLE_HEADER
+#include <ryml_all.hpp>
+#else
+#include "c4/std/vector.hpp"
+#include "c4/std/string.hpp"
+#include "c4/format.hpp"
+#include <c4/yml/yml.hpp>
+#include <c4/yml/detail/parser_dbg.hpp>
+#endif
+
+#include <gtest/gtest.h>
+#include <functional>
+
+#ifdef __GNUC__
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wtype-limits"
+#endif
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4296/*expression is always 'boolean_value'*/)
+# pragma warning(disable: 4389/*'==': signed/unsigned mismatch*/)
+# if C4_MSVC_VERSION != C4_MSVC_VERSION_2017
+# pragma warning(disable: 4800/*'int': forcing value to bool 'true' or 'false' (performance warning)*/)
+# endif
+#endif
+
+#ifdef RYML_DBG
+# include <c4/yml/detail/print.hpp>
+#endif
+
+namespace c4 {
+
+// gtest value printers: write substr/csubstr contents directly to the stream.
+inline void PrintTo(substr s, ::std::ostream* os) { os->write(s.str, (std::streamsize)s.len); }
+inline void PrintTo(csubstr s, ::std::ostream* os) { os->write(s.str, (std::streamsize)s.len); }
+
+namespace yml {
+
+// gtest value printers for node types: print the symbolic type string
+// instead of the raw flag integer.
+inline void PrintTo(NodeType ty, ::std::ostream* os)
+{
+    *os << ty.type_str();
+}
+inline void PrintTo(NodeType_e ty, ::std::ostream* os)
+{
+    *os << NodeType::type_str(ty);
+}
+
+// gtest value printer for Callbacks: print the user-data pointer and the
+// three callback function pointers. The __extension__ marker (gcc only)
+// silences the pedantic warning about casting a function pointer to void*,
+// which is non-standard but universally supported.
+inline void PrintTo(Callbacks const& cb, ::std::ostream* os)
+{
+#ifdef __GNUC__
+#define RYML_GNUC_EXTENSION __extension__
+#else
+#define RYML_GNUC_EXTENSION
+#endif
+    *os << '{'
+        << "userdata." << (void*)cb.m_user_data << ','
+        << "allocate." << RYML_GNUC_EXTENSION (void*)cb.m_allocate << ','
+        << "free." << RYML_GNUC_EXTENSION (void*)cb.m_free << ','
+        << "error." << RYML_GNUC_EXTENSION (void*)cb.m_error << '}';
+#undef RYML_GNUC_EXTENSION
+}
+
+struct Case;
+struct CaseNode;
+struct CaseData;
+
+Case const* get_case(csubstr name);
+CaseData* get_data(csubstr name);
+
+void test_compare(Tree const& actual, Tree const& expected);
+void test_compare(Tree const& actual, size_t node_actual,
+ Tree const& expected, size_t node_expected,
+ size_t level=0);
+
+void test_arena_not_shared(Tree const& a, Tree const& b);
+
+void test_invariants(Tree const& t);
+void test_invariants(ConstNodeRef const& n);
+
+void print_node(CaseNode const& t, int level=0);
+void print_tree(CaseNode const& p, int level=0);
+void print_path(ConstNodeRef const& p);
+
+
+
+/** Run check_fn against the given tree, then round-trip it (emit to YAML,
+ * re-parse) three times, checking the invariants and re-running check_fn on
+ * every round-trip result. This verifies that emitting and re-parsing is
+ * stable for the tree under test. */
+template<class CheckFn>
+void test_check_emit_check(Tree const& t, CheckFn check_fn)
+{
+    #ifdef RYML_DBG
+    print_tree(t);
+    #endif
+    {
+        SCOPED_TRACE("original yaml");
+        test_invariants(t);
+        check_fn(t);
+    }
+    // emit tp to YAML, parse the result, and check it; `identifier` labels
+    // the round-trip in gtest failure traces
+    auto emit_and_parse = [&check_fn](Tree const& tp, const char* identifier){
+        SCOPED_TRACE(identifier);
+        std::string emitted = emitrs_yaml<std::string>(tp);
+        #ifdef RYML_DBG
+        printf("~~~%s~~~\n%.*s", identifier, (int)emitted.size(), emitted.data());
+        #endif
+        Tree cp = parse_in_arena(to_csubstr(emitted));
+        #ifdef RYML_DBG
+        print_tree(cp);
+        #endif
+        test_invariants(cp);
+        check_fn(cp);
+        return cp;
+    };
+    // three successive round-trips, each feeding the next
+    Tree cp = emit_and_parse(t, "emitted 1");
+    cp = emit_and_parse(cp, "emitted 2");
+    cp = emit_and_parse(cp, "emitted 3");
+}
+
+/** Convenience overload: parse the given YAML source into a tree, then run
+ * the emit/re-parse round-trip checks on it. */
+template<class CheckFn>
+void test_check_emit_check(csubstr yaml, CheckFn check_fn)
+{
+    Tree t = parse_in_arena(yaml);
+    test_check_emit_check(t, check_fn);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+/** Replace every occurrence of `pattern` in `subject` with `repl`, storing
+ * the result in *dst and returning a writable view of it. `subject` must not
+ * overlap *dst. replace_all() returns the size required for the full result,
+ * so if the first attempt did not match dst's current size, dst is resized
+ * and the replacement is redone into the correctly-sized buffer. */
+inline c4::substr replace_all(c4::csubstr pattern, c4::csubstr repl, c4::csubstr subject, std::string *dst)
+{
+    RYML_CHECK(!subject.overlaps(to_csubstr(*dst)));
+    size_t ret = subject.replace_all(to_substr(*dst), pattern, repl);
+    if(ret != dst->size())
+    {
+        // first pass reported a different size: resize and run again
+        dst->resize(ret);
+        ret = subject.replace_all(to_substr(*dst), pattern, repl);
+    }
+    RYML_CHECK(ret == dst->size());
+    return c4::to_substr(*dst);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+/** Scope helper for asserting that ryml reports an error. The saved
+ * Callbacks members and the ctor/dtor pairing suggest it swaps in
+ * error-intercepting callbacks (globally and, when given, on a specific
+ * tree) and restores the previous ones on destruction — implementation
+ * lives in the .cpp; confirm there. */
+struct ExpectError
+{
+    bool m_got_an_error;            // whether the expected error was observed
+    Tree *m_tree;                   // optional tree whose callbacks are overridden
+    c4::yml::Callbacks m_glob_prev; // previously-installed global callbacks
+    c4::yml::Callbacks m_tree_prev; // previously-installed tree callbacks
+    Location expected_location;     // where the error is expected to occur
+
+    ExpectError(Location loc={}) : ExpectError(nullptr, loc) {}
+    ExpectError(Tree *tree, Location loc={});
+    ~ExpectError();
+
+    // run fn and check that it triggers an error (do_check) or a debug
+    // assertion (check_assertion), optionally at the expected location
+    static void do_check(      std::function<void()> fn, Location expected={}) { do_check(nullptr, fn, expected); }
+    static void do_check(Tree *tree, std::function<void()> fn, Location expected={});
+    static void check_assertion(      std::function<void()> fn, Location expected={}) { check_assertion(nullptr, fn, expected); }
+    static void check_assertion(Tree *tree, std::function<void()> fn, Location expected={});
+};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+/** A scalar value paired with its tag, used when building expected CaseNode
+ * trees from string literals. The array-reference constructors bind the
+ * literals by reference (no copy), capturing their lengths at compile time. */
+struct TaggedScalar
+{
+    csubstr tag;
+    csubstr scalar;
+    template<size_t N, size_t M>
+    TaggedScalar(const char (&t)[N], const char (&s)[M]) : tag(t), scalar(s) {}
+    // tagged null/empty scalar
+    template<size_t N>
+    TaggedScalar(const char (&t)[N], std::nullptr_t) : tag(t), scalar() {}
+};
+
+/** Describes an anchor or reference attached to a node: the NodeType_e flag
+ * selecting the anchor/ref kind, plus the anchor/reference name (empty when
+ * absent). */
+struct AnchorRef
+{
+    NodeType_e type;
+    csubstr str;
+    AnchorRef() : type(NOTYPE), str() {}
+    AnchorRef(NodeType_e t) : type(t), str() {}
+    AnchorRef(NodeType_e t, csubstr v) : type(t), str(v) {}
+};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+/** a node class against which ryml structures are tested. Uses initializer
+ * lists to facilitate minimal specification. */
+struct CaseNode
+{
+public:
+
+ using seqmap = std::vector<CaseNode>;
+ using iseqmap = std::initializer_list<CaseNode>;
+
+ struct TaggedList
+ {
+ csubstr tag;
+ iseqmap ilist;
+ template<size_t N> TaggedList(const char (&t)[N], iseqmap l) : tag(t), ilist(l) {}
+ };
+
+public:
+
+ NodeType type;
+ csubstr key, key_tag; AnchorRef key_anchor;
+ csubstr val, val_tag; AnchorRef val_anchor;
+ seqmap children;
+ CaseNode * parent;
+
+public:
+
+ CaseNode(CaseNode && that) noexcept { _move(std::move(that)); }
+ CaseNode(CaseNode const& that) noexcept { _copy(that); }
+
+ CaseNode& operator= (CaseNode && that) noexcept { _move(std::move(that)); return *this; }
+ CaseNode& operator= (CaseNode const& that) noexcept { _copy(that); return *this; }
+
+ ~CaseNode() = default;
+
+public:
+
+ // brace yourself: what you are about to see is ... crazy.
+
+ CaseNode() : CaseNode(NOTYPE) {}
+ CaseNode(NodeType_e t) : type(t), key(), key_tag(), key_anchor(), val(), val_tag(), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+
+ // val
+ template<size_t N> explicit CaseNode(const char (&v)[N] ) : type((VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& v) : type((VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(std::nullptr_t ) : type((VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ // val, with anchor/ref
+ template<size_t N> explicit CaseNode(const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode( AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(arv.type == VALREF); }
+
+
+ // val, explicit type
+ template<size_t N> explicit CaseNode(NodeType t, const char (&v)[N] ) : type((VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, TaggedScalar const& v) : type((VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, std::nullptr_t ) : type((VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ // val, explicit type, with val anchor/ref
+ template<size_t N> explicit CaseNode(NodeType t, const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+
+
+ // keyval
+ template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(std::nullptr_t , const char (&v)[M] ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(const char (&k)[N] , std::nullptr_t ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG )), key( ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(std::nullptr_t , std::nullptr_t ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(AnchorRef const& ark, AnchorRef const& arv) : type((KEYVAL|ark.type|arv.type)), key(ark.str ), key_tag( ), key_anchor(ark), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(ark.type == KEYREF); RYML_ASSERT(arv.type == VALREF); }
+ // keyval, with val anchor/ref
+ template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ // keyval, with key anchor/ref
+ template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ // keyval, with key anchor/ref + val anchor/ref
+ template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+
+
+ // keyval, explicit type
+ template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , std::nullptr_t ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(NodeType t, std::nullptr_t , const char (&v)[M] ) : type((KEYVAL|t )), key( ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG |t)), key(k.scalar), key_tag(k.tag), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG|t)), key( ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, std::nullptr_t , std::nullptr_t ) : type((KEYVAL |t)), key( ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
+ // keyval, explicit type, with val anchor/ref
+ template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ explicit CaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
+ // keyval, explicit type, with key anchor/ref
+ template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
    // The overloads below form a matrix over {plain key, tagged key} x
    // {plain val, tagged val} x {no anchor, key anchor, val anchor, both},
    // for scalar keyvals and for containers. The `const char (&k)[N]`
    // array-reference form binds string literals without decaying to a
    // pointer, so the scalar length is known at compile time.
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
    template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
    // keyval, explicit type, with key anchor/ref + val anchor/ref
    template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
    template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }


    // container: here the type is left default-constructed and deduced
    // afterwards by _guess() from the children/tags.
    template<size_t N> explicit CaseNode(const char (&k)[N] , iseqmap s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedList s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode( iseqmap m) : CaseNode("", m) {}
    explicit CaseNode( TaggedList m) : CaseNode("", m) {}
    // container, with val anchor/ref
    template<size_t N> explicit CaseNode(const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode( iseqmap m, AnchorRef const& arv) : CaseNode("", m, arv) {}
    explicit CaseNode( TaggedList m, AnchorRef const& arv) : CaseNode("", m, arv) {}
    // container, with key anchor/ref
    template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    // container, with key anchor/ref + val anchor/ref
    template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
    explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }


    // container, explicit type: the caller provides the NodeType, so
    // _guess() is not invoked; only the tag/anchor flags are OR-ed in.
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , iseqmap s) : type((t )), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedList s) : type((t|VALTAG)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, iseqmap s) : type((t|KEYTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, iseqmap s) : type((t )), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedList s) : type((t|VALTAG)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
    // container, explicit type, with val anchor/ref
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
    // container, explicit type, with key anchor/ref
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type((t |KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type((t|VALTAG|KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type((t|KEYTAG|KEYANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
    // container, explicit type, with key anchor/ref + val anchor/ref
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t |KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
    template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
    explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|KEYANCH|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
+
+
+public:
+
    // Take over the contents of another CaseNode. Only `children` is
    // actually moved; the remaining members are plainly copied. The parent
    // pointer is deliberately NOT taken from `that`: it is reset to null
    // and the adopted children are re-linked to point at `this`.
    void _move(CaseNode&& that)
    {
        type = that.type;
        key = that.key;
        key_tag = that.key_tag;
        key_anchor = that.key_anchor;
        val = that.val;
        val_tag = that.val_tag;
        val_anchor = that.val_anchor;
        children = std::move(that.children);
        parent = nullptr;
        _set_parent(); // children now belong to this node
    }
    // Deep-copy the contents of another CaseNode (the children vector is
    // copied recursively). As with _move(), the parent pointer is reset and
    // the copied children are re-linked to `this`.
    void _copy(CaseNode const& that)
    {
        type = that.type;
        key = that.key;
        key_tag = that.key_tag;
        key_anchor = that.key_anchor;
        val = that.val;
        val_tag = that.val_tag;
        val_anchor = that.val_anchor;
        children = that.children;
        parent = nullptr;
        _set_parent(); // copied children must point at this node, not at that's
    }
+
+ void _set_parent()
+ {
+ for(auto &ch : children)
+ {
+ ch.parent = this;
+ }
+ }
+
+ NodeType_e _guess() const;
+
+ bool is_root() const { return parent; }
+ bool is_doc() const { return type & DOC; }
+ bool is_map() const { return type & MAP; }
+ bool is_seq() const { return type & SEQ; }
+ bool has_val() const { return type & VAL; }
+ bool has_key() const { return type & KEY; }
+ bool is_container() const { return type & (SEQ|MAP); }
+ bool has_key_anchor() const { return type & KEYANCH; }
+ bool has_val_anchor() const { return type & VALANCH; }
+
+public:
+
+ CaseNode const& operator[] (size_t i) const
+ {
+ C4_ASSERT(i >= 0 && i < children.size());
+ return children[i];
+ }
+
+ CaseNode const& operator[] (csubstr const& name) const
+ {
+ auto ch = lookup(name);
+ C4_ASSERT(ch != nullptr);
+ return *ch;
+ }
+
+ CaseNode const* lookup(csubstr const& name) const
+ {
+ C4_ASSERT( ! children.empty());
+ for(auto const& ch : children)
+ if(ch.key == name)
+ return &ch;
+ return nullptr;
+ }
+
+public:
+
+ void compare(yml::ConstNodeRef const& n, bool ignore_quote=false) const;
+ void compare_child(yml::ConstNodeRef const& n, size_t pos) const;
+
+ size_t reccount() const
+ {
+ size_t c = 1;
+ for(auto const& ch : children)
+ c += ch.reccount();
+ return c;
+ }
+
+ void recreate(yml::NodeRef *n) const;
+
+};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
// Per-case flags controlling how a test Case is exercised.
typedef enum {
    EXPECT_PARSE_ERROR = (1<<0), // parsing the case source is expected to fail
    RESOLVE_REFS = (1<<1), // presumably: resolve anchors/refs after parsing -- confirm at use site
    JSON_ALSO = (1<<2), // TODO: make it the opposite: opt-out instead of opt-in
} TestCaseFlags_e;
+
+
// A single named test case: the YAML source, the expected tree shape
// (as a CaseNode), run flags, and -- for error cases -- the expected
// error location.
struct Case
{
    std::string filelinebuf;   // owns the "file:line" string built in the ctor
    csubstr fileline;          // view into filelinebuf. NOTE(review): would dangle if a Case were copied/moved -- confirm cases are constructed in place only
    csubstr name;              // view of the name literal passed in (must outlive the Case)
    csubstr src;               // view of the YAML source literal
    CaseNode root;             // expected tree structure
    TestCaseFlags_e flags;
    Location expected_location; // only meaningful for EXPECT_PARSE_ERROR cases

    //! create a standard test case: name, source and expected CaseNode structure
    template<class... Args> Case(csubstr file, int line, const char *name_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags(), expected_location() {}
    //! create a test case with explicit flags: name, source flags, and expected CaseNode structure
    template<class... Args> Case(csubstr file, int line, const char *name_, int f_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags((TestCaseFlags_e)f_), expected_location() {}
    //! create a test case with an error on an expected location
    // (using the `name` member here is safe: it is declared -- and therefore
    // initialized -- before expected_location)
    Case(csubstr file, int line, const char *name_, int f_, const char *src_, LineCol loc) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(), flags((TestCaseFlags_e)f_), expected_location(name, loc.line, loc.col) {}
};
+
+//-----------------------------------------------------------------------------
+
+// a persistent data store to avoid repeating operations on every test
// Cached artifacts of running one test case with one line-ending style,
// so repeated checks do not redo the same parse/emit work.
struct CaseDataLineEndings
{
    std::vector<char> src_buf; // mutable copy of the case source (parsing is in situ)
    substr src;                // view into src_buf

    Tree parsed_tree;          // result of parsing src

    size_t numbytes_stdout;       // bytes produced when emitting yaml to stdout
    size_t numbytes_stdout_json;  // bytes produced when emitting json to stdout

    std::string emit_buf;
    csubstr emitted_yml;       // yaml emitted from parsed_tree (view into emit_buf)

    std::string emitjson_buf;
    csubstr emitted_json;      // json emitted from parsed_tree (view into emitjson_buf)

    std::string parse_buf;
    substr parsed_yml;         // mutable copy of emitted_yml, for re-parsing

    std::string parse_buf_json;
    substr parsed_json;        // mutable copy of emitted_json, for re-parsing

    Tree emitted_tree;         // tree obtained by re-parsing the emitted yaml
    Tree emitted_tree_json;    // tree obtained by re-parsing the emitted json

    Tree recreated;            // tree rebuilt programmatically via CaseNode::recreate()
};
+
+
// One CaseDataLineEndings instance per (line-ending, emit-format)
// combination. Names suggest unix = LF and windows = CRLF sources --
// confirm in the test driver that fills these in.
struct CaseData
{
    CaseDataLineEndings unix_style;
    CaseDataLineEndings unix_style_json;
    CaseDataLineEndings windows_style;
    CaseDataLineEndings windows_style_json;
};
+
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#endif
+
+#ifdef __GNUC__
+# pragma GCC diagnostic pop
+#endif
+
+#endif /* _TEST_CASE_HPP_ */
diff --git a/thirdparty/ryml/test/test_double_quoted.cpp b/thirdparty/ryml/test/test_double_quoted.cpp
new file mode 100644
index 000000000..6c3915873
--- /dev/null
+++ b/thirdparty/ryml/test/test_double_quoted.cpp
@@ -0,0 +1,610 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
// Exercise every escape sequence ryml filters inside double-quoted
// scalars, including the multi-byte UTF-8 expansions of \_, \N, \L, \P.
TEST(double_quoted, escaped_chars)
{
    csubstr yaml = R"("\\\"\n\r\t\ \/\ \0\b\f\a\v\e\_\N\L\P")";
    // build the string like this because some of the characters are
    // filtered out under the double quotes
    std::string expected;
    expected += '\\';
    expected += '"';
    expected += '\n';
    expected += '\r';
    expected += '\t';
    expected += '\t'; // presumably produced by an escaped literal tab in the fixture above -- confirm against the file bytes
    expected += '/';
    expected += ' ';
    expected += '\0'; // note: embedded NUL -- this is why expected is built char by char
    expected += '\b';
    expected += '\f';
    expected += '\a';
    expected += '\v';
    expected += INT8_C(0x1b); // \e
    //
    // wrap explicitly to avoid overflow
    expected += _RYML_CHCONST(-0x3e, 0xc2); // \_ (1)
    expected += _RYML_CHCONST(-0x60, 0xa0); // \_ (2)
    //
    expected += _RYML_CHCONST(-0x3e, 0xc2); // \N (1)
    expected += _RYML_CHCONST(-0x7b, 0x85); // \N (2)
    //
    expected += _RYML_CHCONST(-0x1e, 0xe2); // \L (1)
    expected += _RYML_CHCONST(-0x80, 0x80); // \L (2)
    expected += _RYML_CHCONST(-0x58, 0xa8); // \L (3)
    //
    expected += _RYML_CHCONST(-0x1e, 0xe2); // \P (1)
    expected += _RYML_CHCONST(-0x80, 0x80); // \P (2)
    expected += _RYML_CHCONST(-0x57, 0xa9); // \P (3)
    //
    Tree t = parse_in_arena(yaml);
    csubstr v = t.rootref().val();
    std::string actual = {v.str, v.len};
    EXPECT_EQ(actual, expected);
}
+
// yaml-test-suite 3RLN: leading tabs on a continuation line of a
// double-quoted scalar survive only when escaped; plain leading
// whitespace is stripped as indentation. The fixture is
// whitespace-significant (it mixes escaped and literal tabs) -- do not
// reformat it.
TEST(double_quoted, test_suite_3RLN)
{
    csubstr yaml = R"(---
"1 leading
   \ttab"
---
"2 leading
   \	tab"
---
"3 leading
   	tab"
---
"4 leading
   \t  tab"
---
"5 leading
   \	  tab"
---
"6 leading
   	  tab"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        EXPECT_EQ(t.docref(0).val(), "1 leading \ttab");
        EXPECT_EQ(t.docref(1).val(), "2 leading \ttab");
        EXPECT_EQ(t.docref(2).val(), "3 leading tab");   // unescaped tab folded away
        EXPECT_EQ(t.docref(3).val(), "4 leading \t  tab");
        EXPECT_EQ(t.docref(4).val(), "5 leading \t  tab");
        EXPECT_EQ(t.docref(5).val(), "6 leading tab");   // unescaped tab folded away
    });
}
+
// yaml-test-suite 5GBF: an empty line inside a double-quoted scalar
// folds to a line feed, regardless of the indentation of the empty line.
TEST(double_quoted, test_suite_5GBF)
{
    csubstr yaml = R"(
Folding:
  "Empty line

  as a line feed"
Folding2:
  "Empty line
   
  as a line feed"
Folding3:
  "Empty line
 
  as a line feed"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_map());
        EXPECT_EQ(t["Folding"].val(), csubstr("Empty line\nas a line feed"));
        EXPECT_EQ(t["Folding2"].val(), csubstr("Empty line\nas a line feed"));
        EXPECT_EQ(t["Folding3"].val(), csubstr("Empty line\nas a line feed"));
    });
}
+
// yaml-test-suite 6SLA: escapes are interpreted inside double-quoted
// KEYS, while in single-quoted keys the backslash sequences stay literal.
TEST(double_quoted, test_suite_6SLA)
{
    csubstr yaml = R"(
"foo\nbar:baz\tx \\$%^&*()x": 23
'x\ny:z\tx $%^&*()x': 24
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_map());
        ASSERT_TRUE(t.rootref().has_child("foo\nbar:baz\tx \\$%^&*()x"));
        ASSERT_TRUE(t.rootref().has_child("x\\ny:z\\tx $%^&*()x")); // single quotes: \n,\t stay literal
        ASSERT_EQ(t["foo\nbar:baz\tx \\$%^&*()x"].val(), csubstr("23"));
        ASSERT_EQ(t["x\\ny:z\\tx $%^&*()x"].val(), csubstr("24"));
    });
}
+
// yaml-test-suite 6WPF: folding in a multiline double-quoted scalar --
// single newlines become spaces, empty lines become line feeds, and the
// spaces adjacent to the quotes are preserved.
TEST(double_quoted, test_suite_6WPF)
{
    csubstr yaml = R"(
"
  foo 
 
  	 bar

  baz
"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_val());
        EXPECT_EQ(t.rootref().val(), csubstr(" foo\nbar\nbaz "));
    });
}
+
// yaml-test-suite 9TFX: an empty line folds to \n, a plain line break
// folds to a single space; leading/trailing spaces inside the quotes
// are kept.
TEST(double_quoted, test_suite_9TFX)
{
    csubstr yaml = R"(
" 1st non-empty

 2nd non-empty 
 3rd non-empty "
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_val());
        EXPECT_EQ(t.rootref().val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
    });
}
+
// yaml-test-suite G4RS: \u and \U escapes expand to UTF-8
// (U+263A smiley, U+2705 check mark, U+1D11E musical clef); control
// escapes round-trip. The \x cases are disabled pending support (TODO).
TEST(double_quoted, test_suite_G4RS)
{
    csubstr yaml = R"(---
unicode: "\u263A\u2705\U0001D11E"
control: "\b1998\t1999\t2000\n"
#hex esc: "\x0d\x0a is \r\n"
#---
#- "\x0d\x0a is \r\n"
#---
#{hex esc: "\x0d\x0a is \r\n"}
#---
#["\x0d\x0a is \r\n"]
)";
    test_check_emit_check(yaml, [](Tree const &t){
        EXPECT_EQ(t.docref(0)["unicode"].val(), csubstr(R"(☺✅𝄞)"));
        EXPECT_EQ(t.docref(0)["control"].val(), csubstr("\b1998\t1999\t2000\n"));
        //EXPECT_EQ(t.docref(0)["hex esc"].val(), csubstr("\r\n is \r\n")); TODO
        //EXPECT_EQ(t.docref(1)[0].val(), csubstr("\r\n is \r\n"));
        //EXPECT_EQ(t.docref(2)[0].val(), csubstr("\r\n is \r\n"));
        //EXPECT_EQ(t.docref(3)[0].val(), csubstr("\r\n is \r\n"));
    });
}
+
// yaml-test-suite KSS4: a line break inside a double-quoted scalar folds
// to one space, in every context: doc value, seq element, map key and
// map value, with or without continuation indentation.
TEST(double_quoted, test_suite_KSS4)
{
    csubstr yaml = R"(
---
"quoted
string"
--- "quoted
string"
---
- "quoted
  string"
---
- "quoted
string"
---
"quoted
  string": "quoted
  string"
---
"quoted
string": "quoted
string"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        EXPECT_EQ(t.docref(0).val(), "quoted string");
        EXPECT_EQ(t.docref(1).val(), "quoted string");
        EXPECT_EQ(t.docref(2)[0].val(), "quoted string");
        EXPECT_EQ(t.docref(3)[0].val(), "quoted string");
        EXPECT_EQ(t.docref(4)["quoted string"].val(), "quoted string");
        EXPECT_EQ(t.docref(5)["quoted string"].val(), "quoted string");
    });
}
+
// yaml-test-suite NAT4: quoted scalars consisting only of whitespace and
// line breaks. One folded break yields a space; each additional empty
// line yields a line feed. Single and double quotes behave identically
// here. Fixture is whitespace-significant -- do not reformat.
TEST(double_quoted, test_suite_NAT4)
{
    csubstr yaml = R"(
a: '
  '
b: '  
  '
c: "
  "
d: "  
  "
e: '

  '
f: "

  "
g: '


  '
h: "


  "
)";
    test_check_emit_check(yaml, [](Tree const &t){
        EXPECT_EQ(t["a"].val(), csubstr(" "));
        EXPECT_EQ(t["b"].val(), csubstr(" "));
        EXPECT_EQ(t["c"].val(), csubstr(" "));
        EXPECT_EQ(t["d"].val(), csubstr(" "));
        EXPECT_EQ(t["e"].val(), csubstr("\n"));
        EXPECT_EQ(t["f"].val(), csubstr("\n"));
        EXPECT_EQ(t["g"].val(), csubstr("\n\n"));
        EXPECT_EQ(t["h"].val(), csubstr("\n\n"));
    });
}
+
// yaml-test-suite NP9H: folding vs escaped line breaks -- a trailing
// backslash escapes the newline (no fold), and escaped tabs survive.
TEST(double_quoted, test_suite_NP9H)
{
    csubstr yaml = R"(
"folded 
to a space,	
 
to a line feed, or 	\
 \ 	non-content"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_val());
        EXPECT_EQ(t.rootref().val(), csubstr("folded to a space,\nto a line feed, or \t \tnon-content"));
    });
}
+
// yaml-test-suite Q8AD: same fixture and expectations as NP9H above;
// kept as a separate test because it mirrors a distinct suite id.
TEST(double_quoted, test_suite_Q8AD)
{
    csubstr yaml = R"(
"folded 
to a space,
 
to a line feed, or 	\
 \ 	non-content"
)";
    test_check_emit_check(yaml, [](Tree const &t){
        ASSERT_TRUE(t.rootref().is_val());
        EXPECT_EQ(t.rootref().val(), csubstr("folded to a space,\nto a line feed, or \t \tnon-content"));
    });
}
+
// yaml-test-suite R4YG: leading/trailing empty lines in a double-quoted
// seq element fold into line feeds; a leading literal tab is preserved.
TEST(double_quoted, test_suite_R4YG)
{
    csubstr yaml = R"(
- "	

detected

"

)";
    test_check_emit_check(yaml, [](Tree const &t){
        EXPECT_EQ(t[0].val(), csubstr("\t\ndetected\n"));
    });
}
+
+
+//-----------------------------------------------------------------------------
+
+void verify_error_is_reported(csubstr case_name, csubstr yaml, Location loc={})
+{
+ SCOPED_TRACE(case_name);
+ SCOPED_TRACE(yaml);
+ Tree tree;
+ ExpectError::do_check(&tree, [&](){
+ parse_in_arena(yaml, &tree);
+ }, loc);
+}
+
// An opening double quote "closed" by a single quote must be rejected,
// in block and flow styles, for maps and seqs alike.
TEST(double_quoted, error_on_unmatched_quotes)
{
    verify_error_is_reported("map block", R"(foo: "'
bar: "")");
    verify_error_is_reported("seq block", R"(- "'
- "")");
    verify_error_is_reported("map flow", R"({foo: "', bar: ""})");
    verify_error_is_reported("seq flow", R"(["', ""])");
}
+
// Same as error_on_unmatched_quotes, but with an escaped double quote
// preceding the bad closing quote -- the escape must not "fix" it.
TEST(double_quoted, error_on_unmatched_quotes_with_escapes)
{
    verify_error_is_reported("map block", R"(foo: "\"'
bar: "")");
    verify_error_is_reported("seq block", R"(- "\"'
- "")");
    verify_error_is_reported("map flow", R"({foo: "\"', bar: ""})");
    verify_error_is_reported("seq flow", R"(["\"', ""])");
}
+
// As error_on_unmatched_quotes, but the offending scalar is the LAST
// entry of the container (exercises the end-of-input path).
TEST(double_quoted, error_on_unmatched_quotes_at_end)
{
    verify_error_is_reported("map block", R"(foo: ""
bar: "')");
    verify_error_is_reported("seq block", R"(- ""
- "')");
    verify_error_is_reported("map flow", R"({foo: "", bar: "'})");
    verify_error_is_reported("seq flow", R"(["", "'])");
}
+
// End-of-container variant with an escaped double quote before the bad
// closing single quote.
TEST(double_quoted, error_on_unmatched_quotes_at_end_with_escapes)
{
    verify_error_is_reported("map block", R"(foo: ""
bar: "\"')");
    verify_error_is_reported("seq block", R"(- ""
- "\"')");
    verify_error_is_reported("map flow", R"({foo: "", bar: "\"'})");
    verify_error_is_reported("seq flow", R"(["", "\"'])");
}
+
// A double-quoted scalar that is simply never closed must be rejected.
TEST(double_quoted, error_on_unclosed_quotes)
{
    verify_error_is_reported("map block", R"(foo: ",
bar: what)");
    verify_error_is_reported("seq block", R"(- "
- what)");
    verify_error_is_reported("map flow", R"({foo: ", bar: what})");
    verify_error_is_reported("seq flow", R"([", what])");
}
+
// Unclosed scalar containing an escaped quote -- the \" must not count
// as the closing quote.
TEST(double_quoted, error_on_unclosed_quotes_with_escapes)
{
    verify_error_is_reported("map block", R"(foo: "\",
bar: what)");
    verify_error_is_reported("seq block", R"(- "\"
- what)");
    verify_error_is_reported("map flow", R"({foo: "\", bar: what})");
    verify_error_is_reported("seq flow", R"(["\", what])");
}
+
// Unclosed scalar as the LAST entry -- input runs out before the quote
// is closed.
TEST(double_quoted, error_on_unclosed_quotes_at_end)
{
    verify_error_is_reported("map block", R"(foo: what
bar: ")");
    verify_error_is_reported("seq block", R"(- what
- ")");
    verify_error_is_reported("map flow", R"({foo: what, bar: "})");
    verify_error_is_reported("seq flow", R"([what, "])");
}
+
// Unclosed scalar at end of input whose final characters are an escaped
// quote -- still an error.
TEST(double_quoted, error_on_unclosed_quotes_at_end_with_escapes)
{
    verify_error_is_reported("map block", R"(foo: what
bar: "\")");
    verify_error_is_reported("seq block", R"(- what
- "\")");
    verify_error_is_reported("map flow", R"({foo: what, bar: "\"})");
    verify_error_is_reported("seq flow", R"([what, "\"])");
}
+
// \x, \u and \U escapes require exactly 2, 4 and 8 hex digits
// respectively; truncated or non-hex digit sequences must be rejected.
TEST(double_quoted, error_on_bad_utf_codepoints)
{
    verify_error_is_reported("incomplete \\x 0", R"(foo: "\x")");
    verify_error_is_reported("incomplete \\x 1", R"(foo: "\x1")");
    verify_error_is_reported("bad value  \\x" , R"(foo: "\xko")");
    verify_error_is_reported("incomplete \\u 0", R"(foo: "\u")");
    verify_error_is_reported("incomplete \\u 1", R"(foo: "\u1")");
    verify_error_is_reported("incomplete \\u 2", R"(foo: "\u12")");
    verify_error_is_reported("incomplete \\u 3", R"(foo: "\u123")");
    verify_error_is_reported("bad value  \\u" , R"(foo: "\ukoko")");
    verify_error_is_reported("incomplete \\U 0", R"(foo: "\U")");
    verify_error_is_reported("incomplete \\U 1", R"(foo: "\U1")");
    verify_error_is_reported("incomplete \\U 2", R"(foo: "\U12")");
    verify_error_is_reported("incomplete \\U 3", R"(foo: "\U123")");
    verify_error_is_reported("incomplete \\U 4", R"(foo: "\U1234")");
    verify_error_is_reported("incomplete \\U 5", R"(foo: "\U12345")");
    verify_error_is_reported("incomplete \\U 6", R"(foo: "\U123456")");
    verify_error_is_reported("incomplete \\U 7", R"(foo: "\U1234567")");
    verify_error_is_reported("bad value  \\U" , R"(foo: "\Ukokokoko")");
}
+
// Regression for ryml issue #253: values forced to a quoted style must
// round-trip (emit then re-parse) unchanged; in particular a carriage
// return must be emitted as the \r escape under double quotes.
TEST(double_quoted, github253)
{
    {
        // force double-quoted emit style and round-trip
        Tree tree;
        NodeRef root = tree.rootref();
        root |= MAP;
        root["t"] = "t't\\nt";
        root["t"] |= _WIP_VAL_DQUO;
        std::string s = emitrs_yaml<std::string>(tree);
        Tree tree2 = parse_in_arena(to_csubstr(s));
        EXPECT_EQ(tree2["t"].val(), tree["t"].val());
    }
    {
        // same value, forced single-quoted emit style
        Tree tree;
        NodeRef root = tree.rootref();
        root |= MAP;
        root["t"] = "t't\\nt";
        root["t"] |= _WIP_VAL_SQUO;
        std::string s = emitrs_yaml<std::string>(tree);
        Tree tree2 = parse_in_arena(to_csubstr(s));
        EXPECT_EQ(tree2["t"].val(), tree["t"].val());
    }
    {
        // a literal \r must be escaped in the emitted double-quoted form
        Tree tree;
        NodeRef root = tree.rootref();
        root |= MAP;
        root["s"] = "t\rt";
        root["s"] |= _WIP_VAL_DQUO;
        std::string s = emitrs_yaml<std::string>(tree);
        EXPECT_EQ(s, "s: \"t\\rt\"\n");
        Tree tree2 = parse_in_arena(to_csubstr(s));
        EXPECT_EQ(tree2["s"].val(), tree["s"].val());
    }
}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
// Table-driven cases for double-quoted scalars: each entry pairs a YAML
// source with the expected CaseNode tree (VALQUO/QV/QK mark quoted
// scalars). The sources are whitespace-significant; do not reformat the
// raw strings.
CASE_GROUP(DOUBLE_QUOTED)
{

ADD_CASE_TO_GROUP("dquoted, only text",
R"("Some text without any quotes."
)",
  N(DOCVAL | VALQUO, "Some text without any quotes.")
);

ADD_CASE_TO_GROUP("dquoted, with single quotes",
R"("Some text 'with single quotes'")",
  N(DOCVAL|VALQUO, "Some text 'with single quotes'")
);

ADD_CASE_TO_GROUP("dquoted, with double quotes",
R"("Some \"text\" \"with double quotes\"")",
  N(DOCVAL|VALQUO, "Some \"text\" \"with double quotes\"")
);

ADD_CASE_TO_GROUP("dquoted, with single and double quotes",
R"("Some text 'with single quotes' \"and double quotes\".")",
  N(DOCVAL|VALQUO, "Some text 'with single quotes' \"and double quotes\".")
);

ADD_CASE_TO_GROUP("dquoted, with escapes",
R"("Some text with escapes \\n \\r \\t")",
  N(DOCVAL|VALQUO, "Some text with escapes \\n \\r \\t")
);

ADD_CASE_TO_GROUP("dquoted, with newline",
R"("Some text with\nnewline")",
  N(DOCVAL|VALQUO, "Some text with\nnewline")
);

ADD_CASE_TO_GROUP("dquoted, with tabs",
R"("\tSome\ttext\twith\ttabs\t")",
  N(DOCVAL|VALQUO, "\tSome\ttext\twith\ttabs\t")
);

// NOTE(review): the "block" entry below is flagged VALQUO although its
// source is a block literal, and its expected value embeds a space+tab --
// presumably matching ryml's filter behavior; confirm against upstream.
ADD_CASE_TO_GROUP("dquoted, with tabs 4ZYM",
R"(plain: text
  lines
quoted: "text
  lines"
block: |
  text
   	lines
)",
  L{N("plain", "text lines"),
    N(KEYVAL|VALQUO, "quoted", "text lines"),
    N(KEYVAL|VALQUO,"block", "text\n \tlines\n")}
);

ADD_CASE_TO_GROUP("dquoted, with tabs 7A4E",
R"(" 1st non-empty

 2nd non-empty 
	3rd non-empty ")",
  N(DOCVAL|VALQUO, " 1st non-empty\n2nd non-empty 3rd non-empty ")
);

ADD_CASE_TO_GROUP("dquoted, with tabs TL85",
R"("
  foo 
 
  	 bar

  baz
")", N(DOCVAL|VALQUO, " foo\nbar\nbaz "));

ADD_CASE_TO_GROUP("dquoted, all",
R"("Several lines of text,
containing 'single quotes' and \"double quotes\". \
Escapes (like \\n) work.\nIn addition,
newlines can be esc\
aped to prevent them from being converted to a space.

Newlines can also be added by leaving a blank line.
            Leading whitespace on lines is ignored."
)",
  N(DOCVAL|VALQUO, "Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) work.\nIn addition, newlines can be escaped to prevent them from being converted to a space.\nNewlines can also be added by leaving a blank line. Leading whitespace on lines is ignored.")
);

ADD_CASE_TO_GROUP("dquoted, empty",
R"("")",
  N(DOCVAL|VALQUO, "")
);

ADD_CASE_TO_GROUP("dquoted, blank",
R"(
- ""
- " "
- "  "
- "   "
- "    "
)",
  L{N(QV, ""), N(QV, " "), N(QV, "  "), N(QV, "   "), N(QV, "    ")}
);

ADD_CASE_TO_GROUP("dquoted, numbers", // these should not be quoted when emitting
R"(
- -1
- -1.0
- +1.0
- 1e-2
- 1e+2
)",
  L{N("-1"), N("-1.0"), N("+1.0"), N("1e-2"), N("1e+2")}
);

// NOTE(review): the next three fixtures use SINGLE-quoted sources despite
// living in the double-quoted group -- presumably deliberate overlap with
// the single-quoted cases; confirm against upstream ryml.
ADD_CASE_TO_GROUP("dquoted, trailing space",
R"('a aaaa  ')",
  N(DOCVAL|VALQUO, "a aaaa  ")
);

ADD_CASE_TO_GROUP("dquoted, leading space",
R"('  a aaaa')",
  N(DOCVAL|VALQUO, "  a aaaa")
);

ADD_CASE_TO_GROUP("dquoted, trailing and leading space",
R"('  012345  ')",
  N(DOCVAL|VALQUO, "  012345  ")
);

ADD_CASE_TO_GROUP("dquoted, 1 dquote",
R"("\"")",
  N(DOCVAL|VALQUO, "\"")
);

ADD_CASE_TO_GROUP("dquoted, 2 dquotes",
R"("\"\"")",
  N(DOCVAL|VALQUO, "\"\"")
);

ADD_CASE_TO_GROUP("dquoted, 3 dquotes",
R"("\"\"\"")",
  N(DOCVAL|VALQUO, "\"\"\"")
);

ADD_CASE_TO_GROUP("dquoted, 4 dquotes",
R"("\"\"\"\"")",
  N(DOCVAL|VALQUO, "\"\"\"\"")
);

ADD_CASE_TO_GROUP("dquoted, 5 dquotes",
R"("\"\"\"\"\"")",
  N(DOCVAL|VALQUO, "\"\"\"\"\"")
);

ADD_CASE_TO_GROUP("dquoted, example 2",
R"("This is a key\nthat has multiple lines\n": and this is its value
)",
  L{N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")}
);

ADD_CASE_TO_GROUP("dquoted, example 2.1",
R"("This is a key

that has multiple lines

": and this is its value
)",
  L{N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")}
);
}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_emit.cpp b/thirdparty/ryml/test/test_emit.cpp
new file mode 100644
index 000000000..3166dfaed
--- /dev/null
+++ b/thirdparty/ryml/test/test_emit.cpp
@@ -0,0 +1,491 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+#include <c4/fs/fs.hpp>
+
+#include "./test_case.hpp"
+
+#include <gtest/gtest.h>
+
+namespace c4 {
+namespace yml {
+
+template<class Emit>
+std::string emit2file(Emit &&fn)
+{
+ C4_SUPPRESS_WARNING_MSVC_WITH_PUSH(4996) // fopen unsafe
+ std::string filename = fs::tmpnam<std::string>();
+ FILE *f = fopen(filename.c_str(), "wb");
+ C4_CHECK(f != nullptr);
+ fn(f);
+ fflush(f);
+ fclose(f);
+ std::string result = fs::file_get_contents<std::string>(filename.c_str());
+ fs::rmfile(filename.c_str());
+ return result;
+ C4_SUPPRESS_WARNING_MSVC_POP
+}
+
+template<class Emit>
+std::string emit2stream(Emit &&fn)
+{
+ std::ostringstream ss;
+ fn(ss);
+ return ss.str();
+}
+
+template<class Emit>
+std::string emit2buf(Emit &&fn)
+{
+ std::string buf;
+ buf.resize(2048);
+ substr out = fn(to_substr(buf));
+ if(out.len > buf.size())
+ {
+ buf.resize(out.len);
+ out = fn(to_substr(buf));
+ }
+ buf.resize(out.len);
+ return buf;
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(as_json, basic)
+{
+ Tree et;
+ {
+ as_json j(et);
+ EXPECT_EQ(j.tree, &et);
+ }
+ Tree t = parse_in_arena("[foo, bar]");
+ {
+ as_json j(t);
+ EXPECT_EQ(j.tree, &t);
+ EXPECT_EQ(j.node, t.root_id());
+ }
+ {
+ as_json j(t, 2u);
+ EXPECT_EQ(j.tree, &t);
+ EXPECT_EQ(j.node, 2u);
+ }
+ {
+ as_json j(t[0]);
+ EXPECT_EQ(j.tree, &t);
+ EXPECT_EQ(j.node, 1u);
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<class ...Args>
+void test_emits(Tree const& t, size_t id, std::string const& expected, std::string const& expected_json)
+{
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_yaml(t, id, buf); }), expected);
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_json(t, id, buf); }), expected_json);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_yaml(t, id, f); }), expected);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_json(t, id, f); }), expected_json);
+ EXPECT_EQ(emitrs_yaml<std::string>(t, id), expected);
+ EXPECT_EQ(emitrs_json<std::string>(t, id), expected_json);
+}
+
+template<class ...Args>
+void test_emits(Tree const& t, std::string const& expected, std::string const& expected_json)
+{
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_yaml(t, buf); }), expected);
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_json(t, buf); }), expected_json);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_yaml(t, f); }), expected);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_json(t, f); }), expected_json);
+ EXPECT_EQ(emit2stream([&](std::ostream& s){ s << t; }), expected);
+ EXPECT_EQ(emit2stream([&](std::ostream& s){ s << as_json(t); }), expected_json);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), expected);
+ EXPECT_EQ(emitrs_json<std::string>(t), expected_json);
+}
+
+template<class ...Args>
+void test_emits(ConstNodeRef t, std::string const& expected, std::string const& expected_json)
+{
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_yaml(t, buf); }), expected);
+ EXPECT_EQ(emit2buf([&](substr buf){ return emit_json(t, buf); }), expected_json);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_yaml(t, f); }), expected);
+ EXPECT_EQ(emit2file([&](FILE *f){ return emit_json(t, f); }), expected_json);
+ EXPECT_EQ(emit2stream([&](std::ostream& s){ s << t; }), expected);
+ EXPECT_EQ(emit2stream([&](std::ostream& s){ s << as_json(t); }), expected_json);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), expected);
+ EXPECT_EQ(emitrs_json<std::string>(t), expected_json);
+}
+
+
+TEST(emit, empty_tree)
+{
+ const Tree t; // must be const!
+ std::string expected = R"()";
+ test_emits(t, expected, expected);
+}
+
+TEST(emit, existing_tree)
+{
+ const Tree t = parse_in_arena("[foo, bar]");
+ std::string expected = "- foo\n- bar\n";
+ std::string expected_json = R"(["foo","bar"])";
+ test_emits(t, expected, expected_json);
+}
+
+TEST(emit, existing_seq_node)
+{
+ Tree nct = parse_in_arena("[foo, bar, [nested, seq], {nested: map}]");
+ Tree const& t = nct;
+ {
+ std::string expected = "- foo\n- bar\n- - nested\n - seq\n- nested: map\n";
+ std::string expected_json = R"(["foo","bar",["nested","seq"],{"nested": "map"}])";
+ {
+ SCOPED_TRACE("rootref");
+ test_emits(t.crootref(), expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t");
+ test_emits(t, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, t.root_id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[0];
+ std::string expected = "foo\n";
+ std::string expected_json = "\"foo\"";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ expected = "foo";
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[1];
+ std::string expected = "bar\n";
+ std::string expected_json = "\"bar\"";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ expected = "bar";
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+
+ }
+ {
+ ConstNodeRef n = t[2];
+ std::string expected = "- nested\n- seq\n";
+ std::string expected_json = "[\"nested\",\"seq\"]";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "[nested,seq]";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[3];
+ std::string expected = "nested: map\n";
+ std::string expected_json = "{\"nested\": \"map\"}";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "{nested: map}";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+}
+
+TEST(emit, existing_map_node)
+{
+ Tree nct = parse_in_arena("{0: foo, 1: bar, 2: [nested, seq], 3: {nested: map}}");
+ Tree const& t = nct;
+ {
+ std::string expected = "0: foo\n1: bar\n2:\n - nested\n - seq\n3:\n nested: map\n";
+ std::string expected_json = R"({"0": "foo","1": "bar","2": ["nested","seq"],"3": {"nested": "map"}})";
+ {
+ SCOPED_TRACE("rootref");
+ test_emits(t.rootref(), expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t");
+ test_emits(t, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, t.root_id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[0];
+ std::string expected = "0: foo\n";
+ std::string expected_json = "\"0\": \"foo\"";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "0: foo";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[1];
+ std::string expected = "1: bar\n";
+ std::string expected_json = "\"1\": \"bar\"";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "1: bar";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[2];
+ std::string expected = "2:\n - nested\n - seq\n";
+ std::string expected_json = "\"2\": [\"nested\",\"seq\"]";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "2: [nested,seq]";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+ {
+ ConstNodeRef n = t[3];
+ std::string expected = "3:\n nested: map\n";
+ std::string expected_json = "\"3\": {\"nested\": \"map\"}";
+ {
+ SCOPED_TRACE("noderef");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ expected = "3: {nested: map}";
+ nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(n, expected, expected_json);
+ }
+ {
+ SCOPED_TRACE("t, id");
+ test_emits(t, n.id(), expected, expected_json);
+ }
+ }
+}
+
+TEST(emit, percent_is_quoted)
+{
+ Tree ti = parse_in_arena("{}");
+ ASSERT_TRUE(ti.rootref().is_map());
+ ti["%ROOT"] = "%VAL";
+ ti["%ROOT2"] |= SEQ;
+ ti["%ROOT2"][0] = "%VAL";
+ ti["%ROOT2"][1] = "%VAL";
+ std::string yaml = emitrs_yaml<std::string>(ti);
+ test_check_emit_check(to_csubstr(yaml), [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("%ROOT"));
+ ASSERT_TRUE(t.rootref().has_child("%ROOT2"));
+ ASSERT_EQ(t["%ROOT2"].num_children(), 2u);
+ EXPECT_TRUE(t["%ROOT"].is_key_quoted());
+ EXPECT_TRUE(t["%ROOT"].is_val_quoted());
+ EXPECT_TRUE(t["%ROOT2"].is_key_quoted());
+ EXPECT_TRUE(t["%ROOT2"][0].is_val_quoted());
+ EXPECT_TRUE(t["%ROOT2"][1].is_val_quoted());
+ });
+}
+
+TEST(emit, at_is_quoted__issue_309)
+{
+ Tree ti = parse_in_arena("{at: [], backtick: []");
+ ti["at"][0] << "@test";
+ ti["at"][1] = "@test2";
+ ti["at"][2] << "@";
+ ti["at"][3] = "@";
+ ti["backtick"][0] << "`test";
+ ti["backtick"][1] = "`test2";
+ ti["backtick"][2] << "`";
+ ti["backtick"][3] = "`";
+ std::string yaml = emitrs_yaml<std::string>(ti);
+ test_check_emit_check(to_csubstr(yaml), [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("at"));
+ ASSERT_TRUE(t.rootref().has_child("backtick"));
+ ASSERT_EQ(t["at"].num_children(), 4u);
+ ASSERT_EQ(t["backtick"].num_children(), 4u);
+ EXPECT_EQ(t["at"][0].val(), "@test");
+ EXPECT_EQ(t["at"][1].val(), "@test2");
+ EXPECT_EQ(t["at"][2].val(), "@");
+ EXPECT_EQ(t["at"][3].val(), "@");
+ EXPECT_TRUE(t["at"][0].is_val_quoted());
+ EXPECT_TRUE(t["at"][1].is_val_quoted());
+ EXPECT_TRUE(t["at"][2].is_val_quoted());
+ EXPECT_TRUE(t["at"][3].is_val_quoted());
+ EXPECT_EQ(t["backtick"][0].val(), "`test");
+ EXPECT_EQ(t["backtick"][1].val(), "`test2");
+ EXPECT_EQ(t["backtick"][2].val(), "`");
+ EXPECT_EQ(t["backtick"][3].val(), "`");
+ EXPECT_TRUE(t["backtick"][0].is_val_quoted());
+ EXPECT_TRUE(t["backtick"][1].is_val_quoted());
+ EXPECT_TRUE(t["backtick"][2].is_val_quoted());
+ EXPECT_TRUE(t["backtick"][3].is_val_quoted());
+ });
+}
+
+TEST(emit, at_is_quoted_only_in_the_beggining__issue_320)
+{
+ Tree ti = parse_in_arena("{at: [], backtick: []");
+ ti["at"].append_child() << "@test";
+ ti["at"].append_child() << "t@est";
+ ti["at"].append_child() << "test@";
+ ti["at"].append_child() = "@test2";
+ ti["at"].append_child() = "t@est2";
+ ti["at"].append_child() = "test2@";
+ ti["backtick"].append_child() << "`test";
+ ti["backtick"].append_child() << "t`est";
+ ti["backtick"].append_child() << "test`";
+ ti["backtick"].append_child() = "`test2";
+ ti["backtick"].append_child() = "t`est2";
+ ti["backtick"].append_child() = "test2`";
+ std::string yaml = emitrs_yaml<std::string>(ti);
+ test_check_emit_check(to_csubstr(yaml), [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("at"));
+ ASSERT_TRUE(t.rootref().has_child("backtick"));
+ ASSERT_EQ(t["at"].num_children(), 6u);
+ ASSERT_EQ(t["backtick"].num_children(), 6u);
+ EXPECT_EQ(t["at"][0].val(), "@test");
+ EXPECT_EQ(t["at"][1].val(), "t@est");
+ EXPECT_EQ(t["at"][2].val(), "test@");
+ EXPECT_EQ(t["at"][3].val(), "@test2");
+ EXPECT_EQ(t["at"][4].val(), "t@est2");
+ EXPECT_EQ(t["at"][5].val(), "test2@");
+ EXPECT_TRUE( t["at"][0].is_val_quoted());
+ EXPECT_TRUE( ! t["at"][1].is_val_quoted());
+ EXPECT_TRUE( ! t["at"][2].is_val_quoted());
+ EXPECT_TRUE( t["at"][3].is_val_quoted());
+ EXPECT_TRUE( ! t["at"][4].is_val_quoted());
+ EXPECT_TRUE( ! t["at"][5].is_val_quoted());
+ EXPECT_EQ(t["backtick"][0].val(), "`test");
+ EXPECT_EQ(t["backtick"][1].val(), "t`est");
+ EXPECT_EQ(t["backtick"][2].val(), "test`");
+ EXPECT_EQ(t["backtick"][3].val(), "`test2");
+ EXPECT_EQ(t["backtick"][4].val(), "t`est2");
+ EXPECT_EQ(t["backtick"][5].val(), "test2`");
+ EXPECT_TRUE( t["backtick"][0].is_val_quoted());
+ EXPECT_TRUE( ! t["backtick"][1].is_val_quoted());
+ EXPECT_TRUE( ! t["backtick"][2].is_val_quoted());
+ EXPECT_TRUE( t["backtick"][3].is_val_quoted());
+ EXPECT_TRUE( ! t["backtick"][4].is_val_quoted());
+ EXPECT_TRUE( ! t["backtick"][5].is_val_quoted());
+ });
+}
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_empty_file.cpp b/thirdparty/ryml/test/test_empty_file.cpp
new file mode 100644
index 000000000..38e66fbaf
--- /dev/null
+++ b/thirdparty/ryml/test/test_empty_file.cpp
@@ -0,0 +1,79 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(EMPTY_FILE)
+{
+
+ADD_CASE_TO_GROUP("empty0-nochars",
+"",
+NOTYPE);
+
+
+ADD_CASE_TO_GROUP("empty0-multiline", R"(
+
+
+)", NOTYPE);
+
+
+ADD_CASE_TO_GROUP("empty0-multiline with spaces", R"(
+
+
+
+
+
+
+)", NOTYPE);
+
+
+ADD_CASE_TO_GROUP("empty0-multiline with spaces and tabs", R"(
+
+
+
+
+
+
+
+)", NOTYPE);
+
+
+ADD_CASE_TO_GROUP("empty0-multiline-with-comments 0", R"(
+# well hello sir, I see you are fine
+# very fine thank you
+# send my very best wishes
+)", NOTYPE);
+
+ADD_CASE_TO_GROUP("empty0-multiline-with-comments 1", R"(
+
+
+
+# well hello sir, I see you are fine
+# very fine thank you
+# send my very best wishes
+
+
+
+)", NOTYPE);
+
+ADD_CASE_TO_GROUP("empty0-multiline-with-comments 2", R"(
+
+
+
+
+# well hello sir, I see you are fine
+# very fine thank you
+# send my very best wishes
+
+
+
+
+
+
+)", NOTYPE);
+
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_empty_map.cpp b/thirdparty/ryml/test/test_empty_map.cpp
new file mode 100644
index 000000000..44d818cc6
--- /dev/null
+++ b/thirdparty/ryml/test/test_empty_map.cpp
@@ -0,0 +1,43 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(EMPTY_MAP)
+{
+
+ADD_CASE_TO_GROUP("empty map, explicit",
+"{}",
+ MAP
+);
+
+
+ADD_CASE_TO_GROUP("empty map, explicit, whitespace",
+" {}",
+ MAP
+);
+
+
+ADD_CASE_TO_GROUP("empty map, multiline",
+R"({
+
+}
+)",
+ MAP
+);
+
+
+ADD_CASE_TO_GROUP("empty map, multilines",
+R"({
+# ksjdfkjhsdfkjhsdfkjh
+
+
+}
+)",
+ MAP
+ );
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_empty_scalar.cpp b/thirdparty/ryml/test/test_empty_scalar.cpp
new file mode 100644
index 000000000..52147569c
--- /dev/null
+++ b/thirdparty/ryml/test/test_empty_scalar.cpp
@@ -0,0 +1,353 @@
+#include "./test_group.hpp"
+#include <string>
+
+namespace c4 {
+namespace yml {
+
+// See also:
+// https://github.com/biojppm/rapidyaml/issues/263
+// https://github.com/biojppm/rapidyaml/pull/264
+
+C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
+
+constexpr const NodeType_e DQV = (NodeType_e)(DOC | QV);
+
+TEST(empty_scalar, parse_zero_length_strings)
+{
+ char inp[] = R"(
+seq:
+ - ""
+ - ''
+ - >
+ - |
+map:
+ a: ""
+ b: ''
+ c: >
+ d: |
+)";
+ const Tree tr = parse_in_place(inp);
+ EXPECT_TRUE(tr["seq"].has_key());
+ EXPECT_TRUE(tr["map"].has_key());
+ EXPECT_TRUE(tr["seq"].is_seq());
+ EXPECT_TRUE(tr["map"].is_map());
+ for(const char *name : {"seq", "map"})
+ {
+ ConstNodeRef node = tr[to_csubstr(name)];
+ ASSERT_EQ(node.num_children(), 4);
+ for(const auto &child : node.children())
+ {
+ EXPECT_TRUE(child.is_val_quoted());
+ EXPECT_EQ(child.val().len, 0u);
+ EXPECT_NE(child.val().str, nullptr);
+ EXPECT_NE(child.val(), nullptr);
+ EXPECT_EQ(child.val(), "");
+ EXPECT_FALSE(child.val_is_null());
+ }
+ }
+}
+
+TEST(empty_scalar, flow_seq)
+{
+ test_check_emit_check("['', '']", [&](Tree const &t){
+ ASSERT_TRUE(t.rootref().has_children());
+ for(ConstNodeRef ch : t.rootref().children())
+ {
+ EXPECT_TRUE(ch.is_val_quoted());
+ EXPECT_FALSE(ch.val_is_null());
+ EXPECT_EQ(ch.val().len, 0);
+ EXPECT_NE(ch.val().str, nullptr);
+ EXPECT_NE(ch.val(), nullptr);
+ }
+ });
+ test_check_emit_check("[ , ]", [&](Tree const &t){
+ ASSERT_TRUE(t.rootref().has_children());
+ for(ConstNodeRef ch : t.rootref().children())
+ {
+ EXPECT_FALSE(ch.is_val_quoted());
+ EXPECT_TRUE(ch.val_is_null());
+ EXPECT_EQ(ch.val().len, 0);
+ EXPECT_EQ(ch.val().str, nullptr);
+ EXPECT_EQ(ch.val(), nullptr);
+ }
+ });
+}
+
+TEST(empty_scalar, parse_empty_strings)
+{
+ char inp[] = R"(
+# use multiple empty entries to ensure the parser
+# correctly deals with the several cases
+seq:
+ -
+ -
+ -
+ -
+map:
+ a:
+ b:
+ c:
+ d:
+)";
+ const Tree tr = parse_in_place(inp);
+ for(const char *name : {"seq", "map"})
+ {
+ ConstNodeRef node = tr[to_csubstr(name)];
+ ASSERT_EQ(node.num_children(), 4);
+ for(const auto &child : node.children())
+ {
+ EXPECT_FALSE(child.type().is_val_quoted());
+ EXPECT_EQ(child.val(), "");
+ EXPECT_EQ(child.val(), nullptr);
+ EXPECT_EQ(child.val().str, nullptr);
+ EXPECT_EQ(child.val().len, 0u);
+ EXPECT_TRUE(child.val_is_null());
+ }
+ }
+}
+
+TEST(empty_scalar, std_string)
+{
+ std::string stdstr;
+ csubstr stdss = to_csubstr(stdstr);
+ csubstr nullss;
+ EXPECT_NE(stdss, nullptr);
+ EXPECT_NE(stdss.str, nullptr);
+ EXPECT_EQ(stdss.len, 0u);
+ EXPECT_EQ(nullss, nullptr);
+ EXPECT_EQ(nullss.str, nullptr);
+ EXPECT_EQ(nullss.len, 0u);
+ Tree tree = parse_in_arena("{ser: {}, eq: {}}");
+ tree["ser"]["stdstr"] << stdss;
+ tree["ser"]["nullss"] << nullss;
+ tree["eq"]["stdstr"] = stdss;
+ tree["eq"]["nullss"] = nullss;
+ EXPECT_EQ(emitrs_yaml<std::string>(tree),
+ "ser:\n"
+ " stdstr: ''\n"
+ " nullss: \n"
+ "eq:\n"
+ " stdstr: ''\n"
+ " nullss: \n"
+ );
+}
+
+TEST(empty_scalar, to_arena)
+{
+ Tree tr;
+ {
+ const char *val = "";
+ size_t num = to_chars(substr{}, val);
+ ASSERT_EQ(num, 0u);
+ char buf_[10];
+ csubstr serialized = to_chars_sub(buf_, val);
+ EXPECT_EQ(serialized.len, 0);
+ EXPECT_NE(serialized.str, nullptr);
+ EXPECT_NE(serialized, nullptr);
+ csubstr r = tr.to_arena("");
+ EXPECT_EQ(r.len, 0u);
+ EXPECT_NE(r.str, nullptr);
+ EXPECT_NE(r, nullptr);
+ }
+ {
+ const char *val = nullptr;
+ size_t num = to_chars(substr{}, val);
+ ASSERT_EQ(num, 0u);
+ char buf_[10];
+ csubstr serialized = to_chars_sub(buf_, val);
+ EXPECT_EQ(serialized.len, 0);
+ EXPECT_NE(serialized.str, nullptr);
+ EXPECT_NE(serialized, nullptr);
+ csubstr r = tr.to_arena("");
+ EXPECT_EQ(r.len, 0u);
+ EXPECT_NE(r.str, nullptr);
+ EXPECT_NE(r, nullptr);
+ r = tr.to_arena(val);
+ EXPECT_EQ(r.len, 0u);
+ EXPECT_EQ(r.str, nullptr);
+ EXPECT_EQ(r, nullptr);
+ }
+ {
+ std::nullptr_t val = nullptr;
+ size_t num = to_chars(substr{}, val);
+ ASSERT_EQ(num, 0u);
+ csubstr r = tr.to_arena(val);
+ EXPECT_EQ(r.len, 0u);
+ EXPECT_EQ(r.str, nullptr);
+ EXPECT_EQ(r, nullptr);
+ }
+}
+
+TEST(empty_scalar, gcc_error)
+{
+ Tree tr;
+ csubstr nullstr = {};
+ ASSERT_EQ(nullstr.str, nullptr);
+ ASSERT_EQ(nullstr.len, 0);
+ std::cout << "\nserializing with empty arena...\n";
+ csubstr result = tr.to_arena(nullstr);
+ EXPECT_EQ(result.str, nullptr); // fails!
+ EXPECT_EQ(result.len, 0);
+ std::cout << "\nserializing with nonempty arena...\n";
+ result = tr.to_arena(nullstr);
+ EXPECT_EQ(result.str, nullptr); // fails!
+ EXPECT_EQ(result.len, 0);
+}
+
+TEST(empty_scalar, build_zero_length_string)
+{
+ Tree tr;
+ NodeRef root = tr.rootref();
+ root |= MAP;
+ auto addseq = [&root](csubstr name) { NodeRef n = root[name]; n |= SEQ; return n; };
+
+ // try both with nonnull-zero-length and null-zero-length
+ std::string stdstr;
+ csubstr stdss = to_csubstr(stdstr);
+ csubstr empty = csubstr("nonempty").first(0);
+ csubstr nullss = {};
+
+ // these are the conditions we wish to cover:
+ ASSERT_TRUE(stdss.str != nullptr);
+ ASSERT_TRUE(stdss.len == 0u);
+ ASSERT_TRUE(empty.str != nullptr);
+ ASSERT_TRUE(empty.len == 0u);
+ ASSERT_TRUE(nullss.str == nullptr);
+ ASSERT_TRUE(nullss.len == 0u);
+
+ // = and << must have exactly the same behavior where nullity is
+ // regarded
+
+ {
+ NodeRef quoted = addseq("quoted");
+ {NodeRef r = quoted.append_child(); r = "" ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted.append_child(); r << "" ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted.append_child(); r = empty ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted.append_child(); r << empty ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted.append_child(); r = stdss ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted.append_child(); r << stdss ; r.set_type(r.type() | VALQUO);}
+ }
+ {
+ NodeRef quoted_null = addseq("quoted_null");
+ {NodeRef r = quoted_null.append_child(); r = nullss ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted_null.append_child(); r << nullss ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted_null.append_child(); r = nullptr ; r.set_type(r.type() | VALQUO);}
+ {NodeRef r = quoted_null.append_child(); r << nullptr; r.set_type(r.type() | VALQUO);}
+ }
+ {
+ NodeRef non_quoted = addseq("nonquoted");
+ non_quoted.append_child() = "";
+ non_quoted.append_child() << "";
+ non_quoted.append_child() = empty;
+ non_quoted.append_child() << empty;
+ non_quoted.append_child() = stdss;
+ non_quoted.append_child() << stdss;
+ }
+ {
+ NodeRef non_quoted_null = addseq("nonquoted_null");
+ non_quoted_null.append_child() = nullss;
+ non_quoted_null.append_child() << nullss;
+ non_quoted_null.append_child() = nullptr;
+ non_quoted_null.append_child() << nullptr;
+ }
+
+ // quoted cases will never be null, regardless of the
+ // incoming scalar
+ auto test_quoted_empty = [](ConstNodeRef node){
+ SCOPED_TRACE(node.key());
+ ASSERT_TRUE(node.has_children());
+ {
+ size_t pos = 0;
+ for(ConstNodeRef child : node.cchildren())
+ {
+ EXPECT_TRUE(child.is_val_quoted()) << "pos=" << pos;
+ EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
+ EXPECT_NE(child.val().str, nullptr) << "pos=" << pos;
+ EXPECT_NE(child.val(), nullptr) << "pos=" << pos;
+ EXPECT_EQ(child.val(), "") << "pos=" << pos;
+ EXPECT_FALSE(child.val_is_null()) << "pos=" << pos;
+ pos++;
+ }
+ }
+ };
+ auto test_quoted_null = [](ConstNodeRef node){
+ SCOPED_TRACE(node.key());
+ ASSERT_TRUE(node.has_children());
+ size_t pos = 0;
+ for(ConstNodeRef child : node.cchildren())
+ {
+ EXPECT_TRUE(child.is_val_quoted()) << "pos=" << pos;
+ EXPECT_FALSE(child.val_is_null()) << "pos=" << pos; // because it's quoted
+ EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
+ EXPECT_EQ(child.val().str, nullptr) << "pos=" << pos;
+ EXPECT_EQ(child.val(), nullptr) << "pos=" << pos;
+ EXPECT_EQ(child.val(), "") << "pos=" << pos;
+ pos++;
+ }
+ };
+ // ... but according to the incoming scalar, non quoted cases may
+ // or may not be null
+ auto test_non_quoted_empty = [](ConstNodeRef node){
+ SCOPED_TRACE(node.key());
+ ASSERT_TRUE(node.has_children());
+ size_t pos = 0;
+ for(ConstNodeRef child : node.cchildren())
+ {
+ EXPECT_TRUE(child.is_val()) << "pos=" << pos;
+ EXPECT_FALSE(child.val_is_null()) << "pos=" << pos; // because it's quoted
+ EXPECT_EQ(child.val(), "") << "pos=" << pos;
+ EXPECT_NE(child.val(), nullptr) << "pos=" << pos;
+ EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
+ EXPECT_NE(child.val().str, nullptr) << "pos=" << pos;
+ ++pos;
+ }
+ };
+ auto test_non_quoted_null = [](ConstNodeRef node){
+ SCOPED_TRACE(node.key());
+ ASSERT_TRUE(node.has_children());
+ size_t pos = 0;
+ for(ConstNodeRef child : node.cchildren())
+ {
+ EXPECT_TRUE(child.is_val()) << "pos=" << pos;
+ EXPECT_EQ(child.val(), "") << "pos=" << pos;
+ EXPECT_EQ(child.val(), nullptr) << "pos=" << pos;
+ EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
+ EXPECT_EQ(child.val().str, nullptr) << "pos=" << pos;
+ EXPECT_TRUE(child.val_is_null()) << "pos=" << pos;
+ ++pos;
+ }
+ };
+
+ {
+ SCOPED_TRACE("input tree");
+ test_quoted_empty(tr["quoted"]);
+ // in the built tree, the values will be quoted and null
+ test_quoted_null(tr["quoted_null"]);
+ test_non_quoted_empty(tr["nonquoted"]);
+ test_non_quoted_null(tr["nonquoted_null"]);
+ }
+
+ std::string yaml = emitrs_yaml<std::string>(tr);
+ std::cout << yaml;
+ test_check_emit_check(to_csubstr(yaml), [&](Tree const &t){
+ SCOPED_TRACE("output tree");
+ test_quoted_empty(t["quoted"]);
+ // after a roundtrip, they will be nonnull, because the quotes win.
+ test_quoted_empty(t["quoted_null"]);
+ test_non_quoted_empty(t["nonquoted"]);
+ test_non_quoted_null(t["nonquoted_null"]);
+ });
+}
+
+CASE_GROUP(EMPTY_SCALAR)
+{
+ADD_CASE_TO_GROUP("empty scalar, single quoted",
+ "''",
+ N(DQV, "")
+);
+}
+
+} // namespace yml
+} // namespace c4
+
+C4_SUPPRESS_WARNING_GCC_POP
diff --git a/thirdparty/ryml/test/test_empty_seq.cpp b/thirdparty/ryml/test/test_empty_seq.cpp
new file mode 100644
index 000000000..2b8bcab7b
--- /dev/null
+++ b/thirdparty/ryml/test/test_empty_seq.cpp
@@ -0,0 +1,40 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(EMPTY_SEQ)
+{
+
+ADD_CASE_TO_GROUP("empty seq, explicit",
+"[]",
+ SEQ
+);
+
+
+ADD_CASE_TO_GROUP("empty seq, explicit, whitespace",
+" []",
+ SEQ
+);
+
+
+ADD_CASE_TO_GROUP("empty seq, multiline",
+R"([
+]
+)",
+ SEQ
+);
+
+ADD_CASE_TO_GROUP("empty seq, multilines",
+R"([
+# ksjdfkjhsdfkjhsdfkjh
+
+
+]
+)",
+ SEQ
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_explicit_key.cpp b/thirdparty/ryml/test/test_explicit_key.cpp
new file mode 100644
index 000000000..a9aefd59e
--- /dev/null
+++ b/thirdparty/ryml/test/test_explicit_key.cpp
@@ -0,0 +1,419 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+TEST(explicit_key, test_suite_5WE3)
+{
+ csubstr yaml = R"(
+? explicit key # Empty value
+? |
+ block key
+: - one # Explicit compact
+ - two # block value
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_NE(t.find_child(t.root_id(), "explicit key"), (size_t)NONE);
+ ASSERT_NE(t.find_child(t.root_id(), "block key\n"), (size_t)NONE);
+ EXPECT_EQ(t["explicit key"].val(), csubstr{});
+ EXPECT_TRUE(t["block key\n"].is_seq());
+ EXPECT_EQ(t["block key\n"][0], csubstr("one"));
+ EXPECT_EQ(t["block key\n"][1], csubstr("two"));
+ });
+}
+
+
+TEST(explicit_key, test_suite_DFF7_v1)
+{
+ csubstr yaml = R"(
+{
+? explicit: entry,
+implicit: entry,
+?
+}
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_EQ(t.rootref().num_children(), 3u);
+ ASSERT_TRUE(t.rootref().has_child("explicit"));
+ EXPECT_EQ(t["explicit"].val(), csubstr("entry"));
+ ASSERT_TRUE(t.rootref().has_child("implicit"));
+ EXPECT_EQ(t["explicit"].val(), csubstr("entry"));
+ ASSERT_TRUE(t.rootref().has_child(csubstr{}));
+ EXPECT_EQ(t[csubstr{}].val(), csubstr{});
+ });
+}
+
+TEST(explicit_key, test_suite_DFF7_v2)
+{
+ csubstr yaml = R"(
+{
+?
+ key on next line
+:
+ val on next line
+,
+?
+ # no key
+:
+ val on next line
+}
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_EQ(t.rootref().num_children(), 2u);
+ ASSERT_TRUE(t.rootref().has_child("key on next line"));
+ EXPECT_EQ(t[0].key(), "key on next line");
+ EXPECT_EQ(t[0].val(), "val on next line");
+ EXPECT_EQ(t[1].key(), csubstr{});
+ EXPECT_EQ(t[1].val(), "val on next line");
+ });
+}
+
+
+TEST(explicit_key, test_suite_FRK4)
+{
+ csubstr yaml = R"(
+{
+ ? foo :,
+ : bar,
+}
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("foo"));
+ EXPECT_EQ(t["foo"].val(), csubstr{});
+ ASSERT_TRUE(t.rootref().has_child(csubstr{}));
+ EXPECT_EQ(t[csubstr{}].val(), csubstr("bar"));
+ });
+}
+
+
+TEST(explicit_key, test_suite_M2N8)
+{
+ csubstr yaml = R"(
+- ? : x
+- ? :
+- ? :
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_seq());
+ ASSERT_EQ(t.rootref().num_children(), 3u);
+ ASSERT_EQ(t[0].num_children(), 1u);
+ EXPECT_EQ(t[0][0].key(), csubstr{});
+ EXPECT_EQ(t[0][0].val(), "x");
+ ASSERT_EQ(t[1].num_children(), 1u);
+ EXPECT_EQ(t[1][0].key(), csubstr{});
+ EXPECT_EQ(t[1][0].val(), csubstr{});
+ ASSERT_EQ(t[2].num_children(), 1u);
+ EXPECT_EQ(t[2][0].key(), csubstr{});
+ EXPECT_EQ(t[2][0].val(), csubstr{});
+ });
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(EXPLICIT_KEY)
+{
+//
+ADD_CASE_TO_GROUP("explicit key, last value missing",
+R"(
+? a
+? b
+?
+--- !!set # test that we do not add any last item
+? a
+? b
+--- !!set # test that we do add the last item
+? a
+? b
+?
+...
+)",
+N(STREAM, L{
+ N(DOCMAP, L{
+ N(KEYVAL, "a", {}),
+ N(KEYVAL, "b", {}),
+ N(KEYVAL, "", {})
+ }),
+ N(DOCMAP, TL("!!set", L{
+ N(KEYVAL, "a", {}),
+ N(KEYVAL, "b", {}),
+ })),
+ N(DOCMAP, TL("!!set", L{
+ N(KEYVAL, "a", {}),
+ N(KEYVAL, "b", {}),
+ N(KEYVAL, "", {})
+ })),
+ })
+);
+
+ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW",
+R"(
+a!"#$%&'()*+,-./09:;<=>?@AZ[\]^_`az{|}~: safe
+?foo: safe question mark
+:foo: safe colon
+-foo: safe dash
+this is#not: a comment
+)",
+L{
+ N("a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~", "safe"),
+ N("?foo", "safe question mark"),
+ N(":foo", "safe colon"),
+ N("-foo", "safe dash"),
+ N("this is#not", "a comment"),
+});
+
+ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, expl",
+R"({
+ a!"#$%&'()*+-./09:;<=>?@AZ[\]^_`az{|~: safe,
+ ?foo: safe question mark,
+ :foo: safe colon,
+ -foo: safe dash,
+ this is#not: a comment,
+})",
+L{
+ N("a!\"#$%&'()*+-./09:;<=>?@AZ[\\]^_`az{|~", "safe"),
+ N("?foo", "safe question mark"),
+ N(":foo", "safe colon"),
+ N("-foo", "safe dash"),
+ N("this is#not", "a comment"),
+});
+
+ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, impl seq",
+R"(
+- a!"#$%&'()*+,-./09:;<=>?@AZ[\]^_`az{|}~
+- ?foo
+- :foo
+- -foo
+- this is#not:a comment
+)",
+L{
+ N("a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~"),
+ N("?foo"),
+ N(":foo"),
+ N("-foo"),
+ N("this is#not:a comment"),
+});
+
+ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, expl seq",
+R"([
+ a!"#$%&'()*+-./09:;<=>?@AZ[\^_`az{|}~,
+ ?foo,
+ :foo,
+ -foo,
+ this is#not:a comment,
+])",
+L{
+ N("a!\"#$%&'()*+-./09:;<=>?@AZ[\\^_`az{|}~"),
+ N("?foo"),
+ N(":foo"),
+ N("-foo"),
+ N("this is#not:a comment"),
+});
+
+ADD_CASE_TO_GROUP("explicit key with line break in between",
+R"(
+? an explicit key
+: its value
+)",
+ L{N("an explicit key", "its value")}
+);
+
+ADD_CASE_TO_GROUP("explicit key 2nd, inside explicit map",
+R"(
+{
+ a simple key: a value,
+ ? an explicit key: another value,
+}
+)",
+ L{
+ N("a simple key", "a value"),
+ N("an explicit key", "another value"),
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key 1st, inside explicit map",
+R"(
+{
+ ? an explicit key: another value,
+ a simple key: a value,
+}
+)",
+ L{
+ N("an explicit key", "another value"),
+ N("a simple key", "a value"),
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key 2nd",
+R"(
+a simple key: a value
+? an explicit key: another value
+)",
+ L{
+ N("a simple key", "a value"),
+ N("an explicit key", "another value"),
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key 1st",
+R"(
+? an explicit key: another value
+a simple key: a value
+)",
+ L{
+ N("an explicit key", "another value"),
+ N("a simple key", "a value"),
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key nested in a map, 1st",
+R"(
+map:
+ ? an explicit key: another value
+ a simple key: a value
+? an explicit key deindented: its value
+)",
+ L{
+ N("map", L{
+ N("an explicit key", "another value"),
+ N("a simple key", "a value"),
+ }),
+ N("an explicit key deindented", "its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key nested in a seq, 1st",
+R"(
+- ? an explicit key: another value
+ a simple key: a value
+- ? another explicit key: its value
+)",
+ L{
+ N(L{
+ N("an explicit key", "another value"),
+ N("a simple key", "a value"),
+ }),
+ N(L{N("another explicit key", "its value")})
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, literal, clip",
+R"(? |
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, literal, keep",
+R"(? |+
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key\nthat has multiple lines\n\n", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, literal, strip",
+R"(? |-
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key\nthat has multiple lines", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, folded, clip",
+R"(? >
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key that has multiple lines\n", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, folded, keep",
+R"(? >+
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key that has multiple lines\n\n", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit block key, folded, strip",
+R"(? >-
+ This is a key
+ that has multiple lines
+
+: and this is its value
+)",
+ L{
+ N(QK, "This is a key that has multiple lines", "and this is its value")
+ }
+);
+
+ADD_CASE_TO_GROUP("explicit key, missing val 7W2P",
+R"(
+? a
+? b
+c:
+? d
+e:
+)",
+N(MAP, L{
+ N(KEYVAL, "a", {}),
+ N(KEYVAL, "b", {}),
+ N(KEYVAL, "c", {}),
+ N(KEYVAL, "d", {}),
+ N(KEYVAL, "e", {}),
+ })
+);
+
+ADD_CASE_TO_GROUP("explicit key, missing val ZWK4",
+R"(
+a: 1
+? b
+&anchor c: 3
+? d
+!!str e: 4
+? f
+)",
+N(MAP, L{
+ N("a", "1"),
+ N(KEYVAL, "b", {}),
+ N("c", AR(KEYANCH, "anchor"), "3"),
+ N(KEYVAL, "d", {}),
+ N(TS("!!str", "e"), "4"),
+ N(KEYVAL, "f", {}),
+ })
+);
+
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_generic_map.cpp b/thirdparty/ryml/test/test_generic_map.cpp
new file mode 100644
index 000000000..273c05cb4
--- /dev/null
+++ b/thirdparty/ryml/test/test_generic_map.cpp
@@ -0,0 +1,89 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(GENERIC_MAP)
+{
+
+ADD_CASE_TO_GROUP("generic map",
+R"(
+a simple key: a value # The KEY token is produced here.
+? a complex key
+: another value
+a mapping:
+ key 1: value 1
+ key 2: value 2
+a sequence:
+ - item 1
+ - item 2
+)",
+ L{
+ N("a simple key", "a value"),
+ N("a complex key", "another value"),
+ N("a mapping", L{N("key 1", "value 1"), N("key 2", "value 2")}),
+ N("a sequence", L{N("item 1"), N("item 2")}),
+ }
+);
+
+
+ADD_CASE_TO_GROUP("seq nested in map",
+R"(
+items:
+ - part_no: A4786
+ descrip: Water Bucket (Filled)
+ price: 1.47
+ quantity: 4
+ - part_no: E1628
+ descrip: High Heeled "Ruby" Slippers
+ size: 8
+ price: 133.7
+ quantity: 1
+)",
+L{
+ N{"items", L{
+ N{L{N{"part_no", "A4786"},
+ N{"descrip", "Water Bucket (Filled)"},
+ N{"price", "1.47"},
+ N{"quantity", "4"},}},
+ N{L{N{"part_no", "E1628"},
+ N{"descrip", "High Heeled \"Ruby\" Slippers"},
+ N{"size", "8"},
+ N{"price", "133.7"},
+ N{"quantity", "1"},}}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("seq nested in map, v2",
+R"(
+items:
+ -
+ part_no: A4786
+ descrip: Water Bucket (Filled)
+ price: 1.47
+ quantity: 4
+ -
+ part_no: E1628
+ descrip: High Heeled "Ruby" Slippers
+ size: 8
+ price: 133.7
+ quantity: 1
+)",
+L{
+ N{"items", L{
+ N{L{N{"part_no", "A4786"},
+ N{"descrip", "Water Bucket (Filled)"},
+ N{"price", "1.47"},
+ N{"quantity", "4"},}},
+ N{L{N{"part_no", "E1628"},
+ N{"descrip", "High Heeled \"Ruby\" Slippers"},
+ N{"size", "8"},
+ N{"price", "133.7"},
+ N{"quantity", "1"},}}}},
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_generic_seq.cpp b/thirdparty/ryml/test/test_generic_seq.cpp
new file mode 100644
index 000000000..45f9c1d3e
--- /dev/null
+++ b/thirdparty/ryml/test/test_generic_seq.cpp
@@ -0,0 +1,47 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(GENERIC_SEQ)
+{
+
+ADD_CASE_TO_GROUP("generic seq v0",
+R"(
+- item 1
+- item 2
+- - item 3.1
+ - item 3.2
+- key 1: value 1
+ key 2: value 2
+)",
+ L{
+ N("item 1"),
+ N("item 2"),
+ N(L{N("item 3.1"), N("item 3.2")}),
+ N(L{N("key 1", "value 1"), N("key 2", "value 2")})
+ }
+);
+
+ADD_CASE_TO_GROUP("generic seq v1",
+R"(
+- item 1
+- item 2
+-
+ - item 3.1
+ - item 3.2
+-
+ key 1: value 1
+ key 2: value 2
+)",
+ L{
+ N("item 1"),
+ N("item 2"),
+ N(L{N("item 3.1"), N("item 3.2")}),
+ N(L{N("key 1", "value 1"), N("key 2", "value 2")})
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_github_issues.cpp b/thirdparty/ryml/test/test_github_issues.cpp
new file mode 100644
index 000000000..3c12307fc
--- /dev/null
+++ b/thirdparty/ryml/test/test_github_issues.cpp
@@ -0,0 +1,590 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(github, 268)
+{
+ Tree tree = parse_in_arena(R"(
+ list:
+ - &bar bar
+ map:
+ node: *bar
+ )");
+ tree.resolve();
+ auto root = tree.rootref();
+ ASSERT_TRUE(root["map"].is_map());
+ ASSERT_TRUE(root["map"].has_child("node"));
+ ASSERT_EQ(root["map"]["node"], "bar");
+}
+
+TEST(github, 277)
+{
+ Tree tree = parse_in_arena(R"(
+ A: &A
+ V: 3
+ W: 4
+ B:
+ <<: *A
+ V: 5
+ X: 6
+ )");
+ const char *keys[] = {"V", "W", "X"};
+ const char *vals[] = {"5", "4", "6"};
+ tree.resolve();
+ auto root = tree.rootref();
+ ASSERT_TRUE(root["B"].is_map());
+ size_t num_childs = root["B"].num_children();
+ size_t child = 0;
+ ASSERT_EQ(num_childs, 3);
+ for (const auto node : root["B"].children())
+ {
+ EXPECT_EQ(node.key(), csubstr(keys[child], 1));
+ EXPECT_EQ(node.val(), csubstr(vals[child], 1));
+ child++;
+ }
+ // test whether the tree is corrupted
+ test_invariants(tree);
+ child = num_childs;
+ for (size_t n = tree.last_child(root["B"].id()); n != NONE; n = tree.prev_sibling(n))
+ {
+ ASSERT_NE(child, 0);
+ EXPECT_EQ(tree.key(n), csubstr(keys[child - 1], 1));
+ child--;
+ }
+}
+
+
+TEST(github, 78)
+{
+ Tree t = parse_in_arena("{foo: 1, bar: [2, 3]}");
+ EXPECT_EQ(t["foo"].val(), "1");
+ EXPECT_EQ(t["bar"][0].val(), "2");
+ EXPECT_EQ(t["bar"][1].val(), "3");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(github, 60)
+{
+ Tree tree = parse_in_arena(R"(
+ traits:
+ roleBonuses:
+ - bonus: 5
+ bonusText:
+ de: Bonus auf die Virusstärke von <a href=showinfo:22177>Relikt-</a>
+ und <a href=showinfo:22175>Datenanalysatoren</a>
+ en: bonus to <a href=showinfo:22177>Relic</a> and <a href=showinfo:22175>Data
+ Analyzer</a> virus strength
+ fr: de bonus à la puissance du virus des <a href=showinfo:22177>analyseurs
+ de reliques</a> et des <a href=showinfo:22175>analyseurs de données</a>
+ ja: <a href=showinfo:22177>遺物アナライザー</a>と<a href=showinfo:22175>データアナライザー</a>のウイルス強度が増加
+ ru: повышается степень опасности вирусов, применяемых в <a href=showinfo:22175>комплексах
+ анализа данных</a> и <a href=showinfo:22177>комплексах анализа
+ артефактов</a>
+ zh: <a href="showinfo:22177">遗迹分析仪</a>和<a href="showinfo:22175">数据分析仪</a>病毒强度加成
+ importance: 1
+ unitID: 139
+)");
+ auto root = tree.rootref();
+ ASSERT_TRUE(root.is_map());
+ ASSERT_TRUE(root.has_child("traits"));
+ auto rb = root["traits"]["roleBonuses"][0];
+ ASSERT_TRUE(rb.valid());
+ EXPECT_EQ(rb["bonus"].val(), "5");
+ auto txt = rb["bonusText"];
+ ASSERT_TRUE(txt.valid());
+ ASSERT_TRUE(txt.is_map());
+ EXPECT_TRUE(txt.has_child("de"));
+ EXPECT_TRUE(txt.has_child("en"));
+ EXPECT_TRUE(txt.has_child("fr"));
+ EXPECT_TRUE(txt.has_child("ja"));
+ EXPECT_TRUE(txt.has_child("ru"));
+ EXPECT_TRUE(txt.has_child("zh"));
+ EXPECT_EQ(txt["de"].val(), "Bonus auf die Virusstärke von <a href=showinfo:22177>Relikt-</a> und <a href=showinfo:22175>Datenanalysatoren</a>");
+ EXPECT_EQ(txt["en"].val(), "bonus to <a href=showinfo:22177>Relic</a> and <a href=showinfo:22175>Data Analyzer</a> virus strength");
+ EXPECT_EQ(txt["fr"].val(), "de bonus à la puissance du virus des <a href=showinfo:22177>analyseurs de reliques</a> et des <a href=showinfo:22175>analyseurs de données</a>");
+ EXPECT_EQ(txt["ja"].val(), "<a href=showinfo:22177>遺物アナライザー</a>と<a href=showinfo:22175>データアナライザー</a>のウイルス強度が増加");
+ EXPECT_EQ(txt["ru"].val(), "повышается степень опасности вирусов, применяемых в <a href=showinfo:22175>комплексах анализа данных</a> и <a href=showinfo:22177>комплексах анализа артефактов</a>");
+ EXPECT_EQ(txt["zh"].val(), "<a href=\"showinfo:22177\">遗迹分析仪</a>和<a href=\"showinfo:22175\">数据分析仪</a>病毒强度加成");
+
+
+ tree = parse_in_arena(R"(208:
+ basePrice: 3000.0
+ description:
+ de: Ursprünglich als Rakete für den Fangschuss entworfen, um einem beschädigten
+ Schiff den Todesstoß zu geben, hat die Inferno Heavy Missile seither eine
+ Reihe technischer Upgrades durchlaufen. Die neueste Version hat eine leichtere
+ Sprengladung als das Original, aber stark verbesserte Lenksysteme.
+ en: Originally designed as a 'finisher' - the killing blow to a crippled ship
+ - the Inferno heavy missile has since gone through various technological
+ upgrades. The latest version has a lighter payload than the original,
+ but much improved guidance systems.
+ fr: Conçu à l'origine pour donner le coup de grâce, le missile lourd Inferno
+ a depuis subi de nombreuses améliorations techniques. La dernière version
+ emporte une charge utile réduite par rapport à l'originale, mais est dotée
+ de systèmes de guidage améliorés.
+ ja: 元々「フィニッシャー」―大破した船にとどめを刺す兵器として設計されたインフェルノヘビーミサイルは、以来各種の技術改良を経てきた。現行型は初期型より軽い弾頭を採用しているが、それを補って余りある優れた誘導システムを持つ。
+ ru: Тяжелая ракета Inferno изначально была спроектирована как «оружие последнего
+ удара» для уничтожения подбитых кораблей. С тех пор было выпущено несколько
+ ее модификаций. В последней модификации используется заряд меньшей мощности,
+ но более совершенная система наведения.
+ zh: 炼狱重型导弹历经多种技术改良,原本被设计为给予落魄敌舰最后一击的“终结者”角色。相比原型,最新版导弹载荷较轻,但装配了大幅改进的制导系统。
+ graphicID: 20048
+ groupID: 385
+ iconID: 188
+ marketGroupID: 924
+ mass: 1000.0
+ name:
+ de: Inferno Heavy Missile
+ en: Inferno Heavy Missile
+ fr: Missile lourd Inferno
+ ja: インフェルノヘビーミサイル
+ ru: Inferno Heavy Missile
+ zh: 炼狱重型导弹
+ portionSize: 100
+ published: true
+ radius: 300.0
+ volume: 0.03
+)");
+ root = tree.rootref()["208"];
+ EXPECT_EQ(root["description"]["ja"].val(), "元々「フィニッシャー」―大破した船にとどめを刺す兵器として設計されたインフェルノヘビーミサイルは、以来各種の技術改良を経てきた。現行型は初期型より軽い弾頭を採用しているが、それを補って余りある優れた誘導システムを持つ。");
+ EXPECT_EQ(root["name"]["ja"].val(), "インフェルノヘビーミサイル");
+ EXPECT_EQ(root["name"]["zh"].val(), "炼狱重型导弹");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(github, 31)
+{
+ Tree tree;
+ NodeRef r = tree.rootref();
+ r |= MAP;
+
+ auto meas = r["meas"];
+ meas |= MAP;
+
+ auto plist = meas["createParameterList"];
+ plist |= SEQ;
+
+ {
+ auto lumi = plist.append_child();
+ lumi << "Lumi";
+ EXPECT_TRUE(lumi.is_val());
+ }
+
+ {
+ auto lumi = plist.append_child();
+ lumi |= MAP;
+ lumi["value"] << 1;
+ lumi["relErr"] << 0.1;
+ EXPECT_TRUE(lumi.is_map());
+ }
+
+ {
+ ExpectError::check_assertion(&tree, [&](){
+ auto lumi = plist.append_child();
+ lumi << "Lumi";
+ lumi |= MAP;
+ });
+ }
+
+ {
+ ExpectError::check_assertion(&tree, [&](){
+ auto lumi = plist.append_child();
+ lumi << "Lumi";
+ lumi |= SEQ;
+ });
+ }
+
+ {
+ ExpectError::check_assertion(&tree, [&](){
+ auto lumi = plist.append_child();
+ lumi |= MAP;
+ lumi << "Lumi";
+ });
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(GITHUB_ISSUES)
+{
+
+ADD_CASE_TO_GROUP("github3-problem1",
+R"(
+translation: [-2, -2, 5])",
+L{N("translation", L{N("-2"), N("-2"), N("5")})}
+);
+
+// these must work without quotes
+ADD_CASE_TO_GROUP("github3-problem2-ex1",
+R"(
+audio resource:
+)",
+L{N(KEYVAL, "audio resource", /*"~"*/{})}
+);
+ADD_CASE_TO_GROUP("github3-problem2-ex2",
+R"(
+audio resource:
+more:
+ example: y
+)",
+L{N(KEYVAL, "audio resource", /*"~"*/{}), N("more", L{N("example", "y")})}
+);
+
+ADD_CASE_TO_GROUP("github3-problem3",
+R"(component:
+ type: perspective camera component
+ some_data: {} # this was working
+ data:
+ {} # but this was not working
+)",
+L{N("component", L{
+ N("type", "perspective camera component"),
+ N(KEYMAP, "some_data", L{}),
+ N(KEYMAP, "data", L{})
+ }
+)}
+);
+
+/* THIS IS CAUSING VS TO CRASH OUT OF HEAP SPACE
+ADD_CASE_TO_GROUP("github3",
+R"(
+universe:
+ objects:
+ object:
+ uuid: A7AB039C0EF3A74480A1B398247039A7
+ components:
+ - component:
+ type: name component
+ data:
+ object name: Root Node
+ - component:
+ type: transform component
+ data:
+ translation: [-2, -2, 5]
+ rotation: [0, 0, 0, 1]
+ scaling: [1, 1, 1]
+ - component:
+ type: perspective camera component
+ data:
+ {}
+ - component:
+ type: mesh component
+ data:
+ mesh resource: TODO
+ - component:
+ type: lua script component
+ data:
+ {}
+ - component:
+ type: audio component
+ data:
+ audio resource: ''
+ type: 0
+ current sample: 184102
+ spatialized: true
+ children:
+ - object:
+ uuid: E1C364A925D649408E83C8EEF5179A87
+ components:
+ - component:
+ type: name component
+ data:
+ object name: Prepend
+ children:
+ []
+ - object:
+ uuid: 377DBA885AF4CD42B8A56BB3471F60E5
+ components:
+ - component:
+ type: name component
+ data:
+ object name: pivot
+ children:
+ []
+ - object:
+ uuid: 6DD1835797DADB4F95232CE7E9DE41BA
+ components:
+ - component:
+ type: name component
+ data:
+ object name: Append
+ children:
+ []
+)",
+ L{N("universe", L{
+ N("objects", L{
+ N("object", L{
+ N("uuid", "A7AB039C0EF3A74480A1B398247039A7"),
+ N("components", L{
+ N(L{N("component", L{N("type", "name component"), N("data", L{N("object name", "Root Node")}), }), }),
+ N(L{N("component", L{N("type", "transform component"), N("data", L{N("translation", L{N("-2"), N("-2"), N("5")}), N("rotation", L{N("0"), N("0"), N("0"), N("1")}), N("scaling", L{N("1"), N("1"), N("1")}),}), }), }),
+ N(L{N("component", L{N("type", "perspective camera component"), N(KEYMAP, "data", L{}), }), }),
+ N(L{N("component", L{N("type", "mesh component"), N("data", L{N("mesh resource", "TODO")}), }), }),
+ N(L{N("component", L{N("type", "lua script component"), N(KEYMAP, "data", L{}), }), }),
+ N(L{N("component", L{N("type", "audio component"), N("data", L{N("audio resource", ""), N("type", "0"), N("current sample", "184102"), N("spatialized", "true"), }), }), }), // component
+ }), // components
+ N("children", L{
+ N(L{N("object", L{
+ N("uuid", "E1C364A925D649408E83C8EEF5179A87"),
+ N("components", L{N(L{N("component", L{N("type", "name component"), N("data", L{N("object name", "Prepend")}), }), }), }),
+ N(KEYSEQ, "children", L{}),
+ }), }), // object
+ N(L{N("object", L{
+ N("uuid", "377DBA885AF4CD42B8A56BB3471F60E5"),
+ N("components", L{N(L{N("component", L{N("type", "name component"), N("data", L{N("object name", "pivot")}), }), }), }),
+ N(KEYSEQ, "children", L{}),
+ }), }), // object
+ N(L{N("object", L{
+ N("uuid", "6DD1835797DADB4F95232CE7E9DE41BA"),
+ N("components", L{N(L{N("component", L{N("type", "name component"), N("data", L{N("object name", "Append")}), }), }), }),
+ N(KEYSEQ, "children", L{}),
+ }), }), // object
+ }), // children
+ }), // object
+ }) // objects
+ }) // universe
+ }
+);
+*/
+
+ADD_CASE_TO_GROUP("github6-problem1",
+R"(
+- UQxRibHKEDI:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+- DcYsg8VFdC0:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+- Yt3ymqZXzLY:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+)",
+L{
+N(L{N("UQxRibHKEDI", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+N(L{N("DcYsg8VFdC0", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+N(L{N("Yt3ymqZXzLY", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+}
+);
+
+ADD_CASE_TO_GROUP("github6",
+R"(videos:
+- UQxRibHKEDI:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+- DcYsg8VFdC0:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+- Yt3ymqZXzLY:
+ - 0.mp4
+ - 1.mp4
+ - 2.mp4
+ - 3.mp4
+)",
+L{N("videos", L{
+N(L{N("UQxRibHKEDI", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+N(L{N("DcYsg8VFdC0", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+N(L{N("Yt3ymqZXzLY", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
+})}
+);
+
+ADD_CASE_TO_GROUP("github34/ex1",
+R"(
+# correct:
+MessageID1: 'MapRegion_HyrulePrairie'
+MessageID2: "MapRegion_HyrulePrairie"
+MessageID3: 'MapRegion_HyrulePrairie'
+MessageID4: "MapRegion_HyrulePrairie"
+# incorrect: uninitialised memory?
+MessageID5: 'MapRegion_HyrulePrairie'
+MessageID6: "MapRegion_HyrulePrairie"
+MessageID7: 'MapRegion_HyrulePrairie'
+MessageID8: "MapRegion_HyrulePrairie"
+MessageID9: 'MapRegion_HyrulePrairie'
+MessageID0: "MapRegion_HyrulePrairie"
+)",
+L{
+ N(QV, "MessageID1", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID2", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID3", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID4", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID5", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID6", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID7", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID8", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID9", "MapRegion_HyrulePrairie"),
+ N(QV, "MessageID0", "MapRegion_HyrulePrairie"),
+}
+);
+
+ADD_CASE_TO_GROUP("github34/ex2",
+R"(
+# correct:
+- MessageID1: 'MapRegion_HyrulePrairie'
+- MessageID2: "MapRegion_HyrulePrairie"
+- MessageID3: 'MapRegion_HyrulePrairie'
+- MessageID4: "MapRegion_HyrulePrairie"
+# incorrect: uninitialised memory?
+- MessageID5: 'MapRegion_HyrulePrairie'
+- MessageID6: "MapRegion_HyrulePrairie"
+- MessageID7: 'MapRegion_HyrulePrairie'
+- MessageID8: "MapRegion_HyrulePrairie"
+- MessageID9: 'MapRegion_HyrulePrairie'
+- MessageID0: "MapRegion_HyrulePrairie"
+)",
+L{
+ N(L{N(QV, "MessageID1", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID2", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID3", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID4", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID5", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID6", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID7", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID8", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID9", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID0", "MapRegion_HyrulePrairie")}),
+}
+);
+
+ADD_CASE_TO_GROUP("github34",
+R"(
+# incorrect: uninitialised memory?
+- MessageID1: 'MapRegion_HyrulePrairie'
+- MessageID2: "MapRegion_HyrulePrairie"
+
+# incorrect: uninitialised memory?
+- MessageID3: 'MapRegion_HyrulePrairie '
+- MessageID4: "MapRegion_HyrulePrairie "
+
+# incorrect: for some reason the ' is included in the string
+- MessageID5: 'MapRegion_HyrulePrairie '
+- MessageID6: 'MapRegion_HyrulePrairie '
+- MessageID7: "MapRegion_HyrulePrairie "
+- MessageID8: "MapRegion_HyrulePrairie "
+
+# incorrect: same issue
+- MessageID9: 'MapRegion_HyrulePrairie '
+- MessageID10: "MapRegion_HyrulePrairie "
+
+# incorrect: still has the trailing quote
+- MessageID11: 'MapRegion_HyrulePrairie'
+- MessageID12: "MapRegion_HyrulePrairie"
+
+# the string is parsed correctly in this case
+- key1: true1
+ MessageID1: 'MapRegion_HyrulePrairie1 '
+- key2: true2
+ MessageID2: "MapRegion_HyrulePrairie2 "
+)",
+L{
+ N(L{N(QV, "MessageID1", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID2", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID3", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID4", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID5", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID6", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID7", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID8", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID9", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID10", "MapRegion_HyrulePrairie ")}),
+ N(L{N(QV, "MessageID11", "MapRegion_HyrulePrairie")}),
+ N(L{N(QV, "MessageID12", "MapRegion_HyrulePrairie")}),
+ N(L{N("key1", "true1"), N(QV, "MessageID1", "MapRegion_HyrulePrairie1 ")}),
+ N(L{N("key2", "true2"), N(QV, "MessageID2", "MapRegion_HyrulePrairie2 ")}),
+}
+);
+
+ADD_CASE_TO_GROUP("github35/expected_error11", EXPECT_PARSE_ERROR,
+R"(
+# *segfault* // not anymore!
+- key1: true1
+ MessageID1: 'MapRegion_HyrulePrairie1 '
+)",
+ LineCol(4, 1)
+);
+
+ADD_CASE_TO_GROUP("github35/expected_error12", EXPECT_PARSE_ERROR,
+R"(
+# *segfault* // not anymore!
+- key2: true2
+ MessageID2: "MapRegion_HyrulePrairie2 "
+)",
+ LineCol(4, 1)
+);
+
+ADD_CASE_TO_GROUP("github35/expected_error21", EXPECT_PARSE_ERROR,
+R"(
+# *segfault* // not anymore!
+- key1: true1
+ MessageID1: 'MapRegion_HyrulePrairie1 '
+)",
+ LineCol(4, 15)
+);
+
+ADD_CASE_TO_GROUP("github35/expected_error22", EXPECT_PARSE_ERROR,
+R"(
+# *segfault* // not anymore!
+- key2: true2
+ MessageID2: "MapRegion_HyrulePrairie2 "
+)",
+ LineCol(4, 15)
+);
+
+ADD_CASE_TO_GROUP("github128/1", RESOLVE_REFS | EXPECT_PARSE_ERROR, "a: *invalid");
+ADD_CASE_TO_GROUP("github128/2", RESOLVE_REFS/* | HAS_PARSE_ERROR*/, "*", N(DOCVAL, "*"));
+
+ADD_CASE_TO_GROUP("github129", RESOLVE_REFS, R"(
+ref: &ref ref_val
+a: *ref # resolve the reference
+b: '*ref' # don't resolve, it's just a string
+c: "*ref" # don't resolve, it's just a string
+d: > # don't resolve, it's just a string
+ *ref
+e: >- # don't resolve, it's just a string
+ *ref
+f: >+ # don't resolve, it's just a string
+ *ref
+g: | # don't resolve, it's just a string
+ *ref
+h: |- # don't resolve, it's just a string
+ *ref
+i: |+ # don't resolve, it's just a string
+ *ref
+)", L{
+ N("ref", "ref_val"),
+ N("a", "ref_val"), // this should be resolved
+ N(QV, "b", "*ref"), // this should not be resolved (just a string)
+ N(QV, "c", "*ref"), // this should not be resolved (just a string)
+ N(QV, "d", "*ref\n"), // this should not be resolved (just a string)
+ N(QV, "e", "*ref"), // this should not be resolved (just a string)
+ N(QV, "f", "*ref\n"), // this should not be resolved (just a string)
+ N(QV, "g", "*ref\n"), // this should not be resolved (just a string)
+ N(QV, "h", "*ref"), // this should not be resolved (just a string)
+ N(QV, "i", "*ref\n"), // this should not be resolved (just a string)
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_group.cpp b/thirdparty/ryml/test/test_group.cpp
new file mode 100644
index 000000000..77ef5fa23
--- /dev/null
+++ b/thirdparty/ryml/test/test_group.cpp
@@ -0,0 +1,732 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/detail/print.hpp"
+#endif
+#include "test_group.hpp"
+#include "test_case.hpp"
+#include <c4/fs/fs.hpp>
+#include <fstream>
+#include <stdexcept>
+
+#define RYML_NFO (RYML_DBG || 0)
+
+//-----------------------------------------------------------------------------
+namespace c4 {
+namespace yml {
+
+void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
+{
+ #ifdef RYML_NFO
+ std::cout << "---------------\n";
+ std::cout << c->src;
+ std::cout << "---------------\n";
+ #endif
+
+ if(c->flags & EXPECT_PARSE_ERROR)
+ {
+ auto flags = c->flags;
+ ExpectError::do_check(&cd->parsed_tree, [this, cd, flags](){
+ parse_in_place(c->fileline, cd->src, &cd->parsed_tree);
+ if(flags & RESOLVE_REFS)
+ cd->parsed_tree.resolve();
+ #ifdef RYML_DBG
+ // if this point was reached, then it means that the expected
+ // error failed to occur. So print debugging info.
+ std::cout << "failed to catch expected error while parsing.\nPARSED TREE:\n";
+ print_tree(cd->parsed_tree);
+ #endif
+ }, c->expected_location);
+ return;
+ }
+
+ cd->parsed_tree.clear();
+ parse_in_place(c->fileline, cd->src, &cd->parsed_tree);
+
+ #ifdef RYML_NFO
+ std::cout << "REF TREE:\n";
+ print_tree(c->root);
+ std::cout << "PARSED TREE:\n";
+ print_tree(cd->parsed_tree);
+ #endif
+
+ {
+ SCOPED_TRACE("checking tree invariants of unresolved parsed tree");
+ test_invariants(cd->parsed_tree);
+ }
+ {
+ SCOPED_TRACE("checking node invariants of unresolved parsed tree");
+ test_invariants(cd->parsed_tree.rootref());
+ }
+
+ if(c->flags & RESOLVE_REFS)
+ {
+ cd->parsed_tree.resolve();
+ #ifdef RYML_NFO
+ std::cout << "resolved tree!!!\n";
+ print_tree(cd->parsed_tree);
+ #endif
+ {
+ SCOPED_TRACE("checking tree invariants of resolved parsed tree");
+ test_invariants(cd->parsed_tree);
+ }
+ {
+ SCOPED_TRACE("checking node invariants of resolved parsed tree");
+ test_invariants(cd->parsed_tree.rootref());
+ }
+ }
+
+ {
+ SCOPED_TRACE("comparing parsed tree to ref tree");
+ EXPECT_GE(cd->parsed_tree.capacity(), c->root.reccount());
+ EXPECT_EQ(cd->parsed_tree.size(), c->root.reccount());
+ c->root.compare(cd->parsed_tree.rootref());
+ }
+
+ if(c->flags & RESOLVE_REFS)
+ {
+ cd->parsed_tree.reorder();
+ #ifdef RYML_NFO
+ std::cout << "reordered tree!!!\n";
+ print_tree(cd->parsed_tree);
+ #endif
+ {
+ SCOPED_TRACE("checking tree invariants of reordered parsed tree after resolving");
+ test_invariants(cd->parsed_tree);
+ }
+ {
+ SCOPED_TRACE("checking node invariants of reordered parsed tree after resolving");
+ test_invariants(cd->parsed_tree.rootref());
+ }
+
+ {
+ SCOPED_TRACE("comparing parsed tree to ref tree");
+ EXPECT_GE(cd->parsed_tree.capacity(), c->root.reccount());
+ EXPECT_EQ(cd->parsed_tree.size(), c->root.reccount());
+ c->root.compare(cd->parsed_tree.rootref());
+ }
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_yml_stdout(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit(cd);
+ cd->numbytes_stdout = emit_yaml(cd->parsed_tree);
+ EXPECT_EQ(cd->numbytes_stdout, cd->emitted_yml.size());
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_json_stdout(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit_json(cd);
+ cd->numbytes_stdout_json = emit_json(cd->parsed_tree);
+ EXPECT_EQ(cd->numbytes_stdout_json, cd->emitted_json.size());
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_yml_cout(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit(cd);
+ std::cout << cd->parsed_tree;
+ std::cout << cd->parsed_tree.rootref();
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_json_cout(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit_json(cd);
+ std::cout << as_json(cd->parsed_tree);
+ std::cout << as_json(cd->parsed_tree.rootref());
+}
+
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_yml_stringstream(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit(cd);
+ {
+ std::stringstream ss;
+ ss << cd->parsed_tree;
+ std::string actual = ss.str();
+ EXPECT_EQ(actual, cd->emit_buf);
+ }
+ {
+ std::stringstream ss;
+ ss << cd->parsed_tree.rootref();
+ std::string actual = ss.str();
+ EXPECT_EQ(actual, cd->emit_buf);
+ }
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_json_stringstream(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit_json(cd);
+ {
+ std::stringstream ss;
+ ss << as_json(cd->parsed_tree);
+ std::string actual = ss.str();
+ EXPECT_EQ(actual, cd->emitjson_buf);
+ }
+ {
+ std::stringstream ss;
+ ss << as_json(cd->parsed_tree.rootref());
+ std::string actual = ss.str();
+ EXPECT_EQ(actual, cd->emitjson_buf);
+ }
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_yml_ofstream(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit(cd);
+ {
+ auto fn = fs::tmpnam<std::string>();
+ {
+ std::ofstream f(fn, std::ios::binary);
+ f << cd->parsed_tree;
+ }
+ auto actual = fs::file_get_contents<std::string>(fn.c_str());
+ EXPECT_EQ(actual, cd->emit_buf);
+ fs::rmfile(fn.c_str());
+ }
+ {
+ auto fn = fs::tmpnam<std::string>();
+ {
+ std::ofstream f(fn, std::ios::binary);
+ f << cd->parsed_tree.rootref();
+ }
+ auto actual = fs::file_get_contents<std::string>(fn.c_str());
+ EXPECT_EQ(actual, cd->emit_buf);
+ fs::rmfile(fn.c_str());
+ }
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_json_ofstream(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit_json(cd);
+ {
+ auto fn = fs::tmpnam<std::string>();
+ {
+ std::ofstream f(fn, std::ios::binary);
+ f << as_json(cd->parsed_tree);
+ }
+ auto actual = fs::file_get_contents<std::string>(fn.c_str());
+ EXPECT_EQ(actual, cd->emitjson_buf);
+ fs::rmfile(fn.c_str());
+ }
+ {
+ auto fn = fs::tmpnam<std::string>();
+ {
+ std::ofstream f(fn, std::ios::binary);
+ f << as_json(cd->parsed_tree.rootref());
+ }
+ auto actual = fs::file_get_contents<std::string>(fn.c_str());
+ EXPECT_EQ(actual, cd->emitjson_buf);
+ fs::rmfile(fn.c_str());
+ }
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_yml_string(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit(cd);
+ auto em = emitrs_yaml(cd->parsed_tree, &cd->emit_buf);
+ EXPECT_EQ(em.len, cd->emit_buf.size());
+ EXPECT_EQ(em.len, cd->numbytes_stdout);
+ #ifdef RYML_NFO
+ std::cout << em;
+ #endif
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emit_json_string(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ _ensure_emit_json(cd);
+ auto em = emitrs_json(cd->parsed_tree, &cd->emit_buf);
+ EXPECT_EQ(em.len, cd->emitjson_buf.size());
+ EXPECT_EQ(em.len, cd->numbytes_stdout_json);
+ #ifdef RYML_NFO
+ std::cout << em;
+ #endif
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emitrs(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ using vtype = std::vector<char>;
+ using stype = std::string;
+ vtype vv, v = emitrs_yaml<vtype>(cd->parsed_tree);
+ stype ss, s = emitrs_yaml<stype>(cd->parsed_tree);
+ EXPECT_EQ(to_csubstr(v), to_csubstr(s));
+ csubstr svv = emitrs_yaml(cd->parsed_tree, &vv);
+ csubstr sss = emitrs_yaml(cd->parsed_tree, &ss);
+ EXPECT_EQ(svv, sss);
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emitrs_json(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ using vtype = std::vector<char>;
+ using stype = std::string;
+ vtype vv, v = emitrs_json<vtype>(cd->parsed_tree);
+ stype ss, s = emitrs_json<stype>(cd->parsed_tree);
+ EXPECT_EQ(to_csubstr(v), to_csubstr(s));
+ csubstr svv = emitrs_json(cd->parsed_tree, &vv);
+ csubstr sss = emitrs_json(cd->parsed_tree, &ss);
+ EXPECT_EQ(svv, sss);
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emitrs_cfile(CaseDataLineEndings *cd)
+{
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ auto s = emitrs_yaml<std::string>(cd->parsed_tree);
+ std::string r;
+ {
+ c4::fs::ScopedTmpFile f;
+ emit_yaml(cd->parsed_tree, f.m_file);
+ fflush(f.m_file);
+ r = f.contents<std::string>();
+ }
+ EXPECT_EQ(s, r);
+}
+
+//-----------------------------------------------------------------------------
+void YmlTestCase::_test_emitrs_json_cfile(CaseDataLineEndings *cd)
+{
+ if(!(c->flags & JSON_ALSO))
+ return;
+ if(c->flags & EXPECT_PARSE_ERROR)
+ return;
+ _ensure_parse(cd);
+ auto s = emitrs_json<std::string>(cd->parsed_tree);
+ std::string r;
+ {
+ c4::fs::ScopedTmpFile f;
+ emit_json(cd->parsed_tree, f.m_file);
+ fflush(f.m_file);
+ r = f.contents<std::string>();
+ }
+ EXPECT_EQ(s, r);
+}
+
+
+//-----------------------------------------------------------------------------
+// Full round trip: parse the source, emit it, parse the emitted output again,
+// and check that both trees are equivalent to each other and to the reference
+// tree. Quoting differences are tolerated when comparing against the ref tree.
+void YmlTestCase::_test_complete_round_trip(CaseDataLineEndings *cd)
+{
+    if(c->flags & EXPECT_PARSE_ERROR)
+        return; // nothing to round-trip when parsing is expected to fail
+    _ensure_parse(cd);
+    _ensure_emit(cd);
+    {
+        SCOPED_TRACE("parsing emitted yml");
+        cd->parse_buf = cd->emit_buf;
+        cd->parsed_yml = to_substr(cd->parse_buf);
+        parse_in_place(c->fileline, cd->parsed_yml, &cd->emitted_tree);
+    }
+    #ifdef RYML_NFO
+    std::cout << "~~~~~~~~~~~~~~ src yml:\n";
+    _c4presc(cd->src);
+    std::cout << "~~~~~~~~~~~~~~ parsed tree:\n";
+    print_tree(cd->parsed_tree);
+    std::cout << "~~~~~~~~~~~~~~ emitted yml:\n";
+    _c4presc(cd->emitted_yml);
+    std::cout << "~~~~~~~~~~~~~~ emitted tree:\n";
+    print_tree(cd->emitted_tree);
+    std::cout << "~~~~~~~~~~~~~~\n";
+    #endif
+    {
+        // fix: this trace previously said "emitted tree", but it is the
+        // parsed tree whose invariants are checked here
+        SCOPED_TRACE("checking node invariants of parsed tree");
+        test_invariants(cd->parsed_tree.rootref());
+    }
+    {
+        SCOPED_TRACE("checking node invariants of emitted tree");
+        test_invariants(cd->emitted_tree.rootref());
+    }
+    {
+        SCOPED_TRACE("comparing emitted and parsed tree");
+        test_compare(cd->emitted_tree, cd->parsed_tree);
+    }
+    {
+        SCOPED_TRACE("checking tree invariants of emitted tree");
+        test_invariants(cd->emitted_tree);
+    }
+    {
+        SCOPED_TRACE("comparing parsed tree to ref tree");
+        EXPECT_GE(cd->parsed_tree.capacity(), c->root.reccount());
+        EXPECT_EQ(cd->parsed_tree.size(), c->root.reccount());
+        c->root.compare(cd->parsed_tree.rootref());
+    }
+    {
+        SCOPED_TRACE("comparing emitted tree to ref tree");
+        EXPECT_GE(cd->emitted_tree.capacity(), c->root.reccount());
+        EXPECT_EQ(cd->emitted_tree.size(), c->root.reccount());
+        // in this case, we can ignore whether scalars are quoted.
+        // Because it can happen that a scalar was quoted in the
+        // original file, but the re-emitted data does not quote the
+        // scalars.
+        c->root.compare(cd->emitted_tree.rootref(), true);
+    }
+}
+
+
+//-----------------------------------------------------------------------------
+// Full JSON round trip: parse the source, emit JSON, parse the emitted JSON,
+// and check that both trees are equivalent to each other and to the reference
+// tree (ignoring quote status when comparing against the ref tree).
+void YmlTestCase::_test_complete_round_trip_json(CaseDataLineEndings *cd)
+{
+    if(!(c->flags & JSON_ALSO))
+        return; // this case has no JSON equivalent
+    if(c->flags & EXPECT_PARSE_ERROR)
+        return;
+    _ensure_parse(cd);
+    _ensure_emit_json(cd);
+    {
+        SCOPED_TRACE("parsing emitted json");
+        cd->parse_buf_json = cd->emitjson_buf;
+        cd->parsed_json = to_substr(cd->parse_buf_json);
+        parse_in_place(c->fileline, cd->parsed_json, &cd->emitted_tree_json);
+    }
+    #ifdef RYML_NFO
+    std::cout << "~~~~~~~~~~~~~~ src yml:\n";
+    _c4presc(cd->src);
+    std::cout << "~~~~~~~~~~~~~~ parsed tree:\n";
+    print_tree(cd->parsed_tree);
+    std::cout << "~~~~~~~~~~~~~~ emitted json:\n";
+    _c4presc(cd->emitted_json);
+    std::cout << "~~~~~~~~~~~~~~ emitted json tree:\n";
+    print_tree(cd->emitted_tree_json);
+    std::cout << "~~~~~~~~~~~~~~\n";
+    #endif
+    {
+        // fix: this trace previously said "emitted tree", but it is the
+        // parsed tree whose invariants are checked here
+        SCOPED_TRACE("checking node invariants of parsed tree");
+        test_invariants(cd->parsed_tree.rootref());
+    }
+    {
+        SCOPED_TRACE("checking node invariants of emitted json tree");
+        test_invariants(cd->emitted_tree_json.rootref());
+    }
+    {
+        SCOPED_TRACE("comparing emitted json and parsed tree");
+        test_compare(cd->emitted_tree_json, cd->parsed_tree);
+    }
+    {
+        SCOPED_TRACE("checking tree invariants of emitted json tree");
+        test_invariants(cd->emitted_tree_json);
+    }
+    {
+        SCOPED_TRACE("comparing parsed tree to ref tree");
+        EXPECT_GE(cd->parsed_tree.capacity(), c->root.reccount());
+        EXPECT_EQ(cd->parsed_tree.size(), c->root.reccount());
+        c->root.compare(cd->parsed_tree.rootref());
+    }
+    {
+        // fix: label the json tree correctly (trace previously said "emitted tree")
+        SCOPED_TRACE("comparing emitted json tree to ref tree");
+        EXPECT_GE(cd->emitted_tree_json.capacity(), c->root.reccount());
+        EXPECT_EQ(cd->emitted_tree_json.size(), c->root.reccount());
+        // in this case, we can ignore whether scalars are quoted.
+        // Because it can happen that a scalar was quoted in the
+        // original file, but the re-emitted data does not quote the
+        // scalars.
+        c->root.compare(cd->emitted_tree_json.rootref(), true);
+    }
+}
+
+//-----------------------------------------------------------------------------
+// Builds a brand-new tree programmatically from the reference CaseNode tree
+// and verifies that the recreated tree matches the reference.
+void YmlTestCase::_test_recreate_from_ref(CaseDataLineEndings *cd)
+{
+    if(c->flags & EXPECT_PARSE_ERROR)
+        return;
+    // consistency: reuse the fixture helper instead of duplicating its body
+    _ensure_parse(cd);
+    if(cd->emit_buf.empty())
+        cd->emitted_yml = emitrs_yaml(cd->parsed_tree, &cd->emit_buf);
+    {
+        SCOPED_TRACE("recreating a new tree from the ref tree");
+        cd->recreated.reserve(cd->parsed_tree.size());
+        NodeRef r = cd->recreated.rootref();
+        c->root.recreate(&r);
+    }
+    #ifdef RYML_NFO
+    std::cout << "REF TREE:\n";
+    print_tree(c->root);
+    std::cout << "RECREATED TREE:\n";
+    print_tree(cd->recreated);
+    #endif
+    {
+        SCOPED_TRACE("checking node invariants of recreated tree");
+        test_invariants(cd->recreated.rootref());
+    }
+    {
+        SCOPED_TRACE("checking tree invariants of recreated tree");
+        test_invariants(cd->recreated);
+    }
+    {
+        SCOPED_TRACE("comparing recreated tree to ref tree");
+        c->root.compare(cd->recreated.rootref());
+    }
+}
+
+//-----------------------------------------------------------------------------
+// Parameterized test registrations: each fixture method is exercised once per
+// line-ending flavor of the case data (unix "\n" vs windows "\r\n"), and the
+// JSON variants use the corresponding *_json case data.
+TEST_P(YmlTestCase, parse_unix)
+{
+    SCOPED_TRACE("unix style");
+    _test_parse_using_ryml(&d->unix_style);
+}
+
+TEST_P(YmlTestCase, parse_windows)
+{
+    SCOPED_TRACE("windows style");
+    _test_parse_using_ryml(&d->windows_style);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, emit_yml_unix_stdout)
+{
+    SCOPED_TRACE("unix style");
+    _test_emit_yml_stdout(&d->unix_style);
+}
+TEST_P(YmlTestCase, emit_json_unix_stdout)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emit_json_stdout(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, emit_yml_windows_stdout)
+{
+    SCOPED_TRACE("windows style");
+    _test_emit_yml_stdout(&d->windows_style);
+}
+TEST_P(YmlTestCase, emit_json_windows_stdout)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emit_json_stdout(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, emit_yml_unix_cout)
+{
+    SCOPED_TRACE("unix style");
+    _test_emit_yml_cout(&d->unix_style);
+}
+TEST_P(YmlTestCase, emit_json_unix_cout)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emit_json_cout(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, emit_yml_windows_cout)
+{
+    SCOPED_TRACE("windows style");
+    _test_emit_yml_cout(&d->windows_style);
+}
+TEST_P(YmlTestCase, emit_json_windows_cout)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emit_json_cout(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, emit_yml_unix_stringstream)
+{
+    SCOPED_TRACE("unix style");
+    _test_emit_yml_stringstream(&d->unix_style);
+}
+TEST_P(YmlTestCase, emit_json_unix_stringstream)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emit_json_stringstream(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, emit_yml_windows_stringstream)
+{
+    SCOPED_TRACE("windows style");
+    _test_emit_yml_stringstream(&d->windows_style);
+}
+TEST_P(YmlTestCase, emit_json_windows_stringstream)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emit_json_stringstream(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, emit_yml_unix_ofstream)
+{
+    SCOPED_TRACE("unix style");
+    _test_emit_yml_ofstream(&d->unix_style);
+}
+TEST_P(YmlTestCase, emit_json_unix_ofstream)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emit_json_ofstream(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, emit_yml_windows_ofstream)
+{
+    SCOPED_TRACE("windows style");
+    _test_emit_yml_ofstream(&d->windows_style);
+}
+TEST_P(YmlTestCase, emit_json_windows_ofstream)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emit_json_ofstream(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, emit_yml_unix_string)
+{
+    SCOPED_TRACE("unix style");
+    _test_emit_yml_string(&d->unix_style);
+}
+TEST_P(YmlTestCase, emit_json_unix_string)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emit_json_string(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, emit_yml_windows_string)
+{
+    SCOPED_TRACE("windows style");
+    _test_emit_yml_string(&d->windows_style);
+}
+TEST_P(YmlTestCase, emit_json_windows_string)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emit_json_string(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, unix_emitrs)
+{
+    SCOPED_TRACE("unix style");
+    _test_emitrs(&d->unix_style);
+}
+TEST_P(YmlTestCase, unix_emitrs_json)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emitrs_json(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, windows_emitrs)
+{
+    SCOPED_TRACE("windows style");
+    _test_emitrs(&d->windows_style);
+}
+TEST_P(YmlTestCase, windows_emitrs_json)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emitrs_json(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, unix_emitrs_cfile)
+{
+    SCOPED_TRACE("unix style");
+    _test_emitrs_cfile(&d->unix_style);
+}
+TEST_P(YmlTestCase, unix_emitrs_json_cfile)
+{
+    SCOPED_TRACE("unix style json");
+    _test_emitrs_json_cfile(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, windows_emitrs_cfile)
+{
+    SCOPED_TRACE("windows style");
+    _test_emitrs_cfile(&d->windows_style);
+}
+TEST_P(YmlTestCase, windows_emitrs_json_cfile)
+{
+    SCOPED_TRACE("windows style json");
+    _test_emitrs_json_cfile(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, complete_unix_round_trip)
+{
+    SCOPED_TRACE("unix style");
+    _test_complete_round_trip(&d->unix_style);
+}
+TEST_P(YmlTestCase, complete_unix_round_trip_json)
+{
+    SCOPED_TRACE("unix style json");
+    _test_complete_round_trip_json(&d->unix_style_json);
+}
+
+TEST_P(YmlTestCase, complete_windows_round_trip)
+{
+    SCOPED_TRACE("windows style");
+    _test_complete_round_trip(&d->windows_style);
+}
+TEST_P(YmlTestCase, complete_windows_round_trip_json)
+{
+    SCOPED_TRACE("windows style json");
+    _test_complete_round_trip_json(&d->windows_style_json);
+}
+
+//-----------------------------------------------------------------------------
+TEST_P(YmlTestCase, unix_recreate_from_ref)
+{
+    SCOPED_TRACE("unix style");
+    _test_recreate_from_ref(&d->unix_style);
+}
+
+TEST_P(YmlTestCase, windows_recreate_from_ref)
+{
+    SCOPED_TRACE("windows style");
+    _test_recreate_from_ref(&d->windows_style);
+}
+
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_group.hpp b/thirdparty/ryml/test/test_group.hpp
new file mode 100644
index 000000000..f661ec9b5
--- /dev/null
+++ b/thirdparty/ryml/test/test_group.hpp
@@ -0,0 +1,210 @@
+#pragma once
+#ifndef C4_RYML_TEST_GROUP_HPP_
+#define C4_RYML_TEST_GROUP_HPP_
+
+#include "./test_case.hpp"
+#include "c4/span.hpp"
+#include <algorithm>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4068/*unknown pragma*/)
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wunknown-pragmas"
+//# pragma GCC diagnostic ignored "-Wpragma-system-header-outside-header"
+#endif
+
+
+namespace c4 {
+namespace yml {
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+// a fixture for running the tests
+// Parameterized gtest fixture: the parameter is a case name, which is mapped
+// to the immutable case definition (Case) and its mutable working data
+// (CaseData) in the fixture constructor.
+struct YmlTestCase : public ::testing::TestWithParam<csubstr>
+{
+    csubstr const name; // name of the current case (the gtest parameter)
+    Case const* c;      // immutable case definition: source, ref tree, flags
+    CaseData * d;       // mutable per-case buffers and trees, cached across tests
+
+    YmlTestCase() : name(to_csubstr(GetParam()))
+    {
+        c = get_case(name);
+        d = get_data(name);
+    }
+
+    void SetUp() override
+    {
+        // Code here will be called immediately after the constructor (right
+        // before each test).
+        std::cout << "-------------------------------------------\n";
+        std::cout << "running test case '" << name << "'\n";
+        std::cout << "-------------------------------------------\n";
+    }
+
+    // test bodies, defined in the corresponding .cpp file
+    void _test_parse_using_ryml(CaseDataLineEndings *cd);
+
+    void _test_emit_yml_stdout(CaseDataLineEndings *cd);
+    void _test_emit_json_stdout(CaseDataLineEndings *cd);
+
+    void _test_emit_yml_cout(CaseDataLineEndings *cd);
+    void _test_emit_json_cout(CaseDataLineEndings *cd);
+
+    void _test_emit_yml_stringstream(CaseDataLineEndings *cd);
+    void _test_emit_json_stringstream(CaseDataLineEndings *cd);
+
+    void _test_emit_yml_ofstream(CaseDataLineEndings *cd);
+    void _test_emit_json_ofstream(CaseDataLineEndings *cd);
+
+    void _test_emit_yml_string(CaseDataLineEndings *cd);
+    void _test_emit_json_string(CaseDataLineEndings *cd);
+
+    void _test_emitrs(CaseDataLineEndings *cd);
+    void _test_emitrs_json(CaseDataLineEndings *cd);
+
+    void _test_emitrs_cfile(CaseDataLineEndings *cd);
+    void _test_emitrs_json_cfile(CaseDataLineEndings *cd);
+
+    void _test_complete_round_trip(CaseDataLineEndings *cd);
+    void _test_complete_round_trip_json(CaseDataLineEndings *cd);
+
+    void _test_recreate_from_ref(CaseDataLineEndings *cd);
+
+    // parse cd->src into cd->parsed_tree, only once per case data
+    void _ensure_parse(CaseDataLineEndings *cd)
+    {
+        if(cd->parsed_tree.empty())
+            parse_in_place(c->fileline, cd->src, &cd->parsed_tree);
+    }
+    // emit cd->parsed_tree to cd->emit_buf, only once per case data; checks
+    // that the returned substr aliases the destination buffer
+    void _ensure_emit(CaseDataLineEndings *cd)
+    {
+        _ensure_parse(cd);
+        if(cd->emit_buf.empty())
+        {
+            cd->emitted_yml = emitrs_yaml(cd->parsed_tree, &cd->emit_buf);
+            ASSERT_EQ(cd->emitted_yml.size(), cd->emit_buf.size());
+            if(cd->emitted_yml.size())
+            {
+                ASSERT_EQ(cd->emitted_yml.data(), cd->emit_buf.data());
+            }
+        }
+    }
+    // same as _ensure_emit(), but emitting JSON into cd->emitjson_buf
+    void _ensure_emit_json(CaseDataLineEndings *cd)
+    {
+        _ensure_parse(cd);
+        if(cd->emitjson_buf.empty())
+        {
+            cd->emitted_json = emitrs_json(cd->parsed_tree, &cd->emitjson_buf);
+            ASSERT_EQ(cd->emitted_json.size(), cd->emitjson_buf.size());
+            if(cd->emitted_json.size())
+            {
+                ASSERT_EQ(cd->emitted_json.data(), cd->emitjson_buf.data());
+            }
+        }
+    }
+};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+// facilities for declaring test data
+
+// terse aliases used when declaring case data with ADD_CASE_TO_GROUP()
+using N = CaseNode;
+using L = CaseNode::iseqmap;
+using TS = TaggedScalar;
+using TL = CaseNode::TaggedList;
+using AR = AnchorRef;
+
+// NodeType shorthands: a VAL node with quoted key, quoted val, or both
+constexpr const NodeType_e QK = (NodeType_e)(VAL | KEYQUO);
+constexpr const NodeType_e QV = (NodeType_e)(VAL | VALQUO);
+constexpr const NodeType_e QKV = (NodeType_e)(VAL | KEYQUO | VALQUO);
+
+#ifdef __GNUC__
+#if __GNUC__ == 4 && __GNUC_MINOR__ >= 8
+// Functor used in place of the variadic macro below; presumably a workaround
+// for gcc 4.8's handling of variadic macro arguments -- TODO confirm.
+struct CaseAdder {
+    std::vector<Case> *group_cases;
+    const csubstr file;
+    const int line;
+
+    template<typename... Args>
+    void operator ()(Args... parameters) const {
+        group_cases->emplace_back(csubstr(file), line, parameters...);
+    }
+};
+
+/* all arguments are to the constructor of Case */
+#define ADD_CASE_TO_GROUP CaseAdder{group_cases__, csubstr(__FILE__), __LINE__+1}
+#endif
+#endif
+
+#ifndef ADD_CASE_TO_GROUP
+/* all arguments are to the constructor of Case; __LINE__+1 apparently records
+ * the line after the invocation (where the case source starts) -- confirm */
+#define ADD_CASE_TO_GROUP(...) \
+    group_cases__->emplace_back(csubstr(__FILE__), __LINE__+1, __VA_ARGS__)
+#endif
+
+// Expands to: a lazily-built singleton vector of Case objects for the group,
+// a parallel vector of case names (with a duplicate-name check), the gtest
+// instantiation of YmlTestCase over those names, the get_case() lookup used
+// by the fixture, and finally the header of the function where the caller's
+// braced block of ADD_CASE_TO_GROUP() calls defines the cases.
+#define CASE_GROUP(group_name)                                          \
+                                                                        \
+/* fwd declaration to fill the container with cases */                  \
+void add_cases_##group_name(std::vector<Case> *group_cases);            \
+                                                                        \
+/* container with the cases */                                          \
+std::vector<Case> const& get_cases_##group_name()                       \
+{                                                                       \
+    static std::vector<Case> cases_##group_name;                        \
+    if(cases_##group_name.empty())                                      \
+        add_cases_##group_name(&cases_##group_name);                    \
+    return cases_##group_name;                                          \
+}                                                                       \
+                                                                        \
+/* container with the case names */                                     \
+std::vector<csubstr> const& get_case_names_##group_name()               \
+{                                                                       \
+    static std::vector<csubstr> case_names_##group_name;                \
+    if(case_names_##group_name.empty())                                 \
+    {                                                                   \
+        for(auto const& c : get_cases_##group_name())                   \
+            case_names_##group_name.emplace_back(c.name);               \
+        /* check repetitions */                                         \
+        std::vector<csubstr> cp = case_names_##group_name;              \
+        std::sort(cp.begin(), cp.end());                                \
+        for(size_t i = 0; i+1 < cp.size(); ++i)                         \
+            if(cp[i] == cp[i+1])                                        \
+                C4_ERROR("duplicate case name: '%.*s'", _c4prsp(cp[i])); \
+    }                                                                   \
+    return case_names_##group_name;                                     \
+}                                                                       \
+                                                                        \
+INSTANTIATE_TEST_SUITE_P(group_name, YmlTestCase, ::testing::ValuesIn(get_case_names_##group_name())); \
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(YmlTestCase);             \
+                                                                        \
+/* used by the fixture to obtain a case by name */                      \
+Case const* get_case(csubstr name)                                      \
+{                                                                       \
+    for(Case const& c : get_cases_##group_name())                       \
+        if(c.name == name)                                              \
+            return &c;                                                  \
+    C4_ERROR("case not found: '%.*s'", _c4prsp(name));                  \
+    return nullptr;                                                     \
+}                                                                       \
+                                                                        \
+/* finally, define the cases by calling ADD_CASE_TO_GROUP() */          \
+void add_cases_##group_name(std::vector<Case> *group_cases__)
+
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
+
+#endif // C4_RYML_TEST_GROUP_HPP_
diff --git a/thirdparty/ryml/test/test_indentation.cpp b/thirdparty/ryml/test/test_indentation.cpp
new file mode 100644
index 000000000..46b6f9bff
--- /dev/null
+++ b/thirdparty/ryml/test/test_indentation.cpp
@@ -0,0 +1,340 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+// Test cases exercising the parser's handling of indentation: indented
+// documents, different indent widths, blank/indented blank lines, comments at
+// unaligned columns, and indentation present at the start of the stream.
+// NOTE: the YAML sources and their inline '#' comments are test data and must
+// not be edited.
+CASE_GROUP(INDENTATION)
+{
+
+ADD_CASE_TO_GROUP("indented doc", R"(
+    # this is an indented doc
+    ---
+    - foo
+    - bar
+    - baz
+)",
+N(STREAM, L{N(DOCSEQ, L{N("foo"), N("bar"), N("baz")})})
+);
+
+ADD_CASE_TO_GROUP("4 chars",
+R"(
+key:
+     value
+another_key:
+    sub_key0:
+        - val0
+        - val1
+    sub_key1:
+      - val2
+      - val3
+    sub_key2:
+      - val4
+      - val5
+)",
+L{
+    N("key", "value"),
+    N("another_key", L{
+        N("sub_key0", L{N("val0"), N("val1")}),
+        N("sub_key1", L{N("val2"), N("val3")}),
+        N("sub_key2", L{N("val4"), N("val5")}),
+    })
+});
+
+ADD_CASE_TO_GROUP("2 chars + 4 chars, ex0",
+R"(
+key:
+     value
+another_key:
+    sub_key0:
+      - val0
+      - val1
+    sub_key1:
+      - val2
+      - val3
+    sub_key2:
+      - val4
+      - val5
+)",
+L{
+    N("key", "value"),
+    N("another_key", L{
+        N("sub_key0", L{N("val0"), N("val1")}),
+        N("sub_key1", L{N("val2"), N("val3")}),
+        N("sub_key2", L{N("val4"), N("val5")}),
+    })
+});
+
+ADD_CASE_TO_GROUP("2 chars + 4 chars, ex1",
+R"(
+key:
+     value
+another_key:
+    sub_key0:
+        - val0
+      - val1
+    sub_key1:
+      - val2
+        - val3
+    sub_key2:
+      - val4
+        - val5
+)",
+L{
+    N("key", "value"),
+    N("another_key", L{
+        N("sub_key0", L{N("val0"), N("val1")}),
+        N("sub_key1", L{N("val2"), N("val3")}),
+        N("sub_key2", L{N("val4"), N("val5")}),
+    })
+});
+
+ADD_CASE_TO_GROUP("2 chars + 4 chars, ex2",
+R"(
+key:
+     value
+another_key:
+    sub_key0:
+       - val0
+     - val1
+    sub_key1:
+     - val2
+       - val3
+    sub_key2:
+     - val4
+       - val5
+)",
+L{
+    N("key", "value"),
+    N("another_key", L{
+        N("sub_key0", L{N("val0"), N("val1")}),
+        N("sub_key1", L{N("val2"), N("val3")}),
+        N("sub_key2", L{N("val4"), N("val5")}),
+    })
+});
+
+ADD_CASE_TO_GROUP("non-indented blank lines",
+R"(
+matrix:
+
+  include:  # next line is blank
+
+    - env01
+    - env02
+    - env03
+    - env04  # next line has one space
+ 
+    - env11
+    - env12
+    - env13
+    - env14  # next line has two spaces
+  
+    - env21
+    - env22
+    - env23
+    - env24  # next line has three spaces
+   
+    - env31
+    - env32
+    - env33
+    - env34  # next line has four spaces
+    
+    - env41
+    - env42
+    - env43
+    - env44  # next line has five spaces
+     
+    - env51
+    - env52
+    - env53
+    - env54  # next line has six spaces
+      
+    - env61
+    - env62
+    - env63
+    - env64  # next line has five spaces
+)",
+L{N("matrix", L{
+    N("include", L{
+        N("env01"), N("env02"), N("env03"), N("env04"),
+        N("env11"), N("env12"), N("env13"), N("env14"),
+        N("env21"), N("env22"), N("env23"), N("env24"),
+        N("env31"), N("env32"), N("env33"), N("env34"),
+        N("env41"), N("env42"), N("env43"), N("env44"),
+        N("env51"), N("env52"), N("env53"), N("env54"),
+        N("env61"), N("env62"), N("env63"), N("env64"),
+    }
+  )})
+});
+
+ADD_CASE_TO_GROUP("unnecessary indentation",
+R"(
+skip_commits:
+  files:
+        - a
+        - b
+        - c
+        - d
+        - e
+        - f
+  more_files:
+   - a
+   - b
+  even_more_files:
+     - a
+     - b
+more_skip:
+  files:
+          - a
+          - b
+          - c
+          - d
+          - e
+          - f
+  more_files:
+    - a
+    - b
+)",
+L{
+    N("skip_commits", L{
+        N("files", L{N("a"), N("b"), N("c"), N("d"), N("e"), N("f"),}),
+        N("more_files", L{N("a"), N("b"),}),
+        N("even_more_files", L{N("a"), N("b"),}),
+    }),
+    N("more_skip", L{
+        N("files", L{N("a"), N("b"), N("c"), N("d"), N("e"), N("f"),}),
+        N("more_files", L{N("a"), N("b"),}),
+    })
+});
+
+
+ADD_CASE_TO_GROUP("blank lines indented, 1 - at same scope",
+R"(
+skip_commits:
+  files:
+    - a   # next line has 22 spaces (aligns with -)
+                      
+    - b   # next line has 23 spaces (aligns with #)
+                       
+    - c   # next line has 3 spaces
+   
+    - d
+)",
+L{
+    N("skip_commits", L{
+        N("files", L{N("a"), N("b"), N("c"), N("d"),}),
+    }),
+});
+
+ADD_CASE_TO_GROUP("indentation at start",
+R"(
+      foo:
+        - a
+        - b
+      bar:
+        - c
+        - d
+)",
+L{
+    N("foo", L{N("a"), N("b"),}),
+    N("bar", L{N("c"), N("d"),}),
+});
+
+ADD_CASE_TO_GROUP("unaligned comments",
+R"(
+      stand2sit:
+        map: mirror
+        dat:
+          - a
+          - b
+#
+          - b1
+ #
+          - b2
+  #
+   #
+    #
+          - b3
+     #
+      #
+       #
+          - b4
+        #
+         # - c
+          #- d
+          - b5
+           #- d2
+            #- d3
+             #- d4
+          - b6
+              #- d41
+               #
+          - b61
+                #
+                 #
+          - b62
+                  #
+                   #
+                    #
+          - b63
+                     #
+          - b64
+                      #
+          - b65
+                       #
+                        #
+                         #
+          - b66
+                      #
+                     #
+                    #
+                   #
+                  #- d41
+                 #- d5
+                #- d6
+               #- d7
+          - b7
+              #- d8
+             #
+            #
+           #
+          - b8
+          #
+         #
+        #
+          - b9
+       #
+      #
+          - b10
+     #
+#
+          - e
+          - f
+          - g
+)",
+L{
+    N("stand2sit", L{
+        N("map", "mirror"),
+        N("dat", L{N("a"), N("b"), N("b1"), N("b2"), N("b3"), N("b4"), N("b5"), N("b6"), N("b61"), N("b62"), N("b63"), N("b64"), N("b65"), N("b66"), N("b7"), N("b8"), N("b9"), N("b10"), N("e"), N("f"), N("g")}),
+    }),
+});
+
+ADD_CASE_TO_GROUP("issue83",
+R"(
+e:
+  - f
+g: h
+a:
+  - b
+  
+c: d
+)",
+L{
+N("e", L{N("f")}),
+N("g", "h"),
+N("a", L{N("b")}),
+N("c", "d"),
+});
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_json.cpp b/thirdparty/ryml/test/test_json.cpp
new file mode 100644
index 000000000..8be31b794
--- /dev/null
+++ b/thirdparty/ryml/test/test_json.cpp
@@ -0,0 +1,516 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+
+#include "./test_case.hpp"
+
+#include <gtest/gtest.h>
+
+namespace foo {
+
+// Minimal user-defined vector types used to demonstrate custom (de)serialization
+// of values via the to_chars/from_chars overloads below.
+template<class T>
+struct vec2
+{
+    T x, y;
+};
+template<class T>
+struct vec3
+{
+    T x, y, z;
+};
+template<class T>
+struct vec4
+{
+    T x, y, z, w;
+};
+
+// Serialization hooks found by ryml via ADL: format as "(x,y[,z[,w]])" ...
+template<class T> size_t to_chars(c4::substr buf, vec2<T> v) { return c4::format(buf, "({},{})", v.x, v.y); }
+template<class T> size_t to_chars(c4::substr buf, vec3<T> v) { return c4::format(buf, "({},{},{})", v.x, v.y, v.z); }
+template<class T> size_t to_chars(c4::substr buf, vec4<T> v) { return c4::format(buf, "({},{},{},{})", v.x, v.y, v.z, v.w); }
+
+// ... and parse the same format back; returns false when unformat() fails
+template<class T> bool from_chars(c4::csubstr buf, vec2<T> *v) { size_t ret = c4::unformat(buf, "({},{})", v->x, v->y); return ret != c4::yml::npos; }
+template<class T> bool from_chars(c4::csubstr buf, vec3<T> *v) { size_t ret = c4::unformat(buf, "({},{},{})", v->x, v->y, v->z); return ret != c4::yml::npos; }
+template<class T> bool from_chars(c4::csubstr buf, vec4<T> *v) { size_t ret = c4::unformat(buf, "({},{},{},{})", v->x, v->y, v->z, v->w); return ret != c4::yml::npos; }
+
+// Round-trips vec2/vec3/vec4 values through the tree via the custom
+// to_chars/from_chars overloads, then checks the JSON emitted for them.
+TEST(serialize, type_as_str)
+{
+    c4::yml::Tree t;
+
+    auto r = t.rootref();
+    r |= c4::yml::MAP;
+
+    vec2<int> v2in{10, 11};
+    vec2<int> v2out;
+    r["v2"] << v2in;  // serialize via to_chars()
+    r["v2"] >> v2out; // deserialize via from_chars()
+    EXPECT_EQ(v2in.x, v2out.x);
+    EXPECT_EQ(v2in.y, v2out.y);
+
+    vec3<int> v3in{100, 101, 102};
+    vec3<int> v3out;
+    r["v3"] << v3in;
+    r["v3"] >> v3out;
+    EXPECT_EQ(v3in.x, v3out.x);
+    EXPECT_EQ(v3in.y, v3out.y);
+    EXPECT_EQ(v3in.z, v3out.z);
+
+    vec4<int> v4in{1000, 1001, 1002, 1003};
+    vec4<int> v4out;
+    r["v4"] << v4in;
+    r["v4"] >> v4out;
+    EXPECT_EQ(v4in.x, v4out.x);
+    EXPECT_EQ(v4in.y, v4out.y);
+    EXPECT_EQ(v4in.z, v4out.z);
+    EXPECT_EQ(v4in.w, v4out.w);
+
+    char buf[256];
+    c4::csubstr interm = c4::yml::emit_json(t, buf);
+    EXPECT_EQ(interm, R"_({"v2": "(10,11)","v3": "(100,101,102)","v4": "(1000,1001,1002,1003)"})_");
+}
+} // namespace foo
+
+namespace c4 {
+namespace yml {
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// Builds a tree by hand (assignment vs serialization) and checks the JSON
+// emitted at each stage.
+TEST(general, emitting)
+{
+    std::string cmpbuf;
+    std::string cmpbuf2;
+
+    Tree tree;
+    auto r = tree.rootref();
+
+    r |= MAP;  // this is needed to make the root a map
+
+    r["foo"] = "1"; // ryml works only with strings.
+    // Note that the tree will be __pointing__ at the
+    // strings "foo" and "1" used here. You need
+    // to make sure they have at least the same
+    // lifetime as the tree.
+
+    auto s = r["seq"]; // does not change the tree until s is written to.
+    s |= SEQ;
+    r["seq"].append_child() = "bar0"; // value of this child is now __pointing__ at "bar0"
+    r["seq"].append_child() = "bar1";
+    r["seq"].append_child() = "bar2";
+
+    //print_tree(tree);
+
+    // emit to stdout (can also emit to FILE* or ryml::span)
+    emitrs_json(tree, &cmpbuf);
+    EXPECT_EQ(cmpbuf, R"({"foo": 1,"seq": ["bar0","bar1","bar2"]})");
+
+    // serializing: using operator<< instead of operator=
+    // will make the tree serialize the value into a char
+    // arena inside the tree. This arena can be reserved at will.
+    int ch3 = 33, ch4 = 44;
+    s.append_child() << ch3;
+    s.append_child() << ch4;
+
+    {
+        std::string tmp = "child5";
+        s.append_child() << tmp;
+        // now tmp can go safely out of scope, as it was
+        // serialized to the tree's internal string arena
+    }
+
+    emitrs_json(tree, &cmpbuf);
+    EXPECT_EQ(cmpbuf, R"({"foo": 1,"seq": ["bar0","bar1","bar2",33,44,"child5"]})");
+
+    // to serialize keys:
+    int k = 66;
+    r.append_child() << key(k) << 7;
+    emitrs_json(tree, &cmpbuf);
+    EXPECT_EQ(cmpbuf, R"({"foo": 1,"seq": ["bar0","bar1","bar2",33,44,"child5"],"66": 7})");
+}
+
+// Serializes a std::map into the tree root, checks the emitted JSON, then
+// reads updated values back into a std::map.
+TEST(general, map_to_root)
+{
+    std::string cmpbuf; const char *exp;
+    std::map<std::string, int> m({{"bar", 2}, {"foo", 1}});
+    Tree t;
+    t.rootref() << m;
+
+    emitrs_json(t, &cmpbuf);
+    exp = "{\"bar\": 2,\"foo\": 1}";
+    EXPECT_EQ(cmpbuf, exp);
+
+    t["foo"] << 10;
+    t["bar"] << 20;
+
+    m.clear();
+    t.rootref() >> m;
+
+    EXPECT_EQ(m["foo"], 10);
+    EXPECT_EQ(m["bar"], 20);
+}
+
+// Emits JSON through operator<< with as_json(), re-parses the result, and
+// checks the values survive the round trip.
+TEST(general, json_stream_operator)
+{
+    std::map<std::string, int> out, m({{"bar", 2}, {"foo", 1}, {"foobar_barfoo:barfoo_foobar", 1001}, {"asdfjkl;", 42}, {"00000000000000000000000000000000000000000000000000000000000000", 1}});
+    Tree t;
+    t.rootref() << m;
+    std::string str;
+    {
+        std::stringstream ss;
+        ss << as_json(t);
+        str = ss.str();
+    }
+    Tree res = c4::yml::parse_in_place(to_substr(str));
+    EXPECT_EQ(res["foo"].val(), "1");
+    EXPECT_EQ(res["bar"].val(), "2");
+    EXPECT_EQ(res["foobar_barfoo:barfoo_foobar"].val(), "1001");
+    EXPECT_EQ(res["asdfjkl;"].val(), "42");
+    EXPECT_EQ(res["00000000000000000000000000000000000000000000000000000000000000"].val(), "1");
+    res.rootref() >> out;
+    EXPECT_EQ(out["foo"], 1);
+    EXPECT_EQ(out["bar"], 2);
+    EXPECT_EQ(out["foobar_barfoo:barfoo_foobar"], 1001);
+    EXPECT_EQ(out["asdfjkl;"], 42);
+    EXPECT_EQ(out["00000000000000000000000000000000000000000000000000000000000000"], 1);
+}
+
+// Pins the behavior from issue 72: per the expected output below, unquoted
+// scalar strings equal to the JSON literals null/true/false are emitted as
+// those literals, while keys are always quoted.
+TEST(emit_json, issue72)
+{
+    Tree t;
+    NodeRef r = t.rootref();
+
+    r |= MAP;
+    r["1"] = "null";
+    r["2"] = "true";
+    r["3"] = "false";
+    r["null"] = "1";
+    r["true"] = "2";
+    r["false"] = "3";
+
+    std::string out;
+    emitrs_json(t, &out);
+
+    EXPECT_EQ(out, R"({"1": null,"2": true,"3": false,"null": 1,"true": 2,"false": 3})");
+}
+
+
+// Pins the behavior from issue 121: scalars that were quoted in the source
+// keep the VALQUO flag and are re-emitted quoted, both in JSON and in YAML.
+TEST(emit_json, issue121)
+{
+    Tree t = parse_in_arena(R"(
+string_value: "string"
+number_value: "9001"
+broken_value: "0.30.2"
+)");
+    EXPECT_TRUE(t["string_value"].get()->m_type.type & VALQUO);
+    EXPECT_TRUE(t["number_value"].get()->m_type.type & VALQUO);
+    EXPECT_TRUE(t["broken_value"].get()->m_type.type & VALQUO);
+    std::string out;
+    emitrs_json(t, &out);
+    EXPECT_EQ(out, R"({"string_value": "string","number_value": "9001","broken_value": "0.30.2"})");
+    out.clear();
+    emitrs_yaml(t, &out);
+    EXPECT_EQ(out, R"(string_value: 'string'
+number_value: '9001'
+broken_value: '0.30.2'
+)");
+}
+
+// Pins the behavior from issue 291: "045" (leading zero, not a valid JSON
+// number) must be emitted quoted.
+TEST(emit_json, issue291)
+{
+    Tree t = parse_in_arena("{}");
+    t["james"] = "045";
+    auto s = emitrs_json<std::string>(t);
+    EXPECT_EQ(s, "{\"james\": \"045\"}");
+}
+
+// Pins the behavior from issue 292: version-like strings ("1.2.3") are not
+// numbers, and must be emitted quoted in JSON.
+TEST(emit_json, issue292)
+{
+    EXPECT_FALSE(csubstr("0.0.0").is_number());
+    EXPECT_FALSE(csubstr("0.0.0").is_integer());
+    EXPECT_FALSE(csubstr("0.0.0").is_real());
+    EXPECT_FALSE(csubstr("0.1.0").is_number());
+    EXPECT_FALSE(csubstr("0.1.0").is_integer());
+    EXPECT_FALSE(csubstr("0.1.0").is_real());
+    EXPECT_FALSE(csubstr("0.6.1").is_number());
+    EXPECT_FALSE(csubstr("0.6.1").is_integer());
+    EXPECT_FALSE(csubstr("0.6.1").is_real());
+    EXPECT_FALSE(csubstr("1.1.9").is_number());
+    EXPECT_FALSE(csubstr("1.1.9").is_integer());
+    EXPECT_FALSE(csubstr("1.1.9").is_real());
+    EXPECT_FALSE(csubstr("1.2.3").is_number());
+    EXPECT_FALSE(csubstr("1.2.3").is_integer());
+    EXPECT_FALSE(csubstr("1.2.3").is_real());
+    Tree t = parse_in_arena("{}");
+    t["james"] = "0.0.0";
+    EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"0.0.0\"}");
+    t["james"] = "0.1.0";
+    EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"0.1.0\"}");
+    t["james"] = "0.6.1";
+    EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"0.6.1\"}");
+    t["james"] = "1.1.9";
+    EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"1.1.9\"}");
+    t["james"] = "1.2.3";
+    EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"1.2.3\"}");
+}
+
+// Pins the behavior from issue 297: newlines in a YAML block literal are
+// escaped as \n in the emitted JSON string.
+TEST(emit_json, issue297)
+{
+    char yml_buf[] = R"(
+comment: |
+  abc
+  def
+)";
+    Tree t = parse_in_place(yml_buf);
+    auto s = emitrs_json<std::string>(t);
+    EXPECT_EQ(s, "{\"comment\": \"abc\\ndef\\n\"}");
+}
+
+// Checks JSON escaping of control and special characters in scalar values.
+TEST(emit_json, issue297_escaped_chars)
+{
+    Tree t = parse_in_arena("{}");
+    t["quote"] = "abc\"def";
+    t["newline"] = "abc\ndef";
+    t["tab"] = "abc\tdef";
+    t["carriage"] = "abc\rdef";
+    t["backslash"] = "abc\\def";
+    t["backspace"] = "abc\bdef";
+    t["formfeed"] = "abc\fdef";
+    std::string expected = R"({"quote": "abc\"def","newline": "abc\ndef","tab": "abc\tdef","carriage": "abc\rdef","backslash": "abc\\def","backspace": "abc\bdef","formfeed": "abc\fdef"})";
+    auto actual = emitrs_json<std::string>(t);
+    EXPECT_EQ(actual, expected);
+}
+
+namespace {
+// copy a node's key / val into an owning std::string, for test failure messages
+std::string k(ConstNodeRef node) { return std::string(node.key().str, node.key().len); }
+std::string v(ConstNodeRef node) { return std::string(node.val().str, node.val().len); }
+}
+// Pins the behavior from issue 313: real numbers must not come back quoted
+// after a JSON emit + re-parse round trip.
+TEST(emit_json, issue313_quoted_numbers__1)
+{
+    EXPECT_TRUE(csubstr("0.99356698989868164").is_number()); // [WEIRD0][0]
+    EXPECT_TRUE(csubstr("0.99356698989868164").is_real()); // [WEIRD0][0]
+    EXPECT_FALSE(csubstr("0.99356698989868164").is_integer()); // [WEIRD0][0]
+    EXPECT_TRUE(csubstr("0.0064908224157989025").is_number()); // [WEIRD2][0]
+    EXPECT_TRUE(csubstr("0.0064908224157989025").is_real()); // [WEIRD2][0]
+    EXPECT_FALSE(csubstr("0.0064908224157989025").is_integer()); // [WEIRD2][0]
+    EXPECT_TRUE(csubstr("0.0064917667768895626").is_number()); // [WEIRD2][1]
+    EXPECT_TRUE(csubstr("0.0064917667768895626").is_real()); // [WEIRD2][1]
+    EXPECT_FALSE(csubstr("0.0064917667768895626").is_integer()); // [WEIRD2][1]
+    EXPECT_TRUE(csubstr("0.0064947893843054771").is_number()); // [WEIRD2][2]
+    EXPECT_TRUE(csubstr("0.0064947893843054771").is_real()); // [WEIRD2][2]
+    EXPECT_FALSE(csubstr("0.0064947893843054771").is_integer()); // [WEIRD2][2]
+    EXPECT_TRUE(csubstr("0.91054189205169678").is_number()); // [WEIRD4][0]
+    EXPECT_TRUE(csubstr("0.91054189205169678").is_real()); // [WEIRD4][0]
+    EXPECT_FALSE(csubstr("0.91054189205169678").is_integer()); // [WEIRD4][0]
+    EXPECT_TRUE(csubstr("0.13215841352939606").is_number()); // [REALLY_WEIRD5][9][0]
+    EXPECT_TRUE(csubstr("0.13215841352939606").is_real()); // [REALLY_WEIRD5][9][0]
+    EXPECT_FALSE(csubstr("0.13215841352939606").is_integer()); // [REALLY_WEIRD5][9][0]
+    Tree t0 = parse_in_arena(R"([
+ 0.99356698989868164,
+ 0.0064908224157989025,
+ 0.0064917667768895626,
+ 0.0064947893843054771,
+ 0.91054189205169678,
+ 0.13215841352939606,
+])");
+    std::string yaml = emitrs_json<std::string>(t0);
+    test_check_emit_check(to_csubstr(yaml), [&](Tree const &t){
+        for(ConstNodeRef number : t.rootref().children())
+        {
+            ASSERT_TRUE(number.is_val());
+            EXPECT_FALSE(number.is_val_quoted()) << "tree[" << t.rootref().child_pos(number) << "]=" << v(number);
+        }
+    });
+}
+
+
+// Same as issue313_quoted_numbers__1, but with nested sequences: after the
+// round trip, keys stay quoted and the real-number values stay unquoted.
+TEST(emit_json, issue313_quoted_numbers__2)
+{
+    Tree ti = parse_in_arena(R"({
+WEIRD0: [0.99356698989868164, 1.0605627298355103],
+OK1: [0, 0, 0],
+WEIRD2: [0.0064908224157989025, 0.0064917667768895626, 0.0064947893843054771],
+OK3: [6.6227097511291504, 6.8674740791320801, 7.0403199195861816, 7.5792555809020996, 7.9916787147521973, 8.136042594909668, 8.5505847930908203, 8.701807975769043, 8.926518440246582, 8.9484291076660156, 9.0740194320678711, 9.3788108825683594, 9.406926155090332],
+WEIRD4: [0.91054189205169678, 0.98725020885467529, 1.070807933807373],
+REALLY_WEIRD5: [
+   [1.5158847570419312, 1.6361792087554932], # 0
+   [1.0741721391677856, 1.1791903972625732], # 1
+   [1.4423576593399048, 1.7063977718353271], # 2
+   [1.1791903972625732], # 3
+   [1.1493504047393799, 1.1791903972625732], # 4
+   [1.1791903972625732, 1.3334760665893555], # 5
+   [1.0655292272567749, 1.4933452606201172], # 6
+   [1.0712906122207642, 1.1791903972625732], # 7
+   [1.1791903972625732, 1.830910325050354], # 8
+   [0.13215841352939606, 1.4161584377288818], # 9
+   [1.1791903972625732, 1.5179581642150879], # 10
+   [1.1791903972625732, 1.2864601612091064], # 11
+   [1.1791903972625732, 1.6865267753601074], # 12
+   [1.1791903972625732, 1.2192368507385254], # 13
+   [1.1130030155181885, 1.5196701288223267], # 14
+   [1.0621790885925293, 1.1791903972625732] # 15
+]})");
+    std::string yaml = emitrs_json<std::string>(ti);
+    test_check_emit_check(to_csubstr(yaml), [](Tree const &t){
+        for(ConstNodeRef node : t.rootref().children())
+        {
+            ASSERT_TRUE(node.is_seq());
+            ASSERT_TRUE(node.has_key());
+            EXPECT_TRUE(node.is_key_quoted()) << "tree[" << k(node) << "]";
+            if(node.key() != "REALLY_WEIRD5")
+            {
+                for(ConstNodeRef number : node.children())
+                {
+                    ASSERT_TRUE(number.is_val());
+                    EXPECT_FALSE(number.is_val_quoted()) << "tree[" << k(node) << "][" << node.child_pos(number) << "]=" << v(number);
+                }
+            }
+            else
+            {
+                for(ConstNodeRef seq : node.children())
+                {
+                    ASSERT_TRUE(seq.is_seq());
+                    for(ConstNodeRef number : seq.children())
+                    {
+                        ASSERT_TRUE(number.is_val());
+                        EXPECT_FALSE(number.is_val_quoted()) << "tree[" << k(node) << "][" << node.child_pos(seq) << "][" << seq.child_pos(number) << "]=" << v(number);
+                    }
+                }
+            }
+        }
+    });
+}
+
+
+#define _test(actual_src, expected_src) \
+ { \
+ SCOPED_TRACE(__LINE__); \
+ csubstr file = __FILE__ ":" C4_XQUOTE(__LINE__); \
+ Tree actual = parse_in_arena(file, actual_src); \
+ Tree expected = parse_in_arena(file, expected_src); \
+ test_compare(actual, expected); \
+ }
+
+
+TEST(json, basic)
+{
+ _test("", "");
+ _test("{}", "{}");
+ _test(R"("a":"b")",
+ R"("a": "b")");
+ _test(R"('a':'b')",
+ R"('a': 'b')");
+ _test(R"({'a':'b'})",
+ R"({'a': 'b'})");
+ _test(R"({"a":"b"})",
+ R"({"a": "b"})");
+
+ _test(R"({"a":{"a":"b"}})",
+ R"({"a": {"a": "b"}})");
+ _test(R"({'a':{'a':'b'}})",
+ R"({'a': {'a': 'b'}})");
+}
+
+TEST(json, github142)
+{
+ _test(R"({"A":"B}"})",
+ R"({"A": "B}"})");
+ _test(R"({"A":"{B"})",
+ R"({"A": "{B"})");
+ _test(R"({"A":"{B}"})",
+ R"({"A": "{B}"})");
+ _test(R"({ "A":"B}" })",
+ R"({ "A": "B}" })");
+ _test(R"({"A":["B]","[C","[D]"]})",
+ R"({"A": ["B]","[C","[D]"]})");
+ //_test(R"({"A":["B\"]","[\"C","\"[D]\""]})", // VS2019 chokes on this.
+ // R"({"A": ["B\"]","[\"C","\"[D]\""]})");
+
+ _test(R"({'A':'B}'})",
+ R"({'A': 'B}'})");
+ _test(R"({'A':'{B'})",
+ R"({'A': '{B'})");
+ _test(R"({'A':'{B}'})",
+ R"({'A': '{B}'})");
+ _test(R"({ 'A':'B}' })",
+ R"({ 'A': 'B}' })");
+ _test(R"({'A':['B]','[C','[D]']})",
+ R"({'A': ['B]','[C','[D]']})");
+ _test(R"({'A':['B'']','[''C','''[D]''']})",
+ R"({'A': ['B'']','[''C','''[D]''']})");
+}
+
+TEST(json, github52)
+{
+ _test(R"({"a": "b","c": 42,"d": "e"})",
+ R"({"a": "b","c": 42,"d": "e"})");
+ _test(R"({"aaaa": "bbbb","cccc": 424242,"dddddd": "eeeeeee"})",
+ R"({"aaaa": "bbbb","cccc": 424242,"dddddd": "eeeeeee"})");
+
+ _test(R"({"a":"b","c":42,"d":"e"})",
+ R"({"a": "b","c": 42,"d": "e"})");
+ _test(R"({"aaaaa":"bbbbb","ccccc":424242,"ddddd":"eeeee"})",
+ R"({"aaaaa": "bbbbb","ccccc": 424242,"ddddd": "eeeee"})");
+ _test(R"({"a":"b","c":{},"d":"e"})",
+ R"({"a": "b","c": {},"d": "e"})");
+ _test(R"({"aaaaa":"bbbbb","ccccc":{ },"ddddd":"eeeee"})",
+ R"({"aaaaa": "bbbbb","ccccc": { },"ddddd": "eeeee"})");
+ _test(R"({"a":"b","c":true,"d":"e"})",
+ R"({"a": "b","c": true,"d": "e"})");
+ _test(R"({"a":"b","c":false,"d":"e"})",
+ R"({"a": "b","c": false,"d": "e"})");
+ _test(R"({"a":"b","c":true,"d":"e"})",
+ R"({"a": "b","c": true,"d": "e"})");
+ _test(R"({"a":"b","c":null,"d":"e"})",
+ R"({"a": "b","c": null,"d": "e"})");
+ _test(R"({"aaaaa":"bbbbb","ccccc":false,"ddddd":"eeeee"})",
+ R"({"aaaaa": "bbbbb","ccccc": false,"ddddd": "eeeee"})");
+ _test(R"({"a":"b","c":false,"d":"e"})",
+ R"({"a": "b","c": false,"d": "e"})");
+ _test(R"({"aaaaa":"bbbbb","ccccc":true,"ddddd":"eeeee"})",
+ R"({"aaaaa": "bbbbb","ccccc": true,"ddddd": "eeeee"})");
+}
+
+TEST(json, nested)
+{
+ _test(R"({"a":"b","c":{"a":"b","c":{},"d":"e"},"d":"e"})",
+ R"({"a": "b","c": {"a": "b","c": {},"d": "e"},"d": "e"})");
+ _test(R"({"a":"b","c":{"a":"b","c":{"a":"b","c":{},"d":"e"},"d":"e"},"d":"e"})",
+ R"({"a": "b","c": {"a": "b","c": {"a": "b","c": {},"d": "e"},"d": "e"},"d": "e"})");
+ _test(R"({"a":"b","c":{"a":"b","c":{"a":"b","c":{"a":"b","c":{},"d":"e"},"d":"e"},"d":"e"},"d":"e"})",
+ R"({"a": "b","c": {"a": "b","c": {"a": "b","c": {"a": "b","c": {},"d": "e"},"d": "e"},"d": "e"},"d": "e"})");
+ _test(R"({"a":"b","c":{"a":"b","c":{"a":"b","c":{"a":"b","c":{"a":"b","c":{},"d":"e"},"d":"e"},"d":"e"},"d":"e"},"d":"e"})",
+ R"({"a": "b","c": {"a": "b","c": {"a": "b","c": {"a": "b","c": {"a": "b","c": {},"d": "e"},"d": "e"},"d": "e"},"d": "e"},"d": "e"})");
+
+ _test(R"({"a":"b","c":["a","c","d","e"],"d":"e"})",
+ R"({"a": "b","c": ["a","c","d","e"],"d": "e"})");
+}
+
+TEST(json, nested_end)
+{
+ _test(R"({"a":"b","d":"e","c":{"a":"b","d":"e","c":{}}})",
+ R"({"a": "b","d": "e","c": {"a": "b","d": "e","c": {}}})");
+ _test(R"({"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{}}}})",
+ R"({"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {}}}})");
+ _test(R"({"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{}}}}})",
+ R"({"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {}}}}})");
+ _test(R"({"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{"a":"b","d":"e","c":{}}}}}})",
+ R"({"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {}}}}}})");
+}
+
+#undef _test
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_location.cpp b/thirdparty/ryml/test/test_location.cpp
new file mode 100644
index 000000000..e05407eeb
--- /dev/null
+++ b/thirdparty/ryml/test/test_location.cpp
@@ -0,0 +1,720 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/std.hpp>
+#include <c4/yml/yml.hpp>
+#endif
+#include "./test_case.hpp"
+#include <gtest/gtest.h>
+
+
+namespace c4 {
+namespace yml {
+
+TEST(locations, default_is_no_location)
+{
+ {
+ ParserOptions opts;
+ EXPECT_EQ(opts.locations(), false);
+ }
+ {
+ Parser parser;
+ EXPECT_EQ(parser.options().locations(), false);
+ }
+ {
+ Parser parser(ParserOptions{});
+ EXPECT_EQ(parser.options().locations(), false);
+ }
+ {
+ Parser parser(ParserOptions().locations(false));
+ EXPECT_EQ(parser.options().locations(), false);
+ }
+ {
+ Parser parser(ParserOptions().locations(true));
+ EXPECT_EQ(parser.options().locations(), true);
+ }
+}
+
+
+TEST(locations, error_is_triggered_querying_with_locations_disabled)
+{
+ bool parsed_ok = false;
+ ExpectError::do_check([&]{
+ Parser parser(ParserOptions().locations(false));
+ Tree t = parser.parse_in_arena("test", "foo: bar");
+ parsed_ok = true;
+ (void)parser.location(t["foo"]);
+ });
+ EXPECT_TRUE(parsed_ok);
+}
+
+
+
+#define _checkloc(node, line_, col_, str) \
+ { \
+ const Location loc = parser.location(node); \
+ EXPECT_EQ(loc.name, "myfile.yml"); \
+ EXPECT_EQ(loc.line, line_); \
+ EXPECT_EQ(loc.col, col_); \
+ EXPECT_EQ(t.arena().sub(loc.offset, csubstr(str).len), csubstr(str)); \
+ }
+
+TEST(locations, no_error_is_triggered_querying_with_locations)
+{
+ Parser parser(ParserOptions().locations(true));
+ EXPECT_EQ(parser.options().locations(), true);
+ Tree t = parser.parse_in_arena("myfile.yml", "foo: bar");
+ _checkloc(t["foo"], 0, 0, "foo");
+}
+
+
+TEST(locations, docval)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", "docval");
+ _checkloc(t.rootref(), 0u, 0u, "docval");
+ t = parser.parse_in_arena("myfile.yml", "\n docval");
+ _checkloc(t.rootref(), 1u, 1u, "docval");
+ t = parser.parse_in_arena("myfile.yml", "\n\n docval");
+ _checkloc(t.rootref(), 2u, 1u, "docval");
+}
+
+TEST(locations, docval_null)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", "~");
+ _checkloc(t.rootref(), 0u, 0u, "~");
+ t = parser.parse_in_arena("myfile.yml", "");
+ _checkloc(t.rootref(), 0u, 0u, "");
+ t = parser.parse_in_arena("myfile.yml", R"(#
+#
+#
+#
+#
+)");
+ _checkloc(t.rootref(), 0u, 0u, "");
+}
+
+TEST(locations, seq_block)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"(
+- this
+- is
+- a
+- seq
+- - and
+ - this
+ - - as
+ - well
+ - - # this one works as well
+ # even with a comment in between
+ the scalar value is here
+ - and here's another value
+ -
+ - another val
+ - yet another val
+)";
+ Tree t = parser.parse_in_arena("myfile.yml", yaml);
+ ConstNodeRef seq = t.rootref();
+ ASSERT_TRUE(seq.is_seq());
+ _checkloc(seq , 1u, 0u, "- ");
+ _checkloc(seq[0] , 1u, 2u, "this");
+ _checkloc(seq[1] , 2u, 2u, "is");
+ _checkloc(seq[2] , 3u, 2u, "a");
+ _checkloc(seq[3] , 4u, 2u, "seq");
+ _checkloc(seq[4] , 5u, 2u, "- ");
+ _checkloc(seq[4][0] , 5u, 4u, "and");
+ _checkloc(seq[4][1] , 6u, 4u, "this");
+ _checkloc(seq[4][2] , 7u, 4u, "- ");
+ _checkloc(seq[4][2][0], 7u, 6u, "as");
+ _checkloc(seq[4][2][1], 8u, 6u, "well");
+ _checkloc(seq[4][3] , 9u, 4u, "- # this one works as well");
+ _checkloc(seq[4][3][0], 11u, 6u, "the scalar value is here");
+ _checkloc(seq[4][3][1], 12u, 8u, "and here's another value");
+ _checkloc(seq[4][3][2], 14u, 6u, "- ");
+ _checkloc(seq[4][3][2][0], 14u, 8u, "another val");
+ _checkloc(seq[4][3][2][1], 15u, 8u, "yet another val");
+}
+
+TEST(locations, map_block)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"(
+this: ~
+is: ~
+a: ~
+map: ~
+and:
+ this:
+ as: ~
+ well: ~
+ aswell: # this one works as well
+ # even with a comment in between
+ val: here
+ hah: here
+)";
+ Tree t = parser.parse_in_arena("myfile.yml", yaml);
+ ConstNodeRef map = t.rootref();
+ ASSERT_TRUE(map.is_map());
+ _checkloc(map , 1u, 0u, "this:");
+ _checkloc(map["this"] , 1u, 0u, "this:");
+ _checkloc(map["is"] , 2u, 0u, "is:");
+ _checkloc(map["a"] , 3u, 0u, "a:");
+ _checkloc(map["map"] , 4u, 0u, "map:");
+ _checkloc(map["and"] , 5u, 0u, "and:");
+ _checkloc(map["and"]["this"] , 6u, 2u, "this:");
+ _checkloc(map["and"]["this"]["as"] , 7u, 4u, "as:");
+ _checkloc(map["and"]["this"]["well"] , 8u, 4u, "well:");
+ _checkloc(map["and"]["aswell"] , 9u, 2u, "aswell:");
+ _checkloc(map["and"]["aswell"]["val"] , 11u, 4u, "val:");
+ _checkloc(map["and"]["aswell"]["hah"] , 12u, 4u, "hah:");
+}
+
+TEST(locations, seq_block_null)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ const Tree t = parser.parse_in_arena("myfile.yml", R"(---
+- ~
+- ~
+- notnull
+- ~
+- ~
+---
+- ~
+- - ~
+- - - ~
+- - - - ~
+- - - - - ~
+- - - - ~
+- - - ~
+- - ~
+- ~
+---
+-
+-
+-
+-
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+ -
+-
+)");
+ _checkloc(t.rootref() , 0u, 0u, "---");
+ _checkloc(t.docref(0) , 1u, 0u, "- ");
+ _checkloc(t.docref(0)[0], 1u, 2u, "~");
+ _checkloc(t.docref(0)[1], 2u, 2u, "~");
+ _checkloc(t.docref(0)[2], 3u, 2u, "notnull");
+ _checkloc(t.docref(0)[3], 4u, 2u, "~");
+ _checkloc(t.docref(0)[4], 5u, 2u, "~");
+ _checkloc(t.docref(1) , 7u, 0u, "- ");
+ _checkloc(t.docref(1)[0], 7u, 2u, "~");
+ _checkloc(t.docref(1)[1], 8u, 2u, "- ");
+ _checkloc(t.docref(1)[1][0], 8u, 4u, "~");
+ _checkloc(t.docref(1)[2], 9u, 2u, "- ");
+ _checkloc(t.docref(1)[2][0], 9u, 4u, "- ");
+ _checkloc(t.docref(1)[2][0][0], 9u, 6u, "~");
+ _checkloc(t.docref(1)[3], 10u, 2u, "- ");
+ _checkloc(t.docref(1)[3][0], 10u, 4u, "- ");
+ _checkloc(t.docref(1)[3][0][0], 10u, 6u, "- ");
+ _checkloc(t.docref(1)[3][0][0][0], 10u, 8u, "~");
+ _checkloc(t.docref(1)[4], 11u, 2u, "- ");
+ _checkloc(t.docref(1)[4][0], 11u, 4u, "- ");
+ _checkloc(t.docref(1)[4][0][0], 11u, 6u, "- ");
+ _checkloc(t.docref(1)[4][0][0][0], 11u, 8u, "- ");
+ _checkloc(t.docref(1)[4][0][0][0][0], 11u, 10u, "~");
+ _checkloc(t.docref(1)[5], 12u, 2u, "- ");
+ _checkloc(t.docref(1)[5][0], 12u, 4u, "- ");
+ _checkloc(t.docref(1)[5][0][0], 12u, 6u, "- ");
+ _checkloc(t.docref(1)[5][0][0][0], 12u, 8u, "~");
+ _checkloc(t.docref(1)[6], 13u, 2u, "- ");
+ _checkloc(t.docref(1)[6][0], 13u, 4u, "- ");
+ _checkloc(t.docref(1)[6][0][0], 13u, 6u, "~");
+ _checkloc(t.docref(1)[7], 14u, 2u, "- ");
+ _checkloc(t.docref(1)[7][0], 14u, 4u, "~");
+ _checkloc(t.docref(1)[8], 15u, 2u, "~");
+ _checkloc(t.docref(2) , 17u, 0u, "-");
+ _checkloc(t.docref(2)[0], 17u, 0u, "-");
+ _checkloc(t.docref(2)[1], 17u, 0u, "-");
+ _checkloc(t.docref(2)[2], 21u, 2u, "-");
+ _checkloc(t.docref(2)[3], 21u, 2u, "-");
+ _checkloc(t.docref(2)[3][0], 21u, 2u, "-");
+ _checkloc(t.docref(2)[3][1], 24u, 4u, "-");
+ _checkloc(t.docref(2)[3][2], 24u, 4u, "-");
+ _checkloc(t.docref(2)[3][2][0], 25u, 6u, "-");
+ _checkloc(t.docref(2)[3][2][0][0], 26u, 8u, "-");
+ _checkloc(t.docref(2)[3][2][0][0][0], 26u, 8u, "-");
+ _checkloc(t.docref(2)[3][2][0][0][1], 26u, 8u, "-");
+ _checkloc(t.docref(2)[3][2][1], 25u, 6u, "-");
+ _checkloc(t.docref(2)[3][3], 24u, 4u, "-"); // fix: this should be after the previous child
+ _checkloc(t.docref(2)[3][4], 21u, 2u, "-"); // fix: this should be after the previous child
+ _checkloc(t.docref(2)[3][5], 21u, 2u, "-"); // fix: this should be after the previous child
+}
+
+TEST(locations, map_block_null)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", R"(---
+~: v
+---
+null: v
+---
+ : v
+)");
+ _checkloc(t.rootref() , 0u, 0u, "---");
+ _checkloc(t.docref(0) , 1u, 0u, "");
+ _checkloc(t.docref(0)[0], 1u, 0u, "~");
+ _checkloc(t.docref(1) , 3u, 0u, "null");
+ _checkloc(t.docref(1)[0], 3u, 0u, "null");
+ _checkloc(t.docref(2) , 5u, 1u, "");
+ _checkloc(t.docref(2)[0], 5u, 3u, "");
+}
+
+TEST(locations, empty_seq)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", R"(---
+- []
+- []
+- notnull
+- []
+- []
+---
+- []
+---
+[]
+---
+key0: []
+key1: []
+key2: notnull
+key3: []
+key4: []
+---
+key: []
+)");
+ _checkloc(t.rootref() , 0u, 0u, "---");
+ _checkloc(t.docref(0) , 1u, 0u, "- ");
+ _checkloc(t.docref(0)[0], 1u, 2u, "[]");
+ _checkloc(t.docref(0)[1], 2u, 2u, "[]");
+ _checkloc(t.docref(0)[2], 3u, 2u, "notnull");
+ _checkloc(t.docref(0)[3], 4u, 2u, "[]");
+ _checkloc(t.docref(0)[4], 5u, 2u, "[]");
+ _checkloc(t.docref(1) , 7u, 0u, "- ");
+ _checkloc(t.docref(1)[0], 7u, 2u, "[]");
+ _checkloc(t.docref(2) , 9u, 0u, "[]");
+ _checkloc(t.docref(3) , 11u, 0u, "key0"); // WTF
+ _checkloc(t.docref(3)["key0"], 11u, 0u, "key0");
+ _checkloc(t.docref(3)["key1"], 12u, 0u, "key1");
+ _checkloc(t.docref(3)["key2"], 13u, 0u, "key2");
+ _checkloc(t.docref(3)["key3"], 14u, 0u, "key3");
+ _checkloc(t.docref(3)["key4"], 15u, 0u, "key4");
+}
+
+TEST(locations, empty_map)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", R"(---
+- {}
+- {}
+- notnull
+- {}
+- {}
+---
+- {}
+---
+{}
+---
+key0: {}
+key1: {}
+key2: notnull
+key3: {}
+key4: {}
+---
+key: {}
+)");
+ _checkloc(t.rootref() , 0u, 0u, "---");
+ _checkloc(t.docref(0) , 1u, 0u, "- ");
+ _checkloc(t.docref(0)[0], 1u, 2u, "{}");
+ _checkloc(t.docref(0)[1], 2u, 2u, "{}");
+ _checkloc(t.docref(0)[2], 3u, 2u, "notnull");
+ _checkloc(t.docref(0)[3], 4u, 2u, "{}");
+ _checkloc(t.docref(0)[4], 5u, 2u, "{}");
+ _checkloc(t.docref(1) , 7u, 0u, "- ");
+ _checkloc(t.docref(1)[0], 7u, 2u, "{}");
+ _checkloc(t.docref(2) , 9u, 0u, "{}");
+ _checkloc(t.docref(3) , 11u, 0u, "key0"); // WTF
+ _checkloc(t.docref(3)["key0"], 11u, 0u, "key0");
+ _checkloc(t.docref(3)["key1"], 12u, 0u, "key1");
+ _checkloc(t.docref(3)["key2"], 13u, 0u, "key2");
+ _checkloc(t.docref(3)["key3"], 14u, 0u, "key3");
+ _checkloc(t.docref(3)["key4"], 15u, 0u, "key4");
+}
+
+
+TEST(locations, seq_flow)
+{
+ Tree t;
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"([one,two,three,four,items])";
+ parser.parse_in_arena("myfile.yml", yaml, &t);
+ ConstNodeRef seq = t.rootref();
+ ASSERT_TRUE(seq.is_seq());
+ _checkloc(seq , 0u, 0u, "[");
+ _checkloc(seq[0], 0u, 1u, "one");
+ _checkloc(seq[1], 0u, 5u, "two");
+ _checkloc(seq[2], 0u, 9u, "three");
+ _checkloc(seq[3], 0u, 15u, "four");
+ _checkloc(seq[4], 0u, 20u, "items");
+}
+
+TEST(locations, map_flow)
+{
+ Tree t;
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"({one: item,two: items,three: items,four: items})";
+ parser.parse_in_arena("myfile.yml", yaml, &t);
+ ConstNodeRef map = t.rootref();
+ ASSERT_TRUE(map.is_map());
+ _checkloc(map , 0u, 0u, "{");
+ _checkloc(map[0], 0u, 1u, "one:");
+ _checkloc(map[1], 0u, 11u, "two:");
+ _checkloc(map[2], 0u, 22u, "three:");
+ _checkloc(map[3], 0u, 35u, "four:");
+}
+
+TEST(locations, seq_flow_nested)
+{
+ Tree t;
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"([
+ one,
+ two,
+ [woops, there, [ goes, master]],
+ five,
+ {wait: is, this: { really: a map? }, nope: [ a seq!, { had: you there, eehh: no. } ]},
+ yep,
+ it,
+ was
+])";
+ parser.parse_in_arena("myfile.yml", yaml, &t);
+ ConstNodeRef seq = t.rootref();
+ ASSERT_TRUE(seq.is_seq());
+ _checkloc(seq , 0u, 0u, "[");
+ _checkloc(seq[0] , 1u, 2u, "one");
+ _checkloc(seq[1] , 2u, 2u, "two");
+ _checkloc(seq[2] , 3u, 2u, "[");
+ _checkloc(seq[2][0] , 3u, 3u, "woops");
+ _checkloc(seq[2][1] , 3u, 14u, "there");
+ _checkloc(seq[2][2] , 3u, 24u, "[");
+ _checkloc(seq[2][2][0] , 3u, 27u, "goes");
+ _checkloc(seq[2][2][1] , 3u, 37u, "master");
+ _checkloc(seq[3] , 4u, 2u, "five");
+ _checkloc(seq[4] , 5u, 2u, "{");
+ _checkloc(seq[4]["wait"] , 5u, 3u, "wait");
+ _checkloc(seq[4]["this"] , 5u, 15u, "this");
+ _checkloc(seq[4]["this"]["really"] , 5u, 27u, "really");
+ _checkloc(seq[4]["nope"] , 5u, 47u, "nope");
+ _checkloc(seq[4]["nope"][0] , 5u, 56u, "a seq!");
+ _checkloc(seq[4]["nope"][1] , 5u, 64u, "{");
+ _checkloc(seq[4]["nope"][1]["had"] , 5u, 66u, "had");
+ _checkloc(seq[4]["nope"][1]["eehh"], 5u, 83u, "eehh");
+ _checkloc(seq[5] , 6u, 2u, "yep");
+ _checkloc(seq[6] , 7u, 2u, "it");
+ _checkloc(seq[7] , 8u, 2u, "was");
+}
+
+TEST(locations, grow_array)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree t = parser.parse_in_arena("myfile.yml", "docval");
+ _checkloc(t.rootref(), 0u, 0u, "docval");
+ t = parser.parse_in_arena("myfile.yml", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndocval");
+ _checkloc(t.rootref(), 47u, 0u, "docval");
+}
+
+// do a test with a buffer size up to 30 lines to ensure hitting
+// the binary search path
+TEST(locations, small_array)
+{
+ Tree t;
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"(---
+foo: yes
+bar:
+ - 1
+ - 2
+baz:
+ - 1_
+ - 2_
+ - 3_
+)";
+ parser.parse_in_arena("myfile.yml", yaml, &t);
+ ConstNodeRef stream = t.rootref();
+ ConstNodeRef map = t.docref(0);
+ ASSERT_TRUE(map.is_map());
+ ASSERT_TRUE(map.is_doc());
+ _checkloc(stream , 0u, 0u, "---");
+ _checkloc(map , 1u, 0u, "foo");
+ _checkloc(map["foo"] , 1u, 0u, "foo");
+ _checkloc(map["bar"] , 2u, 0u, "bar");
+ _checkloc(map["bar"][0], 3u, 4u, "1");
+ _checkloc(map["bar"][1], 4u, 4u, "2");
+ _checkloc(map["baz"] , 5u, 0u, "baz");
+ _checkloc(map["baz"][0], 6u, 6u, "1_");
+ _checkloc(map["baz"][1], 7u, 6u, "2_");
+ _checkloc(map["baz"][2], 8u, 10u, "3_");
+}
+
+// do a test with a buffer of at least 30 lines to ensure hitting
+// the binary search path
+TEST(locations, large_array)
+{
+ Tree t;
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ csubstr yaml = R"(---
+foo1: definitely # 1
+bar1:
+ - 1
+ - 2
+baz1:
+ - 1_
+ - 2_
+ - 3_
+
+
+
+foo2: definitely # 12
+bar2:
+ - 1
+ - 2
+baz2:
+ - 1_
+ - 2_
+ - 3_
+
+
+
+foo3: definitely # 23
+bar3:
+ - 1
+ - 2
+baz3:
+ - 1_
+ - 2_
+ - 3_
+
+
+
+foo4: definitely # 34
+bar4:
+ - 1
+ - 2
+baz4:
+ - 1_
+ - 2_
+ - 3_
+
+
+
+foo5: definitely # 45
+bar5:
+ - 1
+ - 2
+baz5:
+ - 1_
+ - 2_
+ - 3_
+
+
+
+foo6: definitely # 56
+bar6:
+ - 1
+ - 2
+baz6:
+ - 1_
+ - 2_
+ - 3_
+)";
+ parser.parse_in_arena("myfile.yml", yaml, &t);
+ ConstNodeRef map = t.docref(0);
+ ASSERT_TRUE(map.is_map());
+ ASSERT_TRUE(map.is_doc());
+ _checkloc(t.rootref() , 0u, 0u, "---");
+ _checkloc(map , 1u, 0u, "foo1");
+ _checkloc(map["foo1"] , 0u+1u, 0u, "foo1");
+ _checkloc(map["bar1"] , 0u+2u, 0u, "bar1");
+ _checkloc(map["bar1"][0], 0u+3u, 4u, "1");
+ _checkloc(map["bar1"][1], 0u+4u, 4u, "2");
+ _checkloc(map["baz1"] , 0u+5u, 0u, "baz");
+ _checkloc(map["baz1"][0], 0u+6u, 6u, "1_");
+ _checkloc(map["baz1"][1], 0u+7u, 6u, "2_");
+ _checkloc(map["baz1"][2], 0u+8u, 10u, "3_");
+ //
+ _checkloc(map["foo2"] , 11u+1u, 0u, "foo2");
+ _checkloc(map["bar2"] , 11u+2u, 0u, "bar2");
+ _checkloc(map["bar2"][0], 11u+3u, 4u, "1");
+ _checkloc(map["bar2"][1], 11u+4u, 4u, "2");
+ _checkloc(map["baz2"] , 11u+5u, 0u, "baz2");
+ _checkloc(map["baz2"][0], 11u+6u, 6u, "1_");
+ _checkloc(map["baz2"][1], 11u+7u, 6u, "2_");
+ _checkloc(map["baz2"][2], 11u+8u, 10u, "3_");
+ //
+ _checkloc(map["foo3"] , 22u+1u, 0u, "foo3");
+ _checkloc(map["bar3"] , 22u+2u, 0u, "bar3");
+ _checkloc(map["bar3"][0], 22u+3u, 4u, "1");
+ _checkloc(map["bar3"][1], 22u+4u, 4u, "2");
+ _checkloc(map["baz3"] , 22u+5u, 0u, "baz3");
+ _checkloc(map["baz3"][0], 22u+6u, 6u, "1_");
+ _checkloc(map["baz3"][1], 22u+7u, 6u, "2_");
+ _checkloc(map["baz3"][2], 22u+8u, 10u, "3_");
+ //
+ _checkloc(map["foo4"] , 33u+1u, 0u, "foo4");
+ _checkloc(map["bar4"] , 33u+2u, 0u, "bar4");
+ _checkloc(map["bar4"][0], 33u+3u, 4u, "1");
+ _checkloc(map["bar4"][1], 33u+4u, 4u, "2");
+ _checkloc(map["baz4"] , 33u+5u, 0u, "baz4");
+ _checkloc(map["baz4"][0], 33u+6u, 6u, "1_");
+ _checkloc(map["baz4"][1], 33u+7u, 6u, "2_");
+ _checkloc(map["baz4"][2], 33u+8u, 10u, "3_");
+ //
+ _checkloc(map["foo5"] , 44u+1u, 0u, "foo5");
+ _checkloc(map["bar5"] , 44u+2u, 0u, "bar5");
+ _checkloc(map["bar5"][0], 44u+3u, 4u, "1");
+ _checkloc(map["bar5"][1], 44u+4u, 4u, "2");
+ _checkloc(map["baz5"] , 44u+5u, 0u, "baz5");
+ _checkloc(map["baz5"][0], 44u+6u, 6u, "1_");
+ _checkloc(map["baz5"][1], 44u+7u, 6u, "2_");
+ _checkloc(map["baz5"][2], 44u+8u, 10u, "3_");
+ //
+ _checkloc(map["foo6"] , 55u+1u, 0u, "foo6");
+ _checkloc(map["bar6"] , 55u+2u, 0u, "bar6");
+ _checkloc(map["bar6"][0], 55u+3u, 4u, "1");
+ _checkloc(map["bar6"][1], 55u+4u, 4u, "2");
+ _checkloc(map["baz6"] , 55u+5u, 0u, "baz6");
+ _checkloc(map["baz6"][0], 55u+6u, 6u, "1_");
+ _checkloc(map["baz6"][1], 55u+7u, 6u, "2_");
+ _checkloc(map["baz6"][2], 55u+8u, 10u, "3_");
+}
+
+
+TEST(locations, issue260_0)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree tree = parser.parse_in_arena("source.yml", R"(Body:
+ - Id: 1
+ Name: Apple
+ Script: |
+ Line One
+ Line Two
+ - Id: 2
+ Name: Cat
+ Script: |
+ Line One
+ Line Two
+ - Id: 3
+ Name: Dog
+ Script: |
+ Line One
+ Line Two)");
+ EXPECT_EQ(tree["Body"][2]["Name"].val(), "Dog");
+ EXPECT_EQ(parser.location(tree["Body"][2]["Name"]).line, 12u);
+}
+
+TEST(locations, issue260_1)
+{
+ ParserOptions opts = ParserOptions().locations(true);
+ Parser parser(opts);
+ Tree tree = parser.parse_in_arena("source.yml", R"(Body: # 0
+ - Id: 1 # line 1
+ Name: Apple
+ - Id: 2 # line 3
+ Name: Cat
+ Script: |
+ Line One
+ Line Two
+ - Id: 3 # line 8
+ Name: Cat2
+ Script: >
+ Line One
+ Line Two
+ - Id: 4 # line 13
+ Name: Cat3
+ Script: "
+ Line One
+ Line Two"
+ - Id: 5 # line 18
+ Name: Cat4
+ Script: '
+ Line One
+ Line Two'
+ - Id: 5 # line 23
+ Name: Cat5
+ Script:
+ Line One
+ Line Two
+ - Id: 6 # line 28
+ Name: Dog
+ Script: |
+ Line One
+ Line Two)");
+ EXPECT_EQ(parser.location(tree["Body"][0]).line, 1u);
+ EXPECT_EQ(parser.location(tree["Body"][1]).line, 3u);
+ EXPECT_EQ(parser.location(tree["Body"][2]).line, 8u);
+ EXPECT_EQ(parser.location(tree["Body"][3]).line, 13u);
+ EXPECT_EQ(parser.location(tree["Body"][4]).line, 18u);
+ EXPECT_EQ(parser.location(tree["Body"][5]).line, 23u);
+ EXPECT_EQ(parser.location(tree["Body"][6]).line, 28u);
+ EXPECT_EQ(parser.location(tree["Body"][0]["Id"]).line, 1u);
+ EXPECT_EQ(parser.location(tree["Body"][1]["Id"]).line, 3u);
+ EXPECT_EQ(parser.location(tree["Body"][2]["Id"]).line, 8u);
+ EXPECT_EQ(parser.location(tree["Body"][3]["Id"]).line, 13u);
+ EXPECT_EQ(parser.location(tree["Body"][4]["Id"]).line, 18u);
+ EXPECT_EQ(parser.location(tree["Body"][5]["Id"]).line, 23u);
+ EXPECT_EQ(parser.location(tree["Body"][6]["Id"]).line, 28u);
+ EXPECT_EQ(parser.location(tree["Body"][0]["Name"]).line, 1u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][1]["Name"]).line, 3u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][2]["Name"]).line, 8u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][3]["Name"]).line, 13u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][4]["Name"]).line, 18u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][5]["Name"]).line, 23u+1u);
+ EXPECT_EQ(parser.location(tree["Body"][6]["Name"]).line, 28u+1u);
+}
+
+
+
+// The other test executables are written to contain the declarative-style
+// YmlTestCases. This executable does not have any but the build setup
+// assumes it does, and links with the test lib, which requires an existing
+// get_case() function. So this is here to act as placeholder until (if?)
+// proper test cases are added here. This was detected in #47 (thanks
+// @cburgard).
+Case const* get_case(csubstr)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_map_of_seq.cpp b/thirdparty/ryml/test/test_map_of_seq.cpp
new file mode 100644
index 000000000..fc577223e
--- /dev/null
+++ b/thirdparty/ryml/test/test_map_of_seq.cpp
@@ -0,0 +1,201 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(MAP_OF_SEQ)
+{
+
+ADD_CASE_TO_GROUP("map of empty seqs",
+R"({foo: [], bar: [], baz: []})",
+ L{
+ N(KEYSEQ, "foo", L()),
+ N(KEYSEQ, "bar", L()),
+ N(KEYSEQ, "baz", L()),
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs, one line",
+R"({men: [John Smith, Bill Jones], women: [Mary Smith, Susan Williams]})",
+ L{
+ N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
+ N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs",
+R"(
+men:
+ - John Smith
+ - Bill Jones
+women:
+ - Mary Smith
+ - Susan Williams
+)",
+ L{
+ N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
+ N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs, not indented",
+R"(
+men:
+- John Smith
+- Bill Jones
+women:
+- Mary Smith
+- Susan Williams
+)",
+ L{
+ N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
+ N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs, not indented, more",
+R"(
+product:
+- sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+tax: 1234.5 # we must jump two levels
+product2:
+ subproduct1:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ subproduct2:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ tax2: 789.10 # we must jump two levels
+tax3: 1234.5
+product3:
+ subproduct1:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ subproduct2:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ # a comment here, will it ruin parsing?
+ tax2: 789.10 # we must jump two levels
+tax4: 1234.5
+product4:
+ subproduct1:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ subproduct2:
+ - sku: BL4438H
+ quantity: 1
+ description: Super Hoop
+ price: 2392.00 # jumping one level here would be wrong.
+ # what about here?
+ tax2: 789.10 # we must jump two levels
+tax5: 1234.5
+)",
+L{
+ N("product", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("tax", "1234.5"),
+ N("product2", L{
+ N("subproduct1", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("subproduct2", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("tax2", "789.10"),
+ }),
+ N("tax3", "1234.5"),
+ N("product3", L{
+ N("subproduct1", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("subproduct2", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("tax2", "789.10"),
+ }),
+ N("tax4", "1234.5"),
+ N("product4", L{
+ N("subproduct1", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("subproduct2", L{
+ N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
+ }),
+ N("tax2", "789.10"),
+ }),
+ N("tax5", "1234.5"),
+});
+
+ADD_CASE_TO_GROUP("map of seqs, next line",
+R"(
+men:
+ -
+ John Smith
+ -
+ Bill Jones
+women:
+ -
+ Mary Smith
+ -
+ Susan Williams
+)",
+ L{
+ N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
+ N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs, next line without space",
+R"(
+men:
+ -
+ John Smith
+ -
+ Bill Jones
+women:
+ -
+ Mary Smith
+ -
+ Susan Williams
+)",
+ L{
+ N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
+ N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
+ }
+);
+
+ADD_CASE_TO_GROUP("map of seqs, deal with unk",
+R"(
+skip_commits:
+ files:
+ - a
+ - b
+ - c
+ - d
+ - e
+)",
+L{
+ N("skip_commits", L{N("files",
+ L{N("a"), N("b"), N("c"), N("d"), N("e")}
+ )}),
+}
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_merge.cpp b/thirdparty/ryml/test/test_merge.cpp
new file mode 100644
index 000000000..873412115
--- /dev/null
+++ b/thirdparty/ryml/test/test_merge.cpp
@@ -0,0 +1,225 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/std.hpp>
+#include <c4/yml/yml.hpp>
+#endif
+#include <gtest/gtest.h>
+
+#include "./test_case.hpp"
+
+namespace c4 {
+namespace yml {
+
+// The other test executables are written to contain the declarative-style
+// YmlTestCases. This executable does not have any but the build setup
+// assumes it does, and links with the test lib, which requires an existing
+// get_case() function. So this is here to act as placeholder until (if?)
+// proper test cases are added here. This was detected in #47 (thanks
+// @cburgard).
+Case const* get_case(csubstr)
+{
+ return nullptr;
+}
+
+
+void test_merge(std::initializer_list<csubstr> li, csubstr expected)
+{
+ Tree loaded, merged, ref;
+
+ parse_in_arena(expected, &ref);
+
+ // make sure the arena in the loaded tree is never resized
+ size_t arena_dim = 2;
+ for(csubstr src : li)
+ {
+ arena_dim += src.len;
+ }
+ loaded.reserve_arena(arena_dim);
+
+ for(csubstr src : li)
+ {
+ loaded.clear(); // do not clear the arena of the loaded tree
+ parse_in_arena(src, &loaded);
+ merged.merge_with(&loaded);
+ }
+
+ auto buf_result = emitrs_yaml<std::string>(merged);
+ auto buf_expected = emitrs_yaml<std::string>(ref);
+
+ EXPECT_EQ(buf_result, buf_expected);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(merge, basic)
+{
+ test_merge(
+ {
+ "{a: 0, b: 1}",
+ "{a: 1, c: 20}"
+ },
+ "{a: 1, b: 1, c: 20}"
+ );
+}
+
+TEST(merge, val_to_seq)
+{
+ test_merge(
+ {
+ "{a: 0, b: 1}",
+ "{a: [1, 2]}"
+ },
+ "{a: [1, 2], b: 1}"
+ );
+}
+
+TEST(merge, seq_to_val)
+{
+ test_merge(
+ {
+ "{a: [1, 2]}",
+ "{a: 0, b: 1}",
+ },
+ "{a: 0, b: 1}"
+ );
+}
+
+TEST(merge, val_to_map)
+{
+ test_merge(
+ {
+ "{a: 0, b: 1}",
+ "{a: {c: 10, d: 20}}"
+ },
+ "{a: {c: 10, d: 20}, b: 1}"
+ );
+}
+
+TEST(merge, map_to_val)
+{
+ test_merge(
+ {
+ "{a: {c: 10, d: 20}}",
+ "{a: 0, b: 1}",
+ },
+ "{a: 0, b: 1}"
+ );
+}
+
+TEST(merge, seq_no_overlap_explicit)
+{
+ test_merge(
+ {"[0, 1, 2]", "[3, 4, 5]", "[6, 7, 8]"},
+ "[0, 1, 2, 3, 4, 5, 6, 7, 8]"
+ );
+}
+
+
+TEST(merge, seq_no_overlap_implicit)
+{
+ test_merge(
+ {"0, 1, 2", "3, 4, 5", "6, 7, 8"},
+ "0, 1, 2, 3, 4, 5, 6, 7, 8"
+ );
+}
+
+
+TEST(merge, seq_overlap_explicit)
+{
+ test_merge(
+ {"[0, 1, 2]", "[1, 2, 3]", "[2, 3, 4]"},
+ "[0, 1, 2, 1, 2, 3, 2, 3, 4]"
+ // or this? "[0, 1, 2, 3, 4]"
+ );
+}
+
+
+TEST(merge, seq_overlap_implicit)
+{
+ // now a bit more difficult
+ test_merge(
+ {"0, 1, 2", "1, 2, 3", "2, 3, 4"},
+ "0, 1, 2, 1, 2, 3, 2, 3, 4"
+ // or this? "0, 1, 2, 3, 4"
+ );
+}
+
+
+TEST(merge, map_orthogonal)
+{
+ test_merge(
+ {"a: 0", "b: 1", "c: 2"},
+ "{a: 0, b: 1, c: 2}"
+ );
+}
+
+
+TEST(merge, map_overriding)
+{
+ test_merge(
+ {
+ "a: 0",
+ "{a: 1, b: 1}",
+ "c: 2"
+ },
+ "{a: 1, b: 1, c: 2}"
+ );
+}
+
+TEST(merge, map_overriding_multiple)
+{
+ test_merge(
+ {
+ "a: 0",
+ "{a: 1, b: 1}",
+ "c: 2",
+ "a: 2",
+ "a: 3",
+ "c: 4",
+ "c: 5",
+ "a: 4",
+ },
+ "{a: 4, b: 1, c: 5}"
+ );
+}
+
+
+TEST(merge, seq_nested_in_map)
+{
+ test_merge(
+ {
+ "{a: 0, seq: [a, b, c], d: 2}",
+ "{a: 1, seq: [d, e, f], d: 3, c: 3}"
+ },
+ "{a: 1, seq: [a, b, c, d, e, f], d: 3, c: 3}"
+ );
+}
+
+
+TEST(merge, seq_nested_in_map_override_with_map)
+{
+ test_merge(
+ {
+ "{a: 0, ovr: [a, b, c], d: 2}",
+ "{a: 1, ovr: {d: 0, b: 1, c: 2}, d: 3, c: 3}"
+ },
+ "{a: 1, ovr: {d: 0, b: 1, c: 2}, d: 3, c: 3}"
+ );
+}
+
+
+TEST(merge, seq_nested_in_map_override_with_keyval)
+{
+ test_merge(
+ {
+ "{a: 0, ovr: [a, b, c], d: 2}",
+ "{a: 1, ovr: foo, d: 3, c: 3}"
+ },
+ "{a: 1, ovr: foo, d: 3, c: 3}"
+ );
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_mapx2.cpp b/thirdparty/ryml/test/test_nested_mapx2.cpp
new file mode 100644
index 000000000..b1856fa08
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_mapx2.cpp
@@ -0,0 +1,73 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(NESTED_MAPX2)
+{
+
+ADD_CASE_TO_GROUP("nested map x2, explicit, same line",
+R"({foo: {foo0: 00, bar0: 01, baz0: 02}, bar: {foo1: 10, bar1: 11, baz1: 12}, baz: {foo2: 20, bar2: 21, baz2: 22}})",
+ L{
+ N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
+ N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
+ N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested map x2, explicit",
+R"({
+foo: {foo0: 00, bar0: 01, baz0: 02},
+bar: {foo1: 10, bar1: 11, baz1: 12},
+baz: {foo2: 20, bar2: 21, baz2: 22}
+})",
+ L{
+ N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
+ N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
+ N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested map x2",
+R"(
+foo:
+ foo0: 00
+ bar0: 01
+ baz0: 02
+bar:
+ foo1: 10
+ bar1: 11
+ baz1: 12
+baz:
+ foo2: 20
+ bar2: 21
+ baz2: 22
+)",
+ L{
+ N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
+ N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
+ N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
+ }
+);
+
+
+ADD_CASE_TO_GROUP("nested map x2, commented",
+ R"(
+send_to:
+ #host: 192.168.1.100
+ #port: 7000
+ host: 192.168.1.101
+ port: 7001
+ #host: 192.168.1.102
+ #port: 7002
+)",
+ L{
+ N("send_to", L{
+ N("host", "192.168.1.101"),
+ N("port", "7001") })
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_mapx3.cpp b/thirdparty/ryml/test/test_nested_mapx3.cpp
new file mode 100644
index 000000000..2ae163475
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_mapx3.cpp
@@ -0,0 +1,103 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(NESTED_MAPX3)
+{
+
+ADD_CASE_TO_GROUP("nested map x3, explicit",
+R"({
+ foo0: {
+ foo1: {foo2: 000, bar2: 001, baz2: 002},
+ bar1: {foo2: 010, bar2: 011, baz2: 012},
+ baz1: {foo2: 020, bar2: 021, baz2: 022}
+ },
+ bar0: {
+ foo1: {foo2: 100, bar2: 101, baz2: 102},
+ bar1: {foo2: 110, bar2: 111, baz2: 112},
+ baz1: {foo2: 120, bar2: 121, baz2: 122}
+ },
+ baz0: {
+ foo1: {foo2: 200, bar2: 201, baz2: 202},
+ bar1: {foo2: 210, bar2: 211, baz2: 212},
+ baz1: {foo2: 220, bar2: 221, baz2: 222}
+ }
+})",
+ L{
+ N{"foo0", L{
+ N{"foo1", L{N{"foo2", "000"}, N{"bar2", "001"}, N{"baz2", "002"}}},
+ N{"bar1", L{N{"foo2", "010"}, N{"bar2", "011"}, N{"baz2", "012"}}},
+ N{"baz1", L{N{"foo2", "020"}, N{"bar2", "021"}, N{"baz2", "022"}}} }},
+ N{"bar0", L{
+ N{"foo1", L{N{"foo2", "100"}, N{"bar2", "101"}, N{"baz2", "102"}}},
+ N{"bar1", L{N{"foo2", "110"}, N{"bar2", "111"}, N{"baz2", "112"}}},
+ N{"baz1", L{N{"foo2", "120"}, N{"bar2", "121"}, N{"baz2", "122"}}} }},
+ N{"baz0", L{
+ N{"foo1", L{N{"foo2", "200"}, N{"bar2", "201"}, N{"baz2", "202"}}},
+ N{"bar1", L{N{"foo2", "210"}, N{"bar2", "211"}, N{"baz2", "212"}}},
+ N{"baz1", L{N{"foo2", "220"}, N{"bar2", "221"}, N{"baz2", "222"}}} }},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested map x3",
+R"(
+foo0:
+ foo1:
+ foo2: 000
+ bar2: 001
+ baz2: 002
+ bar1:
+ foo2: 010
+ bar2: 011
+ baz2: 012
+ baz1:
+ foo2: 020
+ bar2: 021
+ baz2: 022
+bar0:
+ foo1:
+ foo2: 100
+ bar2: 101
+ baz2: 102
+ bar1:
+ foo2: 110
+ bar2: 111
+ baz2: 112
+ baz1:
+ foo2: 120
+ bar2: 121
+ baz2: 122
+baz0:
+ foo1:
+ foo2: 200
+ bar2: 201
+ baz2: 202
+ bar1:
+ foo2: 210
+ bar2: 211
+ baz2: 212
+ baz1:
+ foo2: 220
+ bar2: 221
+ baz2: 222
+)",
+ L{
+ N{"foo0", L{
+ N{"foo1", L{N{"foo2", "000"}, N{"bar2", "001"}, N{"baz2", "002"}}},
+ N{"bar1", L{N{"foo2", "010"}, N{"bar2", "011"}, N{"baz2", "012"}}},
+ N{"baz1", L{N{"foo2", "020"}, N{"bar2", "021"}, N{"baz2", "022"}}} }},
+ N{"bar0", L{
+ N{"foo1", L{N{"foo2", "100"}, N{"bar2", "101"}, N{"baz2", "102"}}},
+ N{"bar1", L{N{"foo2", "110"}, N{"bar2", "111"}, N{"baz2", "112"}}},
+ N{"baz1", L{N{"foo2", "120"}, N{"bar2", "121"}, N{"baz2", "122"}}} }},
+ N{"baz0", L{
+ N{"foo1", L{N{"foo2", "200"}, N{"bar2", "201"}, N{"baz2", "202"}}},
+ N{"bar1", L{N{"foo2", "210"}, N{"bar2", "211"}, N{"baz2", "212"}}},
+ N{"baz1", L{N{"foo2", "220"}, N{"bar2", "221"}, N{"baz2", "222"}}} }},
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_mapx4.cpp b/thirdparty/ryml/test/test_nested_mapx4.cpp
new file mode 100644
index 000000000..1d40f2cb1
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_mapx4.cpp
@@ -0,0 +1,190 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(NESTED_MAPX4)
+{
+
+ADD_CASE_TO_GROUP("nested map x4, explicit",
+R"({
+ foo0: {
+ foo1: { foo2: {foo3: 0000, bar3: 0001, baz3: 0002}, bar2: {foo3: 0010, bar3: 0011, baz3: 0012}, baz2: {foo3: 0020, bar3: 0021, baz3: 0022} },
+ bar1: { foo2: {foo3: 0100, bar3: 0101, baz3: 0102}, bar2: {foo3: 0110, bar3: 0111, baz3: 0112}, baz2: {foo3: 0120, bar3: 0121, baz3: 0122} },
+ baz1: { foo2: {foo3: 0200, bar3: 0201, baz3: 0202}, bar2: {foo3: 0210, bar3: 0211, baz3: 0212}, baz2: {foo3: 0220, bar3: 0221, baz3: 0222} },
+ },
+ bar0: {
+ foo1: { foo2: {foo3: 1000, bar3: 1001, baz3: 1002}, bar2: {foo3: 1010, bar3: 1011, baz3: 1012}, baz2: {foo3: 1020, bar3: 1021, baz3: 1022} },
+ bar1: { foo2: {foo3: 1100, bar3: 1101, baz3: 1102}, bar2: {foo3: 1110, bar3: 1111, baz3: 1112}, baz2: {foo3: 1120, bar3: 1121, baz3: 1122} },
+ baz1: { foo2: {foo3: 1200, bar3: 1201, baz3: 1202}, bar2: {foo3: 1210, bar3: 1211, baz3: 1212}, baz2: {foo3: 1220, bar3: 1221, baz3: 1222} },
+ },
+ baz0: {
+ foo1: { foo2: {foo3: 2000, bar3: 2001, baz3: 2002}, bar2: {foo3: 2010, bar3: 2011, baz3: 2012}, baz2: {foo3: 2020, bar3: 2021, baz3: 2022} },
+ bar1: { foo2: {foo3: 2100, bar3: 2101, baz3: 2102}, bar2: {foo3: 2110, bar3: 2111, baz3: 2112}, baz2: {foo3: 2120, bar3: 2121, baz3: 2122} },
+ baz1: { foo2: {foo3: 2200, bar3: 2201, baz3: 2202}, bar2: {foo3: 2210, bar3: 2211, baz3: 2212}, baz2: {foo3: 2220, bar3: 2221, baz3: 2222} },
+ },
+})",
+ L{
+ N("foo0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "0000"), N("bar3", "0001"), N("baz3", "0002")}), N("bar2", L{N("foo3", "0010"), N("bar3", "0011"), N("baz3", "0012")}), N("baz2", L{N("foo3", "0020"), N("bar3", "0021"), N("baz3", "0022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "0100"), N("bar3", "0101"), N("baz3", "0102")}), N("bar2", L{N("foo3", "0110"), N("bar3", "0111"), N("baz3", "0112")}), N("baz2", L{N("foo3", "0120"), N("bar3", "0121"), N("baz3", "0122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "0200"), N("bar3", "0201"), N("baz3", "0202")}), N("bar2", L{N("foo3", "0210"), N("bar3", "0211"), N("baz3", "0212")}), N("baz2", L{N("foo3", "0220"), N("bar3", "0221"), N("baz3", "0222")})}),
+ }),
+ N("bar0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "1000"), N("bar3", "1001"), N("baz3", "1002")}), N("bar2", L{N("foo3", "1010"), N("bar3", "1011"), N("baz3", "1012")}), N("baz2", L{N("foo3", "1020"), N("bar3", "1021"), N("baz3", "1022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "1100"), N("bar3", "1101"), N("baz3", "1102")}), N("bar2", L{N("foo3", "1110"), N("bar3", "1111"), N("baz3", "1112")}), N("baz2", L{N("foo3", "1120"), N("bar3", "1121"), N("baz3", "1122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "1200"), N("bar3", "1201"), N("baz3", "1202")}), N("bar2", L{N("foo3", "1210"), N("bar3", "1211"), N("baz3", "1212")}), N("baz2", L{N("foo3", "1220"), N("bar3", "1221"), N("baz3", "1222")})}),
+ }),
+ N("baz0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "2000"), N("bar3", "2001"), N("baz3", "2002")}), N("bar2", L{N("foo3", "2010"), N("bar3", "2011"), N("baz3", "2012")}), N("baz2", L{N("foo3", "2020"), N("bar3", "2021"), N("baz3", "2022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "2100"), N("bar3", "2101"), N("baz3", "2102")}), N("bar2", L{N("foo3", "2110"), N("bar3", "2111"), N("baz3", "2112")}), N("baz2", L{N("foo3", "2120"), N("bar3", "2121"), N("baz3", "2122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "2200"), N("bar3", "2201"), N("baz3", "2202")}), N("bar2", L{N("foo3", "2210"), N("bar3", "2211"), N("baz3", "2212")}), N("baz2", L{N("foo3", "2220"), N("bar3", "2221"), N("baz3", "2222")})}),
+ })
+ }
+);
+
+ADD_CASE_TO_GROUP("nested map x4",
+R"(
+foo0:
+ foo1:
+ foo2:
+ foo3: 0000
+ bar3: 0001
+ baz3: 0002
+ bar2:
+ foo3: 0010
+ bar3: 0011
+ baz3: 0012
+ baz2:
+ foo3: 0020
+ bar3: 0021
+ baz3: 0022
+ bar1:
+ foo2:
+ foo3: 0100
+ bar3: 0101
+ baz3: 0102
+ bar2:
+ foo3: 0110
+ bar3: 0111
+ baz3: 0112
+ baz2:
+ foo3: 0120
+ bar3: 0121
+ baz3: 0122
+ baz1:
+ foo2:
+ foo3: 0200
+ bar3: 0201
+ baz3: 0202
+ bar2:
+ foo3: 0210
+ bar3: 0211
+ baz3: 0212
+ baz2:
+ foo3: 0220
+ bar3: 0221
+ baz3: 0222
+bar0:
+ foo1:
+ foo2:
+ foo3: 1000
+ bar3: 1001
+ baz3: 1002
+ bar2:
+ foo3: 1010
+ bar3: 1011
+ baz3: 1012
+ baz2:
+ foo3: 1020
+ bar3: 1021
+ baz3: 1022
+ bar1:
+ foo2:
+ foo3: 1100
+ bar3: 1101
+ baz3: 1102
+ bar2:
+ foo3: 1110
+ bar3: 1111
+ baz3: 1112
+ baz2:
+ foo3: 1120
+ bar3: 1121
+ baz3: 1122
+ baz1:
+ foo2:
+ foo3: 1200
+ bar3: 1201
+ baz3: 1202
+ bar2:
+ foo3: 1210
+ bar3: 1211
+ baz3: 1212
+ baz2:
+ foo3: 1220
+ bar3: 1221
+ baz3: 1222
+baz0:
+ foo1:
+ foo2:
+ foo3: 2000
+ bar3: 2001
+ baz3: 2002
+ bar2:
+ foo3: 2010
+ bar3: 2011
+ baz3: 2012
+ baz2:
+ foo3: 2020
+ bar3: 2021
+ baz3: 2022
+ bar1:
+ foo2:
+ foo3: 2100
+ bar3: 2101
+ baz3: 2102
+ bar2:
+ foo3: 2110
+ bar3: 2111
+ baz3: 2112
+ baz2:
+ foo3: 2120
+ bar3: 2121
+ baz3: 2122
+ baz1:
+ foo2:
+ foo3: 2200
+ bar3: 2201
+ baz3: 2202
+ bar2:
+ foo3: 2210
+ bar3: 2211
+ baz3: 2212
+ baz2:
+ foo3: 2220
+ bar3: 2221
+ baz3: 2222
+)",
+ L{
+ N("foo0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "0000"), N("bar3", "0001"), N("baz3", "0002")}), N("bar2", L{N("foo3", "0010"), N("bar3", "0011"), N("baz3", "0012")}), N("baz2", L{N("foo3", "0020"), N("bar3", "0021"), N("baz3", "0022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "0100"), N("bar3", "0101"), N("baz3", "0102")}), N("bar2", L{N("foo3", "0110"), N("bar3", "0111"), N("baz3", "0112")}), N("baz2", L{N("foo3", "0120"), N("bar3", "0121"), N("baz3", "0122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "0200"), N("bar3", "0201"), N("baz3", "0202")}), N("bar2", L{N("foo3", "0210"), N("bar3", "0211"), N("baz3", "0212")}), N("baz2", L{N("foo3", "0220"), N("bar3", "0221"), N("baz3", "0222")})}),
+ }),
+ N("bar0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "1000"), N("bar3", "1001"), N("baz3", "1002")}), N("bar2", L{N("foo3", "1010"), N("bar3", "1011"), N("baz3", "1012")}), N("baz2", L{N("foo3", "1020"), N("bar3", "1021"), N("baz3", "1022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "1100"), N("bar3", "1101"), N("baz3", "1102")}), N("bar2", L{N("foo3", "1110"), N("bar3", "1111"), N("baz3", "1112")}), N("baz2", L{N("foo3", "1120"), N("bar3", "1121"), N("baz3", "1122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "1200"), N("bar3", "1201"), N("baz3", "1202")}), N("bar2", L{N("foo3", "1210"), N("bar3", "1211"), N("baz3", "1212")}), N("baz2", L{N("foo3", "1220"), N("bar3", "1221"), N("baz3", "1222")})}),
+ }),
+ N("baz0", L{
+ N("foo1", L{N("foo2", L{N("foo3", "2000"), N("bar3", "2001"), N("baz3", "2002")}), N("bar2", L{N("foo3", "2010"), N("bar3", "2011"), N("baz3", "2012")}), N("baz2", L{N("foo3", "2020"), N("bar3", "2021"), N("baz3", "2022")})}),
+ N("bar1", L{N("foo2", L{N("foo3", "2100"), N("bar3", "2101"), N("baz3", "2102")}), N("bar2", L{N("foo3", "2110"), N("bar3", "2111"), N("baz3", "2112")}), N("baz2", L{N("foo3", "2120"), N("bar3", "2121"), N("baz3", "2122")})}),
+ N("baz1", L{N("foo2", L{N("foo3", "2200"), N("bar3", "2201"), N("baz3", "2202")}), N("bar2", L{N("foo3", "2210"), N("bar3", "2211"), N("baz3", "2212")}), N("baz2", L{N("foo3", "2220"), N("bar3", "2221"), N("baz3", "2222")})}),
+ })
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_seqx2.cpp b/thirdparty/ryml/test/test_nested_seqx2.cpp
new file mode 100644
index 000000000..1361ae0ab
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_seqx2.cpp
@@ -0,0 +1,133 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+CASE_GROUP(NESTED_SEQX2)
+{
+
+ADD_CASE_TO_GROUP("nested seq x2, empty, oneline",
+R"([[], [], []])",
+ L{SEQ, SEQ, SEQ}
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, explicit, same line",
+R"([[00, 01, 02], [10, 11, 12], [20, 21, 22]])",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, explicit first+last level, same line, no spaces",
+R"([[00,01,02],[10,11,12],[20,21,22]])",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, explicit",
+R"([
+[00, 01, 02],
+[10, 11, 12],
+[20, 21, 22],
+])",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2",
+R"(
+- - 00
+ - 01
+ - 02
+- - 10
+ - 11
+ - 12
+- - 20
+ - 21
+ - 22
+)",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, next line",
+R"(
+-
+ - 00
+ - 01
+ - 02
+-
+ - 10
+ - 11
+ - 12
+-
+ - 20
+ - 21
+ - 22
+)",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, all next line",
+R"(
+-
+ -
+ 00
+ -
+ 01
+ -
+ 02
+-
+ -
+ 10
+ -
+ 11
+ -
+ 12
+-
+ -
+ 20
+ -
+ 21
+ -
+ 22
+)",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x2, implicit first, explicit last level",
+R"(
+- [00, 01, 02]
+- [10, 11, 12]
+- [20, 21, 22]
+)",
+ L{
+ N{L{N{"00"}, N{"01"}, N{"02"}}},
+ N{L{N{"10"}, N{"11"}, N{"12"}}},
+ N{L{N{"20"}, N{"21"}, N{"22"}}},
+ }
+);
+}
+
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_seqx3.cpp b/thirdparty/ryml/test/test_nested_seqx3.cpp
new file mode 100644
index 000000000..d1cc0beec
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_seqx3.cpp
@@ -0,0 +1,187 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(NESTED_SEQX3)
+{
+
+ADD_CASE_TO_GROUP("nested seq x3, explicit",
+R"([
+[[000, 001, 002], [010, 011, 012], [020, 021, 022]],
+[[100, 101, 102], [110, 111, 112], [120, 121, 122]],
+[[200, 201, 202], [210, 211, 212], [220, 221, 222]],
+])",
+ L{
+ N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
+ N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
+ N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x3",
+R"(
+- - - 000
+ - 001
+ - 002
+ - - 010
+ - 011
+ - 012
+ - - 020
+ - 021
+ - 022
+- - - 100
+ - 101
+ - 102
+ - - 110
+ - 111
+ - 112
+ - - 120
+ - 121
+ - 122
+- - - 200
+ - 201
+ - 202
+ - - 210
+ - 211
+ - 212
+ - - 220
+ - 221
+ - 222
+)",
+ L{
+ N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
+ N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
+ N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x3, continued on next line",
+R"(
+-
+ -
+ - 000
+ - 001
+ - 002
+ -
+ - 010
+ - 011
+ - 012
+ -
+ - 020
+ - 021
+ - 022
+-
+ -
+ - 100
+ - 101
+ - 102
+ -
+ - 110
+ - 111
+ - 112
+ -
+ - 120
+ - 121
+ - 122
+-
+ -
+ - 200
+ - 201
+ - 202
+ -
+ - 210
+ - 211
+ - 212
+ -
+ - 220
+ - 221
+ - 222
+)",
+ L{
+ N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
+ N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
+ N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x3, all continued on next line",
+R"(
+-
+ -
+ -
+ 000
+ -
+ 001
+ -
+ 002
+ -
+ -
+ 010
+ -
+ 011
+ -
+ 012
+ -
+ -
+ 020
+ -
+ 021
+ -
+ 022
+-
+ -
+ -
+ 100
+ -
+ 101
+ -
+ 102
+ -
+ -
+ 110
+ -
+ 111
+ -
+ 112
+ -
+ -
+ 120
+ -
+ 121
+ -
+ 122
+-
+ -
+ -
+ 200
+ -
+ 201
+ -
+ 202
+ -
+ -
+ 210
+ -
+ 211
+ -
+ 212
+ -
+ -
+ 220
+ -
+ 221
+ -
+ 222
+)",
+ L{
+ N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
+ N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
+ N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_nested_seqx4.cpp b/thirdparty/ryml/test/test_nested_seqx4.cpp
new file mode 100644
index 000000000..b63c4bac8
--- /dev/null
+++ b/thirdparty/ryml/test/test_nested_seqx4.cpp
@@ -0,0 +1,124 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+CASE_GROUP(NESTED_SEQX4)
+{
+
+ADD_CASE_TO_GROUP("nested seq x4, explicit",
+R"([
+[[[0000, 0001, 0002], [0010, 0011, 0012], [0020, 0021, 0022]],
+ [[0100, 0101, 0102], [0110, 0111, 0112], [0120, 0121, 0122]],
+ [[0200, 0201, 0202], [0210, 0211, 0212], [0220, 0221, 0222]]],
+
+[[[1000, 1001, 1002], [1010, 1011, 1012], [1020, 1021, 1022]],
+ [[1100, 1101, 1102], [1110, 1111, 1112], [1120, 1121, 1122]],
+ [[1200, 1201, 1202], [1210, 1211, 1212], [1220, 1221, 1222]]],
+
+[[[2000, 2001, 2002], [2010, 2011, 2012], [2020, 2021, 2022]],
+ [[2100, 2101, 2102], [2110, 2111, 2112], [2120, 2121, 2122]],
+ [[2200, 2201, 2202], [2210, 2211, 2212], [2220, 2221, 2222]]],
+])",
+ L{
+ N{L{N{L{N{L{N{"0000"}, N{"0001"}, N{"0002"}}}, N{L{N{"0010"}, N{"0011"}, N{"0012"}}}, N{L{N{"0020"}, N{"0021"}, N{"0022"}}}}}, N{L{N{L{N{"0100"}, N{"0101"}, N{"0102"}}}, N{L{N{"0110"}, N{"0111"}, N{"0112"}}}, N{L{N{"0120"}, N{"0121"}, N{"0122"}}}}}, N{L{N{L{N{"0200"}, N{"0201"}, N{"0202"}}}, N{L{N{"0210"}, N{"0211"}, N{"0212"}}}, N{L{N{"0220"}, N{"0221"}, N{"0222"}}}}}}},
+ N{L{N{L{N{L{N{"1000"}, N{"1001"}, N{"1002"}}}, N{L{N{"1010"}, N{"1011"}, N{"1012"}}}, N{L{N{"1020"}, N{"1021"}, N{"1022"}}}}}, N{L{N{L{N{"1100"}, N{"1101"}, N{"1102"}}}, N{L{N{"1110"}, N{"1111"}, N{"1112"}}}, N{L{N{"1120"}, N{"1121"}, N{"1122"}}}}}, N{L{N{L{N{"1200"}, N{"1201"}, N{"1202"}}}, N{L{N{"1210"}, N{"1211"}, N{"1212"}}}, N{L{N{"1220"}, N{"1221"}, N{"1222"}}}}}}},
+ N{L{N{L{N{L{N{"2000"}, N{"2001"}, N{"2002"}}}, N{L{N{"2010"}, N{"2011"}, N{"2012"}}}, N{L{N{"2020"}, N{"2021"}, N{"2022"}}}}}, N{L{N{L{N{"2100"}, N{"2101"}, N{"2102"}}}, N{L{N{"2110"}, N{"2111"}, N{"2112"}}}, N{L{N{"2120"}, N{"2121"}, N{"2122"}}}}}, N{L{N{L{N{"2200"}, N{"2201"}, N{"2202"}}}, N{L{N{"2210"}, N{"2211"}, N{"2212"}}}, N{L{N{"2220"}, N{"2221"}, N{"2222"}}}}}}},
+ }
+);
+
+ADD_CASE_TO_GROUP("nested seq x4",
+R"(
+- - - - 0000
+ - 0001
+ - 0002
+ - - 0010
+ - 0011
+ - 0012
+ - - 0020
+ - 0021
+ - 0022
+ - - - 0100
+ - 0101
+ - 0102
+ - - 0110
+ - 0111
+ - 0112
+ - - 0120
+ - 0121
+ - 0122
+ - - - 0200
+ - 0201
+ - 0202
+ - - 0210
+ - 0211
+ - 0212
+ - - 0220
+ - 0221
+ - 0222
+- - - - 1000
+ - 1001
+ - 1002
+ - - 1010
+ - 1011
+ - 1012
+ - - 1020
+ - 1021
+ - 1022
+ - - - 1100
+ - 1101
+ - 1102
+ - - 1110
+ - 1111
+ - 1112
+ - - 1120
+ - 1121
+ - 1122
+ - - - 1200
+ - 1201
+ - 1202
+ - - 1210
+ - 1211
+ - 1212
+ - - 1220
+ - 1221
+ - 1222
+- - - - 2000
+ - 2001
+ - 2002
+ - - 2010
+ - 2011
+ - 2012
+ - - 2020
+ - 2021
+ - 2022
+ - - - 2100
+ - 2101
+ - 2102
+ - - 2110
+ - 2111
+ - 2112
+ - - 2120
+ - 2121
+ - 2122
+ - - - 2200
+ - 2201
+ - 2202
+ - - 2210
+ - 2211
+ - 2212
+ - - 2220
+ - 2221
+ - 2222
+)",
+ L{
+ N{L{N{L{N{L{N{"0000"}, N{"0001"}, N{"0002"}}}, N{L{N{"0010"}, N{"0011"}, N{"0012"}}}, N{L{N{"0020"}, N{"0021"}, N{"0022"}}}}}, N{L{N{L{N{"0100"}, N{"0101"}, N{"0102"}}}, N{L{N{"0110"}, N{"0111"}, N{"0112"}}}, N{L{N{"0120"}, N{"0121"}, N{"0122"}}}}}, N{L{N{L{N{"0200"}, N{"0201"}, N{"0202"}}}, N{L{N{"0210"}, N{"0211"}, N{"0212"}}}, N{L{N{"0220"}, N{"0221"}, N{"0222"}}}}}}},
+ N{L{N{L{N{L{N{"1000"}, N{"1001"}, N{"1002"}}}, N{L{N{"1010"}, N{"1011"}, N{"1012"}}}, N{L{N{"1020"}, N{"1021"}, N{"1022"}}}}}, N{L{N{L{N{"1100"}, N{"1101"}, N{"1102"}}}, N{L{N{"1110"}, N{"1111"}, N{"1112"}}}, N{L{N{"1120"}, N{"1121"}, N{"1122"}}}}}, N{L{N{L{N{"1200"}, N{"1201"}, N{"1202"}}}, N{L{N{"1210"}, N{"1211"}, N{"1212"}}}, N{L{N{"1220"}, N{"1221"}, N{"1222"}}}}}}},
+ N{L{N{L{N{L{N{"2000"}, N{"2001"}, N{"2002"}}}, N{L{N{"2010"}, N{"2011"}, N{"2012"}}}, N{L{N{"2020"}, N{"2021"}, N{"2022"}}}}}, N{L{N{L{N{"2100"}, N{"2101"}, N{"2102"}}}, N{L{N{"2110"}, N{"2111"}, N{"2112"}}}, N{L{N{"2120"}, N{"2121"}, N{"2122"}}}}}, N{L{N{L{N{"2200"}, N{"2201"}, N{"2202"}}}, N{L{N{"2210"}, N{"2211"}, N{"2212"}}}, N{L{N{"2220"}, N{"2221"}, N{"2222"}}}}}}},
+ }
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_noderef.cpp b/thirdparty/ryml/test/test_noderef.cpp
new file mode 100644
index 000000000..cfd3363c3
--- /dev/null
+++ b/thirdparty/ryml/test/test_noderef.cpp
@@ -0,0 +1,813 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+#include "./test_case.hpp"
+#include "./callbacks_tester.hpp"
+
+#include <gtest/gtest.h>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4389) // signed/unsigned mismatch
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+#endif
+
+namespace c4 {
+namespace yml {
+
+TEST(NodeRef, general)
+{
+ Tree t;
+
+ NodeRef root(&t);
+
+ //using S = csubstr;
+ //using V = NodeScalar;
+ using N = NodeInit;
+
+ root = N{MAP};
+ root.append_child({"a", "0"});
+ root.append_child({MAP, "b"});
+ root["b"].append_child({SEQ, "seq"});
+ root["b"]["seq"].append_child({"0"});
+ root["b"]["seq"].append_child({"1"});
+ root["b"]["seq"].append_child({"2"});
+ root["b"]["seq"].append_child({NodeScalar{"!!str", "3"}});
+ auto ch4 = root["b"]["seq"][3].append_sibling({"4"});
+ EXPECT_EQ(ch4.id(), root["b"]["seq"][4].id());
+ EXPECT_EQ(ch4.get(), root["b"]["seq"][4].get());
+ EXPECT_EQ((type_bits)root["b"]["seq"][4].type(), (type_bits)VAL);
+ EXPECT_EQ(root["b"]["seq"][4].val(), "4");
+ root["b"]["seq"].append_sibling({NodeScalar{"!!str", "aaa"}, NodeScalar{"!!int", "0"}});
+ EXPECT_EQ((type_bits)root["b"]["seq"][4].type(), (type_bits)VAL);
+ EXPECT_EQ(root["b"]["seq"][4].val(), "4");
+
+ root["b"]["key"] = "val";
+ auto seq = root["b"]["seq"];
+ auto seq2 = root["b"]["seq2"];
+ EXPECT_TRUE(seq2.is_seed());
+ root["b"]["seq2"] = N(SEQ);
+ seq2 = root["b"]["seq2"];
+ EXPECT_FALSE(seq2.is_seed());
+ EXPECT_TRUE(seq2.is_seq());
+ EXPECT_EQ(seq2.num_children(), 0);
+ EXPECT_EQ(root["b"]["seq2"].get(), seq2.get());
+ auto seq20 = seq2[0];
+ EXPECT_TRUE(seq20.is_seed());
+ EXPECT_TRUE(seq2[0].is_seed());
+ EXPECT_EQ(seq2.num_children(), 0);
+ EXPECT_TRUE(seq2[0].is_seed());
+ EXPECT_TRUE(seq20.is_seed());
+ EXPECT_NE(seq.get(), seq2.get());
+ seq20 = root["b"]["seq2"][0];
+ EXPECT_TRUE(seq20.is_seed());
+ root["b"]["seq2"][0] = "00";
+ seq20 = root["b"]["seq2"][0];
+ EXPECT_FALSE(seq20.is_seed());
+ NodeRef before = root["b"]["key"];
+ EXPECT_EQ(before.key(), "key");
+ EXPECT_EQ(before.val(), "val");
+ root["b"]["seq2"][1] = "01";
+ NodeRef after = root["b"]["key"];
+ EXPECT_EQ(before.key(), "key");
+ EXPECT_EQ(before.val(), "val");
+ EXPECT_EQ(after.key(), "key");
+ EXPECT_EQ(after.val(), "val");
+ root["b"]["seq2"][2] = "02";
+ root["b"]["seq2"][3] = "03";
+ int iv = 0;
+ root["b"]["seq2"][4] << 55; root["b"]["seq2"][4] >> iv;
+ EXPECT_EQ(iv, 55);
+ size_t zv = 0;
+ root["b"]["seq2"][5] << size_t(55); root["b"]["seq2"][5] >> zv;
+ EXPECT_EQ(zv, size_t(55));
+ float fv = 0;
+ root["b"]["seq2"][6] << 2.0f; root["b"]["seq2"][6] >> fv;
+ EXPECT_EQ(fv, 2.f);
+ float dv = 0;
+ root["b"]["seq2"][7] << 2.0; root["b"]["seq2"][7] >> dv;
+ EXPECT_EQ(dv, 2.0);
+
+ EXPECT_EQ(root["b"]["key"].key(), "key");
+ EXPECT_EQ(root["b"]["key"].val(), "val");
+
+
+ emit_yaml(t);
+
+ EXPECT_TRUE(root.type().is_map());
+ EXPECT_TRUE(root["a"].type().is_keyval());
+ EXPECT_EQ(root["a"].key(), "a");
+ EXPECT_EQ(root["a"].val(), "0");
+
+ EXPECT_TRUE(root["b"].type().has_key());
+ EXPECT_TRUE(root["b"].type().is_map());
+
+ EXPECT_TRUE(root["b"]["seq"].type().has_key());
+ EXPECT_TRUE(root["b"]["seq"].type().is_seq());
+ EXPECT_EQ (root["b"]["seq"].key(), "seq");
+ EXPECT_TRUE(root["b"]["seq"][0].type().is_val());
+ EXPECT_EQ( root["b"]["seq"][0].val(), "0");
+ EXPECT_TRUE(root["b"]["seq"][1].type().is_val());
+ EXPECT_EQ( root["b"]["seq"][1].val(), "1");
+ EXPECT_TRUE(root["b"]["seq"][2].type().is_val());
+ EXPECT_EQ( root["b"]["seq"][2].val(), "2");
+ EXPECT_TRUE(root["b"]["seq"][3].type().is_val());
+ EXPECT_EQ( root["b"]["seq"][3].val(), "3");
+ EXPECT_EQ( root["b"]["seq"][3].val_tag(), "!!str");
+ EXPECT_TRUE(root["b"]["seq"][4].type().is_val());
+ EXPECT_EQ( root["b"]["seq"][4].val(), "4");
+
+ int tv;
+ EXPECT_EQ(root["b"]["key"].key(), "key");
+ EXPECT_EQ(root["b"]["key"].val(), "val");
+ EXPECT_EQ(root["b"]["seq2"][0].val(), "00"); root["b"]["seq2"][0] >> tv; EXPECT_EQ(tv, 0);
+ EXPECT_EQ(root["b"]["seq2"][1].val(), "01"); root["b"]["seq2"][1] >> tv; EXPECT_EQ(tv, 1);
+ EXPECT_EQ(root["b"]["seq2"][2].val(), "02"); root["b"]["seq2"][2] >> tv; EXPECT_EQ(tv, 2);
+ EXPECT_EQ(root["b"]["seq2"][3].val(), "03"); root["b"]["seq2"][3] >> tv; EXPECT_EQ(tv, 3);
+ EXPECT_EQ(root["b"]["seq2"][4].val(), "55"); EXPECT_EQ(iv, 55);
+ EXPECT_EQ(root["b"]["seq2"][5].val(), "55"); EXPECT_EQ(zv, size_t(55));
+ EXPECT_EQ(root["b"]["seq2"][6].val(), "2"); EXPECT_EQ(fv, 2.f);
+ EXPECT_EQ(root["b"]["seq2"][6].val(), "2"); EXPECT_EQ(dv, 2.);
+
+ root["b"]["seq"][2].set_val_serialized(22);
+
+ emit_yaml(t);
+
+ EXPECT_TRUE(root["b"]["aaa"].type().is_keyval());
+ EXPECT_EQ(root["b"]["aaa"].key_tag(), "!!str");
+ EXPECT_EQ(root["b"]["aaa"].key(), "aaa");
+ EXPECT_EQ(root["b"]["aaa"].val_tag(), "!!int");
+ EXPECT_EQ(root["b"]["aaa"].val(), "0");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+void noderef_check_tree(ConstNodeRef const& root)
+{
+ test_invariants(*root.tree());
+
+ EXPECT_EQ(root.tree()->size(), 7u);
+ EXPECT_EQ(root.num_children(), 6u);
+ EXPECT_EQ(root.is_container(), true);
+ EXPECT_EQ(root.is_seq(), true);
+
+ EXPECT_TRUE(root[0].type().is_val());
+ EXPECT_EQ( root[0].val(), "0");
+ EXPECT_TRUE(root[1].type().is_val());
+ EXPECT_EQ( root[1].val(), "1");
+ EXPECT_TRUE(root[2].type().is_val());
+ EXPECT_EQ( root[2].val(), "2");
+ EXPECT_TRUE(root[3].type().is_val());
+ EXPECT_EQ( root[3].val(), "3");
+ EXPECT_TRUE(root[4].type().is_val());
+ EXPECT_EQ( root[4].val(), "4");
+ EXPECT_TRUE(root[5].type().is_val());
+ EXPECT_EQ( root[5].val(), "5");
+}
+
+TEST(NodeRef, append_child)
+{
+ Tree t;
+
+ NodeRef root(&t);
+
+ root |= SEQ;
+ root.append_child({"0"});
+ root.append_child({"1"});
+ root.append_child({"2"});
+ root.append_child({"3"});
+ root.append_child({"4"});
+ root.append_child({"5"});
+
+ noderef_check_tree(root);
+}
+
+TEST(NodeRef, prepend_child)
+{
+ Tree t;
+
+ NodeRef root(&t);
+
+ root |= SEQ;
+ root.prepend_child({"5"});
+ root.prepend_child({"4"});
+ root.prepend_child({"3"});
+ root.prepend_child({"2"});
+ root.prepend_child({"1"});
+ root.prepend_child({"0"});
+
+ noderef_check_tree(root);
+}
+
+TEST(NodeRef, insert_child)
+{
+ Tree t;
+
+ NodeRef root(&t);
+ NodeRef none(&t, NONE);
+
+ root |= SEQ;
+ root.insert_child({"3"}, none);
+ root.insert_child({"4"}, root[0]);
+ root.insert_child({"0"}, none);
+ root.insert_child({"5"}, root[2]);
+ root.insert_child({"1"}, root[0]);
+ root.insert_child({"2"}, root[1]);
+
+ noderef_check_tree(root);
+}
+
+TEST(NodeRef, remove_child)
+{
+ Tree t;
+
+ NodeRef root(&t);
+ NodeRef none(&t, NONE);
+
+ root |= SEQ;
+ root.insert_child({"3"}, none);
+ root.insert_child({"4"}, root[0]);
+ root.insert_child({"0"}, none);
+ root.insert_child({"5"}, root[2]);
+ root.insert_child({"1"}, root[0]);
+ root.insert_child({"2"}, root[1]);
+
+ std::vector<int> vec({10, 20, 30, 40, 50, 60, 70, 80, 90});
+ root.insert_child(root[0]) << vec; // 1
+ root.insert_child(root[2]) << vec; // 3
+ root.insert_child(root[4]) << vec; // 5
+ root.insert_child(root[6]) << vec; // 7
+ root.insert_child(root[8]) << vec; // 9
+ root.append_child() << vec; // 10
+
+ root.remove_child(11);
+ root.remove_child(9);
+ root.remove_child(7);
+ root.remove_child(5);
+ root.remove_child(3);
+ root.remove_child(1);
+
+ noderef_check_tree(root);
+
+ std::vector<std::vector<int>> vec2({{100, 200}, {300, 400}, {500, 600}, {700, 800, 900}});
+ root.prepend_child() << vec2; // 0
+ root.insert_child(root[1]) << vec2; // 2
+ root.insert_child(root[3]) << vec2; // 4
+ root.insert_child(root[5]) << vec2; // 6
+ root.insert_child(root[7]) << vec2; // 8
+ root.insert_child(root[9]) << vec2; // 10
+ root.append_child() << vec2; // 12
+
+ root.remove_child(12);
+ root.remove_child(10);
+ root.remove_child(8);
+ root.remove_child(6);
+ root.remove_child(4);
+ root.remove_child(2);
+ root.remove_child(0);
+
+ noderef_check_tree(root);
+}
+
+TEST(NodeRef, move_in_same_parent)
+{
+ Tree t;
+ NodeRef r = t;
+
+ std::vector<std::vector<int>> vec2({{100, 200}, {300, 400}, {500, 600}, {700, 800, 900}});
+ std::map<std::string, int> map2({{"foo", 100}, {"bar", 200}, {"baz", 300}});
+
+ r |= SEQ;
+ r.append_child() << vec2;
+ r.append_child() << map2;
+ r.append_child() << "elm2";
+ r.append_child() << "elm3";
+
+ auto s = r[0];
+ auto m = r[1];
+ EXPECT_TRUE(s.is_seq());
+ EXPECT_TRUE(m.is_map());
+ EXPECT_EQ(s.num_children(), vec2.size());
+ EXPECT_EQ(m.num_children(), map2.size());
+ //printf("fonix"); print_tree(t); emit_yaml(r);
+ r[0].move(r[1]);
+ //printf("fonix"); print_tree(t); emit_yaml(r);
+ EXPECT_EQ(r[0].get(), m.get());
+ EXPECT_EQ(r[0].num_children(), map2.size());
+ EXPECT_EQ(r[1].get(), s.get());
+ EXPECT_EQ(r[1].num_children(), vec2.size());
+ test_invariants(t);
+}
+
+TEST(NodeRef, move_in_same_parent_to_first_position)
+{
+ Tree t = parse_in_arena("[1, 2, 3, 0, 4]");
+ NodeRef r = t;
+
+ EXPECT_TRUE(r[0].val() == "1");
+ EXPECT_TRUE(r[1].val() == "2");
+ EXPECT_TRUE(r[2].val() == "3");
+ EXPECT_TRUE(r[3].val() == "0");
+ EXPECT_TRUE(r[4].val() == "4");
+ r[3].move({});
+ EXPECT_TRUE(r[0].val() == "0");
+ EXPECT_TRUE(r[1].val() == "1");
+ EXPECT_TRUE(r[2].val() == "2");
+ EXPECT_TRUE(r[3].val() == "3");
+ EXPECT_TRUE(r[4].val() == "4");
+ test_invariants(t);
+ r[0].move({}); // should have no effect
+ EXPECT_TRUE(r[0].val() == "0");
+ EXPECT_TRUE(r[1].val() == "1");
+ EXPECT_TRUE(r[2].val() == "2");
+ EXPECT_TRUE(r[3].val() == "3");
+ EXPECT_TRUE(r[4].val() == "4");
+ test_invariants(t);
+ r[4].move({});
+ EXPECT_TRUE(r[0].val() == "4");
+ EXPECT_TRUE(r[1].val() == "0");
+ EXPECT_TRUE(r[2].val() == "1");
+ EXPECT_TRUE(r[3].val() == "2");
+ EXPECT_TRUE(r[4].val() == "3");
+ test_invariants(t);
+}
+
+TEST(NodeRef, move_to_other_parent)
+{
+ Tree t;
+ NodeRef r = t;
+
+ std::vector<std::vector<int>> vec2({{100, 200}, {300, 400}, {500, 600}, {700, 800, 900}});
+ std::map<std::string, int> map2({{"foo", 100}, {"bar", 200}, {"baz", 300}});
+
+ r |= SEQ;
+ r.append_child() << vec2;
+ r.append_child() << map2;
+ r.append_child() << "elm2";
+ r.append_child() << "elm3";
+
+ NodeData *elm2 = r[2].get();
+ EXPECT_EQ(r[2].val(), "elm2");
+ //printf("fonix"); print_tree(t); emit_yaml(r);
+ r[2].move(r[0], r[0][0]);
+ EXPECT_EQ(r[0][1].get(), elm2);
+ EXPECT_EQ(r[0][1].val(), "elm2");
+ //printf("fonix"); print_tree(t); emit_yaml(r);
+ test_invariants(t);
+}
+
+TEST(NodeRef, move_to_other_parent_to_first_position)
+{
+ Tree t = parse_in_arena("[[0, 1, 2, 3, 4], [00, 10, 20, 30, 40]]");
+ NodeRef r = t;
+
+ EXPECT_TRUE(r[0][0].val() == "0");
+ EXPECT_TRUE(r[0][1].val() == "1");
+ EXPECT_TRUE(r[0][2].val() == "2");
+ EXPECT_TRUE(r[0][3].val() == "3");
+ EXPECT_TRUE(r[0][4].val() == "4");
+ EXPECT_TRUE(r[1][0].val() == "00");
+ EXPECT_TRUE(r[1][1].val() == "10");
+ EXPECT_TRUE(r[1][2].val() == "20");
+ EXPECT_TRUE(r[1][3].val() == "30");
+ EXPECT_TRUE(r[1][4].val() == "40");
+ test_invariants(t);
+ r[0][0].move(r[1], {});
+ EXPECT_TRUE(r[0][0].val() == "1");
+ EXPECT_TRUE(r[0][1].val() == "2");
+ EXPECT_TRUE(r[0][2].val() == "3");
+ EXPECT_TRUE(r[0][3].val() == "4");
+ EXPECT_TRUE(r[1][0].val() == "0");
+ EXPECT_TRUE(r[1][1].val() == "00");
+ EXPECT_TRUE(r[1][2].val() == "10");
+ EXPECT_TRUE(r[1][3].val() == "20");
+ EXPECT_TRUE(r[1][4].val() == "30");
+ EXPECT_TRUE(r[1][5].val() == "40");
+ test_invariants(t);
+ r[1][0].move(r[0], {});
+ EXPECT_TRUE(r[0][0].val() == "0");
+ EXPECT_TRUE(r[0][1].val() == "1");
+ EXPECT_TRUE(r[0][2].val() == "2");
+ EXPECT_TRUE(r[0][3].val() == "3");
+ EXPECT_TRUE(r[0][4].val() == "4");
+ EXPECT_TRUE(r[1][0].val() == "00");
+ EXPECT_TRUE(r[1][1].val() == "10");
+ EXPECT_TRUE(r[1][2].val() == "20");
+ EXPECT_TRUE(r[1][3].val() == "30");
+ EXPECT_TRUE(r[1][4].val() == "40");
+ test_invariants(t);
+}
+
+TEST(NodeRef, move_to_other_tree)
+{
+ Tree t0 = parse_in_arena("[0, 1, 2, 3, 4]");
+ Tree t1 = parse_in_arena("[00, 10, 20, 30, 40]");
+ NodeRef r0 = t0;
+ NodeRef r1 = t1;
+
+ EXPECT_TRUE(r0[0].val() == "0");
+ EXPECT_TRUE(r0[1].val() == "1");
+ EXPECT_TRUE(r0[2].val() == "2");
+ EXPECT_TRUE(r0[3].val() == "3");
+ EXPECT_TRUE(r0[4].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "00");
+ EXPECT_TRUE(r1[1].val() == "10");
+ EXPECT_TRUE(r1[2].val() == "20");
+ EXPECT_TRUE(r1[3].val() == "30");
+ EXPECT_TRUE(r1[4].val() == "40");
+ r0[0].move(r1, r1[0]);
+ test_invariants(t0);
+ test_invariants(t1);
+ EXPECT_TRUE(r0[0].val() == "1");
+ EXPECT_TRUE(r0[1].val() == "2");
+ EXPECT_TRUE(r0[2].val() == "3");
+ EXPECT_TRUE(r0[3].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "00");
+ EXPECT_TRUE(r1[1].val() == "0");
+ EXPECT_TRUE(r1[2].val() == "10");
+ EXPECT_TRUE(r1[3].val() == "20");
+ EXPECT_TRUE(r1[4].val() == "30");
+ EXPECT_TRUE(r1[5].val() == "40");
+ test_invariants(t0);
+ test_invariants(t1);
+ r1[1].move(r0, r0[0]);
+ EXPECT_TRUE(r0[0].val() == "1");
+ EXPECT_TRUE(r0[1].val() == "0");
+ EXPECT_TRUE(r0[2].val() == "2");
+ EXPECT_TRUE(r0[3].val() == "3");
+ EXPECT_TRUE(r0[4].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "00");
+ EXPECT_TRUE(r1[1].val() == "10");
+ EXPECT_TRUE(r1[2].val() == "20");
+ EXPECT_TRUE(r1[3].val() == "30");
+ EXPECT_TRUE(r1[4].val() == "40");
+ test_invariants(t0);
+ test_invariants(t1);
+}
+
+TEST(NodeRef, move_to_other_tree_to_first_position)
+{
+ Tree t0 = parse_in_arena("[0, 1, 2, 3, 4]");
+ Tree t1 = parse_in_arena("[00, 10, 20, 30, 40]");
+ NodeRef r0 = t0;
+ NodeRef r1 = t1;
+
+ EXPECT_TRUE(r0[0].val() == "0");
+ EXPECT_TRUE(r0[1].val() == "1");
+ EXPECT_TRUE(r0[2].val() == "2");
+ EXPECT_TRUE(r0[3].val() == "3");
+ EXPECT_TRUE(r0[4].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "00");
+ EXPECT_TRUE(r1[1].val() == "10");
+ EXPECT_TRUE(r1[2].val() == "20");
+ EXPECT_TRUE(r1[3].val() == "30");
+ EXPECT_TRUE(r1[4].val() == "40");
+ test_invariants(t0);
+ test_invariants(t1);
+ r0[0].move(r1, {});
+ EXPECT_TRUE(r0[0].val() == "1");
+ EXPECT_TRUE(r0[1].val() == "2");
+ EXPECT_TRUE(r0[2].val() == "3");
+ EXPECT_TRUE(r0[3].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "0");
+ EXPECT_TRUE(r1[1].val() == "00");
+ EXPECT_TRUE(r1[2].val() == "10");
+ EXPECT_TRUE(r1[3].val() == "20");
+ EXPECT_TRUE(r1[4].val() == "30");
+ EXPECT_TRUE(r1[5].val() == "40");
+ test_invariants(t0);
+ test_invariants(t1);
+ r1[0].move(r0, {});
+ EXPECT_TRUE(r0[0].val() == "0");
+ EXPECT_TRUE(r0[1].val() == "1");
+ EXPECT_TRUE(r0[2].val() == "2");
+ EXPECT_TRUE(r0[3].val() == "3");
+ EXPECT_TRUE(r0[4].val() == "4");
+ EXPECT_TRUE(r1[0].val() == "00");
+ EXPECT_TRUE(r1[1].val() == "10");
+ EXPECT_TRUE(r1[2].val() == "20");
+ EXPECT_TRUE(r1[3].val() == "30");
+ EXPECT_TRUE(r1[4].val() == "40");
+ test_invariants(t0);
+ test_invariants(t1);
+}
+
+TEST(NodeRef, duplicate_to_same_tree)
+{
+ Tree t = parse_in_arena("[{a0: [b0, c0], a1: [b1, c1], a2: [b2, c2], a3: [b3, c3]}]");
+ auto checkseq = [](ConstNodeRef const& s){
+ ASSERT_EQ(s.num_children(), 4u);
+ ASSERT_EQ(s[0].num_children(), 2u);
+ ASSERT_EQ(s[1].num_children(), 2u);
+ ASSERT_EQ(s[2].num_children(), 2u);
+ ASSERT_EQ(s[3].num_children(), 2u);
+ EXPECT_EQ(s[0].key(), "a0");
+ EXPECT_EQ(s[0][0].val(), "b0");
+ EXPECT_EQ(s[0][1].val(), "c0");
+ EXPECT_EQ(s[1].key(), "a1");
+ EXPECT_EQ(s[1][0].val(), "b1");
+ EXPECT_EQ(s[1][1].val(), "c1");
+ EXPECT_EQ(s[2].key(), "a2");
+ EXPECT_EQ(s[2][0].val(), "b2");
+ EXPECT_EQ(s[2][1].val(), "c2");
+ EXPECT_EQ(s[3].key(), "a3");
+ EXPECT_EQ(s[3][0].val(), "b3");
+ EXPECT_EQ(s[3][1].val(), "c3");
+ };
+ {
+ SCOPED_TRACE("at the beginning");
+ t[0].duplicate({});
+ test_check_emit_check(t, [&checkseq](ConstNodeRef r){
+ checkseq(r[0]);
+ checkseq(r[1]);
+ });
+ }
+ {
+ SCOPED_TRACE("at the end");
+ t[0].duplicate(t.rootref().last_child());
+ test_check_emit_check(t, [&checkseq](ConstNodeRef r){
+ checkseq(r[0]);
+ checkseq(r[1]);
+ checkseq(r[2]);
+ });
+ }
+ {
+ SCOPED_TRACE("in the middle");
+ t[0].duplicate(t.rootref().first_child());
+ test_check_emit_check(t, [&checkseq](ConstNodeRef r){
+ checkseq(r[0]);
+ checkseq(r[1]);
+ checkseq(r[2]);
+ });
+ }
+}
+
+TEST(NodeRef, duplicate_to_different_tree)
+{
+ Tree t = parse_in_arena("[{a0: [b0, c0], a1: [b1, c1], a2: [b2, c2], a3: [b3, c3]}]");
+ auto checkseq = [](ConstNodeRef const& s){
+ ASSERT_EQ(s.num_children(), 4u);
+ ASSERT_EQ(s[0].num_children(), 2u);
+ ASSERT_EQ(s[1].num_children(), 2u);
+ ASSERT_EQ(s[2].num_children(), 2u);
+ ASSERT_EQ(s[3].num_children(), 2u);
+ EXPECT_EQ(s[0].key(), "a0");
+ EXPECT_EQ(s[0][0].val(), "b0");
+ EXPECT_EQ(s[0][1].val(), "c0");
+ EXPECT_EQ(s[1].key(), "a1");
+ EXPECT_EQ(s[1][0].val(), "b1");
+ EXPECT_EQ(s[1][1].val(), "c1");
+ EXPECT_EQ(s[2].key(), "a2");
+ EXPECT_EQ(s[2][0].val(), "b2");
+ EXPECT_EQ(s[2][1].val(), "c2");
+ EXPECT_EQ(s[3].key(), "a3");
+ EXPECT_EQ(s[3][0].val(), "b3");
+ EXPECT_EQ(s[3][1].val(), "c3");
+ };
+ auto check_orig = [&checkseq](ConstNodeRef const& r){
+ ASSERT_TRUE(r.is_seq());
+ ASSERT_GE(r.num_children(), 1u);
+ checkseq(r[0]);
+ };
+ Tree d = parse_in_arena("[]");
+ {
+ SCOPED_TRACE("at the beginning");
+ t[0].duplicate(d, {});
+ test_check_emit_check(t, check_orig);
+ test_check_emit_check(d, check_orig);
+ }
+ {
+ SCOPED_TRACE("at the end");
+ t[0].duplicate(d, d.rootref().last_child());
+ test_check_emit_check(t, check_orig);
+ test_check_emit_check(d, check_orig);
+ test_check_emit_check(d, [&checkseq](ConstNodeRef r){
+ checkseq(r[1]);
+ });
+ }
+ {
+ SCOPED_TRACE("in the middle");
+ t[0].duplicate(d, d.rootref().first_child());
+ test_check_emit_check(t, check_orig);
+ test_check_emit_check(d, check_orig);
+ test_check_emit_check(d, [&checkseq](ConstNodeRef r){
+ checkseq(r[1]);
+ checkseq(r[2]);
+ });
+ }
+}
+
+TEST(NodeRef, intseq)
+{
+ Tree t = parse_in_arena("iseq: [8, 10]");
+ NodeRef n = t["iseq"];
+ int a, b;
+ n[0] >> a;
+ n[1] >> b;
+ EXPECT_EQ(a, 8);
+ EXPECT_EQ(b, 10);
+ test_invariants(t);
+}
+
+TEST(NodeRef, vsConstNodeRef)
+{
+ Tree t = parse_in_arena("iseq: [8, 10]");
+ Tree const& ct = t;
+ NodeRef mseq = t["iseq"];
+ ConstNodeRef seq = t["iseq"];
+ EXPECT_EQ(mseq.tree(), seq.tree());
+ EXPECT_EQ(mseq.id(), seq.id());
+ EXPECT_TRUE(mseq == seq);
+ EXPECT_FALSE(mseq != seq);
+ EXPECT_TRUE(seq == mseq);
+ EXPECT_FALSE(seq != mseq);
+ // mseq = ct["iseq"]; // deliberate compile error
+ seq = ct["iseq"]; // ok
+ // mseq = seq; // deliberate compilation error
+ seq = mseq; // ok
+ {
+ NodeData *nd = mseq.get();
+ // nd = seq.get(); // deliberate compile error
+ C4_UNUSED(nd);
+ }
+ {
+ NodeData const* nd = seq.get();
+ nd = seq.get(); // ok
+ C4_UNUSED(nd);
+ }
+ test_invariants(t);
+}
+
+
+// see https://github.com/biojppm/rapidyaml/issues/294
+TEST(NodeRef, overload_sets)
+{
+ // doc()
+ {
+ Tree t = parse_in_arena("a\n---\nb");
+ NodeRef n = t;
+ NodeRef const nc = t;
+ ConstNodeRef const cn = t;
+ EXPECT_EQ(n.doc(0), nc.doc(0));
+ EXPECT_EQ(n.doc(0), cn.doc(0));
+ }
+ Tree t = parse_in_arena("{iseq: [8, 10], imap: {a: b, c: d}}");
+ NodeRef n = t;
+ NodeRef const nc = t;
+ ConstNodeRef const cn = t;
+ // get()
+ {
+ EXPECT_EQ(n["iseq"].get(), nc["iseq"].get());
+ EXPECT_EQ(n["iseq"].get(), cn["iseq"].get());
+ }
+ // parent()
+ {
+ EXPECT_EQ(n["iseq"].parent(), nc["iseq"].parent());
+ EXPECT_EQ(n["iseq"].parent(), cn["iseq"].parent());
+ }
+ // child_pos()
+ {
+ EXPECT_EQ(n["iseq"].child_pos(n["iseq"][0]), nc["iseq"].child_pos(n["iseq"][0]));
+ EXPECT_EQ(n["iseq"].child_pos(n["iseq"][0]), cn["iseq"].child_pos(n["iseq"][0]));
+ }
+ // num_children()
+ {
+ EXPECT_EQ(n["iseq"].num_children(), nc["iseq"].num_children());
+ EXPECT_EQ(n["iseq"].num_children(), cn["iseq"].num_children());
+ }
+ // first_child()
+ {
+ EXPECT_EQ(n["iseq"].first_child(), nc["iseq"].first_child());
+ EXPECT_EQ(n["iseq"].first_child(), cn["iseq"].first_child());
+ }
+ // last_child()
+ {
+ EXPECT_EQ(n["iseq"].last_child(), nc["iseq"].last_child());
+ EXPECT_EQ(n["iseq"].last_child(), cn["iseq"].last_child());
+ }
+ // child()
+ {
+ EXPECT_EQ(n["iseq"].child(0), nc["iseq"].child(0));
+ EXPECT_EQ(n["iseq"].child(0), cn["iseq"].child(0));
+ }
+ // find_child()
+ {
+ EXPECT_EQ(n.find_child("iseq"), nc.find_child("iseq"));
+ EXPECT_EQ(n.find_child("iseq"), cn.find_child("iseq"));
+ }
+ // prev_sibling()
+ {
+ EXPECT_EQ(n["iseq"][1].prev_sibling(), nc["iseq"][1].prev_sibling());
+ EXPECT_EQ(n["iseq"][1].prev_sibling(), cn["iseq"][1].prev_sibling());
+ }
+ // next_sibling()
+ {
+ EXPECT_EQ(n["iseq"][0].next_sibling(), nc["iseq"][0].next_sibling());
+ EXPECT_EQ(n["iseq"][0].next_sibling(), cn["iseq"][0].next_sibling());
+ }
+ // first_sibling()
+ {
+ EXPECT_EQ(n["iseq"][1].first_sibling(), nc["iseq"][1].first_sibling());
+ EXPECT_EQ(n["iseq"][1].first_sibling(), cn["iseq"][1].first_sibling());
+ }
+ // last_sibling()
+ {
+ EXPECT_EQ(n["iseq"][0].last_sibling(), nc["iseq"][0].last_sibling());
+ EXPECT_EQ(n["iseq"][0].last_sibling(), cn["iseq"][0].last_sibling());
+ }
+ // sibling()
+ {
+ EXPECT_EQ(n["iseq"][1].sibling(0), nc["iseq"][1].sibling(0));
+ EXPECT_EQ(n["iseq"][1].sibling(0), cn["iseq"][1].sibling(0));
+ }
+ // find_sibling()
+ {
+ EXPECT_EQ(n["iseq"].find_sibling("imap"), nc["iseq"].find_sibling("imap"));
+ EXPECT_EQ(n["iseq"].find_sibling("imap"), cn["iseq"].find_sibling("imap"));
+ }
+ // operator[](csubstr)
+ {
+ EXPECT_EQ(n["iseq"].id(), nc["iseq"].id());
+ EXPECT_EQ(n["iseq"].id(), cn["iseq"].id());
+ }
+ // operator[](size_t)
+ {
+ EXPECT_EQ(n["iseq"][0].id(), nc["iseq"][0].id());
+ EXPECT_EQ(n["iseq"][0].id(), cn["iseq"][0].id());
+ }
+ // begin()
+ {
+ EXPECT_EQ(n["iseq"].begin().m_child_id, nc["iseq"].begin().m_child_id);
+ EXPECT_EQ(n["iseq"].begin().m_child_id, cn["iseq"].begin().m_child_id);
+ }
+ // end()
+ {
+ EXPECT_EQ(n["iseq"].end().m_child_id, nc["iseq"].end().m_child_id);
+ EXPECT_EQ(n["iseq"].end().m_child_id, cn["iseq"].end().m_child_id);
+ }
+ // cbegin()
+ {
+ EXPECT_EQ(n["iseq"].cbegin().m_child_id, nc["iseq"].cbegin().m_child_id);
+ EXPECT_EQ(n["iseq"].cbegin().m_child_id, cn["iseq"].cbegin().m_child_id);
+ }
+ // cend()
+ {
+ EXPECT_EQ(n["iseq"].cend().m_child_id, nc["iseq"].cend().m_child_id);
+ EXPECT_EQ(n["iseq"].cend().m_child_id, cn["iseq"].cend().m_child_id);
+ }
+ // children()
+ {
+ EXPECT_EQ(n["iseq"].children().b.m_child_id, nc["iseq"].children().b.m_child_id);
+ EXPECT_EQ(n["iseq"].children().b.m_child_id, cn["iseq"].children().b.m_child_id);
+ }
+ // cchildren()
+ {
+ EXPECT_EQ(n["iseq"].cchildren().b.m_child_id, nc["iseq"].cchildren().b.m_child_id);
+ EXPECT_EQ(n["iseq"].cchildren().b.m_child_id, cn["iseq"].cchildren().b.m_child_id);
+ }
+ // siblings()
+ {
+ EXPECT_EQ(n["iseq"][0].siblings().b.m_child_id, nc["iseq"][0].siblings().b.m_child_id);
+ EXPECT_EQ(n["iseq"][0].siblings().b.m_child_id, cn["iseq"][0].siblings().b.m_child_id);
+ EXPECT_EQ(n.siblings().b.m_child_id, nc.siblings().b.m_child_id);
+ EXPECT_EQ(n.siblings().b.m_child_id, cn.siblings().b.m_child_id);
+ }
+ // csiblings()
+ {
+ EXPECT_EQ(n["iseq"][0].csiblings().b.m_child_id, nc["iseq"][0].csiblings().b.m_child_id);
+ EXPECT_EQ(n["iseq"][0].csiblings().b.m_child_id, cn["iseq"][0].csiblings().b.m_child_id);
+ EXPECT_EQ(n.csiblings().b.m_child_id, nc.csiblings().b.m_child_id);
+ EXPECT_EQ(n.csiblings().b.m_child_id, cn.csiblings().b.m_child_id);
+ }
+}
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_null_val.cpp b/thirdparty/ryml/test/test_null_val.cpp
new file mode 100644
index 000000000..4bdd6d419
--- /dev/null
+++ b/thirdparty/ryml/test/test_null_val.cpp
@@ -0,0 +1,519 @@
+#include "./test_group.hpp"
+#include "c4/error.hpp"
+
+namespace c4 {
+namespace yml {
+
+C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
+
+csubstr getafter(csubstr yaml, csubstr pattern)
+{
+ size_t pos = yaml.find(pattern);
+ RYML_ASSERT(pos != npos);
+ RYML_ASSERT(yaml.sub(pos).begins_with(pattern));
+ return yaml.sub(pos + pattern.len);
+}
+
+#define _check_null_pointing_at(expr_, which, pattern, arena) \
+ do \
+ { \
+ csubstr expr = expr_.which(); \
+ if(expr.empty()) \
+ { \
+ EXPECT_EQ(expr, nullptr); \
+ EXPECT_EQ(expr.len, 0u); \
+ EXPECT_EQ(expr.str, nullptr); \
+ } \
+ EXPECT_TRUE(expr_.which##_is_null()); \
+ } while(0)
+
+
+TEST(null_val, simple)
+{
+ Tree tree = parse_in_arena("{foo: , bar: '', baz: [,,,], bat: [ , , , ], two: [,,], one: [,], empty: []}");
+ _check_null_pointing_at(tree["foo"], val, " ,", tree.arena());
+ ASSERT_EQ(tree["baz"].num_children(), 3u);
+ _check_null_pointing_at(tree["baz"][0], val, "[,,,]", tree.arena());
+ _check_null_pointing_at(tree["baz"][1], val, ",,,]", tree.arena());
+ _check_null_pointing_at(tree["baz"][2], val, ",,]", tree.arena());
+ ASSERT_EQ(tree["bat"].num_children(), 3u);
+ _check_null_pointing_at(tree["bat"][0], val, " , , , ]", tree.arena());
+ _check_null_pointing_at(tree["bat"][1], val, " , , ]", tree.arena());
+ _check_null_pointing_at(tree["bat"][2], val, " , ]", tree.arena());
+ ASSERT_EQ(tree["two"].num_children(), 2u);
+ _check_null_pointing_at(tree["two"][0], val, "[,,]", tree.arena());
+ _check_null_pointing_at(tree["two"][1], val, ",,]", tree.arena());
+ ASSERT_EQ(tree["one"].num_children(), 1u);
+ _check_null_pointing_at(tree["one"][0], val, "[,]", tree.arena());
+ ASSERT_EQ(tree["empty"].num_children(), 0u);
+}
+
+TEST(null_val, block_seq)
+{
+ csubstr yaml = R"(
+# nospace
+-
+-
+-
+# onespace
+-
+-
+-
+# null
+- null
+- null
+- null
+- ~
+)";
+ ASSERT_EQ(yaml.count('\r'), 0u);
+ Tree tree = parse_in_arena(yaml);
+ ASSERT_EQ(tree.rootref().num_children(), 10u);
+ // FIXME: empty vals in block seqs are pointing at the next item!
+ _check_null_pointing_at(tree[0], val, after("nospace\n-\n"), tree.arena());
+ _check_null_pointing_at(tree[1], val, after("nospace\n-\n-\n"), tree.arena());
+ _check_null_pointing_at(tree[2], val, after("nospace\n-\n-\n-\n# onespace\n"), tree.arena());
+ _check_null_pointing_at(tree[3], val, after("onespace\n- \n"), tree.arena());
+ _check_null_pointing_at(tree[4], val, after("onespace\n- \n- \n"), tree.arena());
+ _check_null_pointing_at(tree[5], val, after("onespace\n- \n- \n- \n# null\n"), tree.arena());
+ // but explicitly null vals are ok:
+ _check_null_pointing_at(tree[6], val, "null\n- null\n- null\n- ~\n", tree.arena());
+ _check_null_pointing_at(tree[7], val, "null\n- null\n- ~", tree.arena());
+ _check_null_pointing_at(tree[8], val, "null\n- ~\n", tree.arena());
+ _check_null_pointing_at(tree[9], val, "~\n", tree.arena());
+}
+
+TEST(null_val, block_map)
+{
+ csubstr yaml = R"(
+# nospace
+val0:
+val1:
+val2:
+# onespace
+val3:
+val4:
+val5:
+# null
+val6: null
+val7: null
+val8: null
+val9: ~
+)";
+ ASSERT_EQ(yaml.count('\r'), 0u);
+ Tree tree = parse_in_arena(yaml);
+ ASSERT_EQ(tree.rootref().num_children(), 10u);
+    // FIXME: empty vals in block maps are pointing at the next item!
+ _check_null_pointing_at(tree["val0"], val, after("val0:"), tree.arena());
+ _check_null_pointing_at(tree["val1"], val, after("val1:"), tree.arena());
+ _check_null_pointing_at(tree["val2"], val, after("val2:\n# onespace"), tree.arena());
+ _check_null_pointing_at(tree["val3"], val, after("val3: "), tree.arena());
+ _check_null_pointing_at(tree["val4"], val, after("val4: "), tree.arena());
+ _check_null_pointing_at(tree["val5"], val, after("val5: \n# null"), tree.arena());
+ // but explicitly null vals are ok:
+ _check_null_pointing_at(tree["val6"], val, "null\nval7:", tree.arena());
+ _check_null_pointing_at(tree["val7"], val, "null\nval8:", tree.arena());
+ _check_null_pointing_at(tree["val8"], val, "null\nval9:", tree.arena());
+ _check_null_pointing_at(tree["val9"], val, "~\n", tree.arena());
+}
+
+
+TEST(null_val, issue103)
+{
+ C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
+ Tree tree;
+
+ tree = parse_in_arena(R"({null: null})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "null");
+ EXPECT_EQ(tree.val(1), "null");
+ EXPECT_TRUE(tree.key_is_null(1));
+ EXPECT_TRUE(tree.val_is_null(1));
+ EXPECT_TRUE(tree[0].key_is_null());
+ EXPECT_TRUE(tree[0].val_is_null());
+ _check_null_pointing_at(tree[0], key, "null:", tree.arena());
+ _check_null_pointing_at(tree[0], val, "null}", tree.arena());
+
+ tree = parse_in_arena(R"({Null: Null})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "Null");
+ EXPECT_EQ(tree.val(1), "Null");
+ EXPECT_TRUE(tree.key_is_null(1));
+ EXPECT_TRUE(tree.val_is_null(1));
+ EXPECT_TRUE(tree[0].key_is_null());
+ EXPECT_TRUE(tree[0].val_is_null());
+ _check_null_pointing_at(tree[0], key, "Null:", tree.arena());
+ _check_null_pointing_at(tree[0], val, "Null}", tree.arena());
+
+ tree = parse_in_arena(R"({NULL: NULL})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "NULL");
+ EXPECT_EQ(tree.val(1), "NULL");
+ EXPECT_TRUE(tree.key_is_null(1));
+ EXPECT_TRUE(tree.val_is_null(1));
+ EXPECT_TRUE(tree[0].key_is_null());
+ EXPECT_TRUE(tree[0].val_is_null());
+ _check_null_pointing_at(tree[0], key, "NULL:", tree.arena());
+ _check_null_pointing_at(tree[0], val, "NULL}", tree.arena());
+
+ tree = parse_in_arena(R"({ : })");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), nullptr);
+ EXPECT_EQ(tree.val(1), nullptr);
+ EXPECT_TRUE(tree.key_is_null(1));
+ EXPECT_TRUE(tree.val_is_null(1));
+ EXPECT_TRUE(tree[0].key_is_null());
+ EXPECT_TRUE(tree[0].val_is_null());
+ _check_null_pointing_at(tree[0], key, ": }", tree.arena());
+ _check_null_pointing_at(tree[0], val, " }", tree.arena());
+
+ tree = parse_in_arena(R"({~: ~})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "~");
+ EXPECT_EQ(tree.val(1), "~");
+ EXPECT_TRUE(tree.key_is_null(1));
+ EXPECT_TRUE(tree.val_is_null(1));
+ EXPECT_TRUE(tree[0].key_is_null());
+ EXPECT_TRUE(tree[0].val_is_null());
+ _check_null_pointing_at(tree[0], key, "~:", tree.arena());
+ _check_null_pointing_at(tree[0], val, "~}", tree.arena());
+
+ tree = parse_in_arena(R"({"~": "~"})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "~");
+ EXPECT_EQ(tree.val(1), "~");
+ EXPECT_NE(tree.key(1), nullptr);
+ EXPECT_NE(tree.val(1), nullptr);
+ EXPECT_FALSE(tree.key_is_null(1));
+ EXPECT_FALSE(tree.val_is_null(1));
+ EXPECT_FALSE(tree[0].key_is_null());
+ EXPECT_FALSE(tree[0].val_is_null());
+
+ tree = parse_in_arena(R"({"null": "null"})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "null");
+ EXPECT_EQ(tree.val(1), "null");
+ EXPECT_NE(tree.key(1), nullptr);
+ EXPECT_NE(tree.val(1), nullptr);
+ EXPECT_FALSE(tree[0].key_is_null());
+ EXPECT_FALSE(tree[0].val_is_null());
+
+ tree = parse_in_arena(R"({"Null": "Null"})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "Null");
+ EXPECT_EQ(tree.val(1), "Null");
+ EXPECT_NE(tree.key(1), nullptr);
+ EXPECT_NE(tree.val(1), nullptr);
+ EXPECT_FALSE(tree.key_is_null(1));
+ EXPECT_FALSE(tree.val_is_null(1));
+ EXPECT_FALSE(tree[0].key_is_null());
+ EXPECT_FALSE(tree[0].val_is_null());
+
+ tree = parse_in_arena(R"({"NULL": "NULL"})");
+ ASSERT_EQ(tree.size(), 2u);
+ EXPECT_EQ(tree.root_id(), 0u);
+ EXPECT_EQ(tree.first_child(0), 1u);
+ EXPECT_TRUE(tree.type(1).is_keyval());
+ EXPECT_EQ(tree.key(1), "NULL");
+ EXPECT_EQ(tree.val(1), "NULL");
+ EXPECT_NE(tree.key(1), nullptr);
+ EXPECT_NE(tree.val(1), nullptr);
+ EXPECT_FALSE(tree.key_is_null(1));
+ EXPECT_FALSE(tree.val_is_null(1));
+ EXPECT_FALSE(tree[0].key_is_null());
+ EXPECT_FALSE(tree[0].val_is_null());
+
+ C4_SUPPRESS_WARNING_GCC_POP
+}
+
+
+TEST(null_val, null_key)
+{
+ auto tree = parse_in_arena(R"({null: null})");
+
+ ASSERT_EQ(tree.size(), 2u);
+ _check_null_pointing_at(tree[0], key, "null: ", tree.arena());
+ _check_null_pointing_at(tree[0], val, "null}", tree.arena());
+}
+
+
+TEST(null_val, readme_example)
+{
+ csubstr yaml = R"(
+seq:
+ - ~
+ - null
+ -
+ -
+ # a comment
+ -
+map:
+ val0: ~
+ val1: null
+ val2:
+ val3:
+ # a comment
+ val4:
+)";
+ Parser p;
+ Tree t = p.parse_in_arena("file.yml", yaml);
+    // as expected: (len is zero, str is pointing at the value where the node starts)
+ EXPECT_EQ(t["seq"][0].val(), "~");
+ EXPECT_EQ(t["seq"][1].val(), "null");
+ EXPECT_EQ(t["seq"][2].val(), nullptr);
+ EXPECT_EQ(t["seq"][3].val(), nullptr);
+ EXPECT_EQ(t["seq"][4].val(), nullptr);
+ EXPECT_EQ(t["map"][0].val(), "~");
+ EXPECT_EQ(t["map"][1].val(), "null");
+ EXPECT_EQ(t["map"][2].val(), nullptr);
+ EXPECT_EQ(t["map"][3].val(), nullptr);
+ EXPECT_EQ(t["map"][4].val(), nullptr);
+ // standard null values point at the expected location:
+ EXPECT_EQ(csubstr(t["seq"][0].val().str, 1), csubstr("~"));
+ EXPECT_EQ(csubstr(t["seq"][1].val().str, 4), csubstr("null"));
+ EXPECT_EQ(csubstr(t["map"]["val0"].val().str, 1), csubstr("~"));
+ EXPECT_EQ(csubstr(t["map"]["val1"].val().str, 4), csubstr("null"));
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(NULL_VAL)
+{
+
+ADD_CASE_TO_GROUP("all null",
+R"(
+-
+- # with space
+- null
+- Null
+- NULL
+- ~
+- null: null
+- Null: Null
+- NULL: NULL
+- ~: ~
+- ~: null
+- null: ~
+)",
+L{
+N(VAL, nullptr),
+N(VAL, nullptr),
+N(VAL, "null"),
+N(VAL, "Null"),
+N(VAL, "NULL"),
+N(VAL, "~"),
+N(MAP, L{N(KEYVAL, "null", "null")}),
+N(MAP, L{N(KEYVAL, "Null", "Null")}),
+N(MAP, L{N(KEYVAL, "NULL", "NULL")}),
+N(MAP, L{N(KEYVAL, "~", "~")}),
+N(MAP, L{N(KEYVAL, "~", "null")}),
+N(MAP, L{N(KEYVAL, "null", "~")}),
+});
+
+ADD_CASE_TO_GROUP("null map vals, expl",
+R"({foo: , bar: , baz: }
+)",
+L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null map vals, impl",
+R"(
+foo:
+bar:
+baz:
+)",
+L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null seq vals, impl",
+R"(-
+-
+-
+)",
+L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 1",
+R"(
+foo:
+ -
+ -
+ -
+bar:
+baz:
+)",
+L{N("foo", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 2",
+R"(
+foo:
+bar:
+ -
+ -
+ -
+baz:
+)",
+L{N(KEYVAL, "foo", nullptr), N("bar", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}), N(KEYVAL, "baz", nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 3",
+R"(
+foo:
+bar:
+baz:
+ -
+ -
+ -
+)",
+L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N("baz", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)})}
+);
+
+ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 1",
+R"(
+- foo:
+ bar:
+ baz:
+-
+-
+)",
+L{N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}), N(VAL, nullptr), N(VAL, nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 2",
+R"(
+-
+- foo:
+ bar:
+ baz:
+-
+)",
+L{N(VAL, nullptr), N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}), N(VAL, nullptr)}
+);
+
+ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 3",
+R"(
+-
+-
+- foo:
+ bar:
+ baz:
+)",
+L{N(VAL, nullptr), N(VAL, nullptr), N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)})}
+);
+
+ADD_CASE_TO_GROUP("issue84.1",
+R"(
+fixed case:
+ foo: a
+ bar:
+your case:
+ foo: a
+ bar: ''
+whatever: baz
+)",
+L{
+N("fixed case", L{N("foo", "a"), N(KEYVAL, "bar", nullptr)}),
+N("your case", L{N("foo", "a"), N(QV, "bar", "")}),
+N("whatever", "baz"),
+});
+
+ADD_CASE_TO_GROUP("issue84.2",
+R"(
+version: 0
+type: xml
+param_root:
+ objects:
+ System: {SameGroupActorName: '', IsGetItemSelf: false}
+ General:
+ Speed: 1.0
+ Life: 100
+ IsLifeInfinite: false
+ ElectricalDischarge: 1.0
+ IsBurnOutBorn: false
+ BurnOutBornName:
+ IsBurnOutBornIdent: false
+ ChangeDropTableName: ''
+)",
+L{
+N("version", "0"),
+N("type", "xml"),
+N("param_root", L{
+ N("objects", L{
+ N("System", L{
+ N(QV, "SameGroupActorName", ""),
+ N("IsGetItemSelf", "false")
+ }),
+ N("General", L{
+ N("Speed", "1.0"),
+ N("Life", "100"),
+ N("IsLifeInfinite", "false"),
+ N("ElectricalDischarge", "1.0"),
+ N("IsBurnOutBorn", "false"),
+ N(KEYVAL, "BurnOutBornName", nullptr),
+ N("IsBurnOutBornIdent", "false"),
+ N(QV, "ChangeDropTableName", ""),
+ }),
+ })
+}),
+});
+
+ADD_CASE_TO_GROUP("issue84.3",
+R"(
+version: 10
+type: test
+param_root:
+ objects:
+ TestContent:
+ Str64_empty: ''
+ Str64_empty2:
+ Str64_empty3: ''
+ lists: {}
+)",
+L{
+N("version", "10"),
+N("type", "test"),
+N("param_root", L{
+ N("objects", L{
+ N("TestContent", L{
+ N(QV, "Str64_empty", ""),
+ N(KEYVAL, "Str64_empty2", nullptr),
+ N(QV, "Str64_empty3", ""),
+ }),
+ }),
+ N(KEYMAP, "lists", L{})
+}),
+});
+
+}
+
+} // namespace yml
+} // namespace c4
+
+C4_SUPPRESS_WARNING_GCC_POP
diff --git a/thirdparty/ryml/test/test_number.cpp b/thirdparty/ryml/test/test_number.cpp
new file mode 100644
index 000000000..41c388ae6
--- /dev/null
+++ b/thirdparty/ryml/test/test_number.cpp
@@ -0,0 +1,217 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+template<class I>
+auto mkvals() -> typename std::enable_if<std::is_signed<I>::value, std::vector<I>>::type
+{
+ return std::vector<I>({std::numeric_limits<I>::min(), -10, -5, -1, 0, 1, 5, 10, std::numeric_limits<I>::max(),});
+}
+template<class I>
+auto mkvals() -> typename std::enable_if<!std::is_signed<I>::value, std::vector<I>>::type
+{
+ return std::vector<I>({0, 1, 5, 10, std::numeric_limits<I>::max(),});
+}
+template<class I>
+void test_ints()
+{
+ C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
+ std::vector<I> values = mkvals<I>();
+ Tree t = parse_in_arena("{dec: [], hex: [], bin: [], oct: [], versions: ['0.1', 0.1.2, 0.1.2.3, 0.1.2.3.4]}");
+ NodeRef r = t.rootref();
+ for(I val : values)
+ {
+ I out;
+ r["dec"].append_child() << val;
+ r["hex"].append_child() << fmt::hex(val);
+ r["bin"].append_child() << fmt::bin(val);
+ r["oct"].append_child() << fmt::oct(val);
+ out = (I)(val + I(1));
+ r["dec"].last_child() >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ r["hex"].last_child() >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ r["bin"].last_child() >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ r["oct"].last_child() >> out;
+ EXPECT_EQ(out, val);
+ }
+ {
+ std::string emitted = emitrs_yaml<std::string>(t);
+ Tree parsed = parse_in_place(to_substr(emitted));
+ ASSERT_EQ(parsed["dec"].num_children(), values.size());
+ ASSERT_EQ(parsed["hex"].num_children(), values.size());
+ ASSERT_EQ(parsed["bin"].num_children(), values.size());
+ ASSERT_EQ(parsed["oct"].num_children(), values.size());
+ ASSERT_EQ(parsed["versions"].num_children(), 4u);
+ size_t pos = 0;
+ for(I val : values)
+ {
+ I out = (I)(val + I(1));
+ parsed["dec"][pos] >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["hex"][pos] >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["bin"][pos]>> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["oct"][pos] >> out;
+ EXPECT_EQ(out, val);
+ ++pos;
+ }
+ EXPECT_EQ(parsed["versions"][0], "0.1");
+ EXPECT_EQ(parsed["versions"][1], "0.1.2");
+ EXPECT_EQ(parsed["versions"][2], "0.1.2.3");
+ EXPECT_EQ(parsed["versions"][3], "0.1.2.3.4");
+ }
+ {
+ std::string emitted = emitrs_json<std::string>(t);
+ Tree parsed = parse_in_place(to_substr(emitted));
+ ASSERT_EQ(parsed["dec"].num_children(), values.size());
+ ASSERT_EQ(parsed["hex"].num_children(), values.size());
+ ASSERT_EQ(parsed["bin"].num_children(), values.size());
+ ASSERT_EQ(parsed["oct"].num_children(), values.size());
+ ASSERT_EQ(parsed["versions"].num_children(), 4u);
+ size_t pos = 0;
+ for(I val : values)
+ {
+ I out = (I)(val + I(1));
+ parsed["dec"][pos] >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["hex"][pos] >> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["bin"][pos]>> out;
+ EXPECT_EQ(out, val);
+ out = (I)(val + I(1));
+ parsed["oct"][pos] >> out;
+ EXPECT_EQ(out, val);
+ ++pos;
+ }
+ EXPECT_EQ(parsed["versions"][0], "0.1");
+ EXPECT_EQ(parsed["versions"][1], "0.1.2");
+ EXPECT_EQ(parsed["versions"][2], "0.1.2.3");
+ EXPECT_EQ(parsed["versions"][3], "0.1.2.3.4");
+ }
+ C4_SUPPRESS_WARNING_GCC_POP
+}
+
+TEST(number, idec)
+{
+ test_ints<int8_t>();
+ test_ints<int16_t>();
+ test_ints<int32_t>();
+ test_ints<int64_t>();
+ test_ints<uint8_t>();
+ test_ints<uint16_t>();
+ test_ints<uint32_t>();
+ test_ints<uint64_t>();
+}
+
+
+
+CASE_GROUP(NUMBER)
+{
+
+ADD_CASE_TO_GROUP("integer numbers, flow", JSON_ALSO,
+R"(translation: [-2, -2, 5, 0xa, -0xb, 0XA, -0XA, 0b10, -0b10, 0B10, -0B10, 0o17, -0o17, 0O17, -0O17])",
+L{N("translation", L{
+ N("-2"), N("-2"), N("5"),
+ N("0xa"), N("-0xb"),
+ N("0XA"), N("-0XA"),
+ N("0b10"), N("-0b10"),
+ N("0B10"), N("-0B10"),
+ N("0o17"), N("-0o17"),
+ N("0O17"), N("-0O17"),
+})});
+
+ADD_CASE_TO_GROUP("integer numbers, block", JSON_ALSO,
+R"(translation:
+ - -2
+ - -2
+ - -5
+)",
+L{N("translation", L{N("-2"), N("-2"), N("-5")})}
+);
+
+ADD_CASE_TO_GROUP("floating point numbers, flow", JSON_ALSO,
+R"([-2.0, -2.1, 0.1, .1, -.2, -2.e+6, -3e-6, 1.12345e+011])",
+L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
+);
+
+ADD_CASE_TO_GROUP("floating point numbers, block", JSON_ALSO,
+R"(
+- -2.0
+- -2.1
+- 0.1
+- .1
+- -.2
+- -2.e+6
+- -3e-6
+- 1.12345e+011
+)",
+L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
+);
+
+ADD_CASE_TO_GROUP("hex floating point numbers, block", JSON_ALSO,
+R"(
+- -2.0
+- -2.1
+- 0.1
+- .1
+- -.2
+- -2.e+6
+- -3e-6
+- 1.12345e+011
+)",
+L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
+);
+
+ADD_CASE_TO_GROUP("version numbers", JSON_ALSO,
+R"(
+- 1.2.3
+- 1.2.3.4
+- [1.2.3, 4.5.6]
+- [1.2.3.4, 4.5.6.7]
+- - 1.2.3
+ - 4.5.6
+- - 1.2.3.4
+ - 4.5.6.7
+- a: 1.2.3
+- a: 1.2.3.4
+- {a: 1.2.3}
+- {a: 1.2.3.4}
+- a: 1.2.3
+ b: 4.5.6
+- a: 1.2.3.4
+ b: 4.5.6.7
+- {a: 1.2.3, b: 4.5.6}
+- {a: 1.2.3.4, b: 4.5.6.7}
+)",
+L{
+ N("1.2.3"),
+ N("1.2.3.4"),
+ N(L{N("1.2.3"), N("4.5.6")}),
+ N(L{N("1.2.3.4"), N("4.5.6.7")}),
+ N(L{N("1.2.3"), N("4.5.6")}),
+ N(L{N("1.2.3.4"), N("4.5.6.7")}),
+ N(L{N("a", "1.2.3")}),
+ N(L{N("a", "1.2.3.4")}),
+ N(L{N("a", "1.2.3")}),
+ N(L{N("a", "1.2.3.4")}),
+ N(L{N("a", "1.2.3"), N("b", "4.5.6")}),
+ N(L{N("a", "1.2.3.4"), N("b", "4.5.6.7")}),
+ N(L{N("a", "1.2.3"), N("b", "4.5.6")}),
+ N(L{N("a", "1.2.3.4"), N("b", "4.5.6.7")}),
+});
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_parser.cpp b/thirdparty/ryml/test/test_parser.cpp
new file mode 100644
index 000000000..3ef9ee145
--- /dev/null
+++ b/thirdparty/ryml/test/test_parser.cpp
@@ -0,0 +1,566 @@
+#ifdef RYML_SINGLE_HEADER
+#include "ryml_all.hpp"
+#else
+#include "c4/yml/parse.hpp"
+#endif
+#include <gtest/gtest.h>
+#include "./callbacks_tester.hpp"
+
+
+namespace c4 {
+namespace yml {
+
+// TODO: add this as a method to csubstr
+bool is_same(csubstr lhs, csubstr rhs)
+{
+ return lhs.str == rhs.str && lhs.len == rhs.len;
+}
+
+void mklarge(Parser *p, Callbacks const& cb)
+{
+ p->~Parser();
+ new ((void*)p) Parser(cb);
+ p->reserve_stack(20); // cause an allocation
+ p->reserve_locations(128); // cause an allocation
+ p->reserve_filter_arena(128); // cause an allocation
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(Parser, empty_ctor)
+{
+ Parser parser;
+ EXPECT_EQ(parser.callbacks(), get_callbacks());
+}
+
+TEST(Parser, callbacks_ctor)
+{
+ CallbacksTester cbt;
+ {
+ Parser parser(cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbt.callbacks());
+ }
+ EXPECT_EQ(cbt.num_allocs, 0u);
+ EXPECT_EQ(cbt.num_deallocs, 0u);
+}
+
+TEST(Parser, reserve_capacity)
+{
+ CallbacksTester cbt("test", 20000/*Bytes*/);
+ {
+ Parser parser(cbt.callbacks());
+ EXPECT_EQ(cbt.num_allocs, 0u);
+ EXPECT_EQ(cbt.num_deallocs, 0u);
+ parser.reserve_stack(18);
+ EXPECT_EQ(cbt.num_allocs, 1u);
+ EXPECT_EQ(cbt.num_deallocs, 0u);
+ parser.reserve_stack(24);
+ EXPECT_EQ(cbt.num_allocs, 2u);
+ EXPECT_EQ(cbt.num_deallocs, 1u);
+ parser.reserve_stack(28);
+ EXPECT_EQ(cbt.num_allocs, 3u);
+ EXPECT_EQ(cbt.num_deallocs, 2u);
+ }
+ EXPECT_EQ(cbt.num_allocs, 3u);
+ EXPECT_EQ(cbt.num_deallocs, 3u);
+ cbt.check();
+}
+
+TEST(Parser, reserve_locations)
+{
+ CallbacksTester ts;
+ {
+ Parser parser(ts.callbacks());
+ EXPECT_EQ(parser.callbacks(), ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ parser.reserve_locations(128);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(ts.alloc_size, 128u * sizeof(size_t));
+ EXPECT_EQ(ts.dealloc_size, 0u);
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ EXPECT_EQ(ts.alloc_size, 128u * sizeof(size_t));
+ EXPECT_EQ(ts.dealloc_size, 128u * sizeof(size_t));
+}
+
+TEST(Parser, reserve_filter_arena)
+{
+ size_t cap = 256u;
+ CallbacksTester ts;
+ {
+ Parser parser(ts.callbacks());
+ EXPECT_EQ(parser.filter_arena_capacity(), 0u);
+ EXPECT_EQ(parser.callbacks(), ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ parser.reserve_filter_arena(cap);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(ts.alloc_size, cap);
+ EXPECT_EQ(ts.dealloc_size, 0u);
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ EXPECT_EQ(ts.alloc_size, cap);
+ EXPECT_EQ(ts.dealloc_size, cap);
+}
+
+TEST(Parser, copy_ctor)
+{
+ {
+ Parser src;
+ mklarge(&src, get_callbacks());
+ EXPECT_EQ(src.callbacks(), get_callbacks());
+ Parser dst(src);
+ EXPECT_EQ(src.callbacks(), get_callbacks());
+ EXPECT_EQ(dst.callbacks(), get_callbacks());
+ }
+ {
+ CallbacksTester ts;
+ {
+ Parser src;
+ mklarge(&src, ts.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ Parser dst(src);
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_GT(ts.num_allocs, nbefore);
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+ }
+}
+
+TEST(Parser, move_ctor)
+{
+ {
+ Parser src;
+ mklarge(&src, get_callbacks());
+ EXPECT_EQ(src.callbacks(), get_callbacks());
+ Parser dst(std::move(src));
+ EXPECT_EQ(src.callbacks(), get_callbacks());
+ EXPECT_EQ(dst.callbacks(), get_callbacks());
+ }
+ {
+ CallbacksTester ts;
+ {
+ Parser src;
+ mklarge(&src, ts.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ Parser dst(std::move(src));
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, nbefore);
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+ }
+}
+
+TEST(Parser, copy_assign_same_callbacks)
+{
+ CallbacksTester ts;
+ {
+ Parser src;
+ Parser dst;
+ mklarge(&src, ts.callbacks());
+ mklarge(&dst, ts.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ EXPECT_GT(ts.num_allocs, 0u);
+ dst = src;
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_GT(ts.num_allocs, nbefore);
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+}
+
+TEST(Parser, copy_assign_diff_callbacks)
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ {
+ Parser src;
+ Parser dst;
+ mklarge(&src, ts.callbacks());
+ mklarge(&dst, td.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), td.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ EXPECT_GT(td.num_allocs, 0u);
+ dst = src;
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_GT(ts.num_allocs, nbefore);
+ EXPECT_EQ(td.num_allocs, nbefore);
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+ EXPECT_EQ(td.num_allocs, td.num_deallocs);
+ EXPECT_EQ(td.alloc_size, td.dealloc_size);
+}
+
+TEST(Parser, move_assign_same_callbacks)
+{
+ CallbacksTester ts;
+ {
+ Parser src;
+ Parser dst;
+ mklarge(&src, ts.callbacks());
+ mklarge(&dst, ts.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ EXPECT_GT(ts.num_allocs, 0u);
+ dst = std::move(src);
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, nbefore);
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+}
+
+TEST(Parser, move_assign_diff_callbacks)
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ {
+ Parser src;
+ Parser dst;
+ mklarge(&src, ts.callbacks());
+ mklarge(&dst, td.callbacks());
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), td.callbacks());
+ size_t nbefore = ts.num_allocs;
+ EXPECT_GT(ts.num_allocs, 0u);
+ EXPECT_GT(td.num_allocs, 0u);
+ dst = std::move(src);
+ ASSERT_EQ(src.callbacks(), ts.callbacks());
+ ASSERT_EQ(dst.callbacks(), ts.callbacks());
+ EXPECT_EQ(td.num_allocs, nbefore); // dst frees with td
+ EXPECT_EQ(ts.num_allocs, nbefore); // dst moves from ts
+ }
+ EXPECT_EQ(ts.num_allocs, ts.num_deallocs);
+ EXPECT_EQ(ts.alloc_size, ts.dealloc_size);
+ EXPECT_EQ(td.num_allocs, td.num_deallocs);
+ EXPECT_EQ(td.alloc_size, td.dealloc_size);
+}
+
+TEST(Parser, new_tree_receives_callbacks)
+{
+ char src_[] = "{a: b}";
+ substr src = src_;
+ csubstr csrc = src_;
+ {
+ {
+ Parser parser;
+ EXPECT_EQ(parser.callbacks(), get_callbacks());
+ Tree t = parser.parse_in_arena("file0", csrc);
+ EXPECT_EQ(t.callbacks(), get_callbacks());
+ }
+ CallbacksTester cbt("test", 20000/*Bytes*/);
+ {
+ Parser parser(cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbt.callbacks());
+ Tree t = parser.parse_in_arena("file1", csrc);
+ EXPECT_EQ(t.callbacks(), cbt.callbacks());
+ }
+ cbt.check();
+ }
+ {
+ {
+ Parser parser;
+ EXPECT_EQ(parser.callbacks(), get_callbacks());
+ Tree t = parser.parse_in_place("file", src);
+ EXPECT_EQ(t.callbacks(), get_callbacks());
+ }
+ CallbacksTester cbt("test", 20000/*Bytes*/);
+ {
+ Parser parser(cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbt.callbacks());
+ Tree t = parser.parse_in_place("file", src);
+ EXPECT_EQ(t.callbacks(), cbt.callbacks());
+ }
+ cbt.check();
+ }
+}
+
+TEST(Parser, existing_tree_overwrites_parser_callbacks)
+{
+ char src_[] = "{a: b}";
+ substr src = src_;
+ csubstr csrc = src_;
+ {
+ CallbacksTester cbp("parser");
+ CallbacksTester cbt("tree");
+ {
+ Tree tree(cbt.callbacks());
+ Parser parser(cbp.callbacks());
+ EXPECT_EQ(tree.callbacks(), cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbp.callbacks());
+ parser.parse_in_arena("file", csrc, &tree);
+ EXPECT_EQ(tree.callbacks(), cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbp.callbacks());
+ }
+ cbp.check();
+ cbt.check();
+ }
+ {
+ CallbacksTester cbp("parser");
+ CallbacksTester cbt("tree");
+ {
+ Tree tree(cbt.callbacks());
+ Parser parser(cbp.callbacks());
+ EXPECT_EQ(tree.callbacks(), cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbp.callbacks());
+ parser.parse_in_place("file", src, &tree);
+ EXPECT_EQ(tree.callbacks(), cbt.callbacks());
+ EXPECT_EQ(parser.callbacks(), cbp.callbacks());
+ }
+ cbp.check();
+ cbt.check();
+ }
+}
+
+TEST(Parser, filename_and_buffer_are_stored)
+{
+ char src_[] = "{a: b}";
+ substr src = src_;
+ csubstr csrc = src_;
+ Parser parser;
+ EXPECT_EQ(parser.filename(), csubstr{});
+ {
+ Tree tree = parser.parse_in_place("file0", src);
+ EXPECT_EQ(parser.filename(), "file0");
+ EXPECT_TRUE(is_same(parser.source(), src));
+ }
+ {
+ Tree tree = parser.parse_in_arena("file1", csrc);
+ EXPECT_EQ(parser.filename(), "file1");
+ EXPECT_TRUE(!is_same(parser.source(), src));
+ }
+ {
+ Tree tree = parser.parse_in_place("file2", src);
+ EXPECT_EQ(parser.filename(), "file2");
+ EXPECT_TRUE(is_same(parser.source(), src));
+ }
+ {
+ Tree tree = parser.parse_in_arena({}, csrc);
+ EXPECT_EQ(parser.filename(), csubstr{});
+ EXPECT_TRUE(!is_same(parser.source(), src));
+ }
+}
+
+TEST(parse_in_place, overloads)
+{
+ char src1_[] = "{a: b}";
+ char src2_[] = "{c: d, e: {}}";
+ {
+ Tree tree = parse_in_place(src1_);
+ EXPECT_EQ(tree["a"].val(), "b");
+ }
+ {
+ Tree tree = parse_in_place("src1", src1_);
+ EXPECT_EQ(tree["a"].val(), "b");
+ }
+ {
+ Tree tree;
+ parse_in_place(src1_, &tree);
+ EXPECT_EQ(tree["a"].val(), "b");
+ }
+ {
+ Tree tree;
+ parse_in_place("src1", src1_, &tree);
+ EXPECT_EQ(tree["a"].val(), "b");
+ }
+ {
+ Tree tree = parse_in_place(src2_);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ size_t e = tree.find_child(tree.root_id(), "e");
+ ASSERT_NE(e, (size_t)NONE);
+ parse_in_place(src1_, &tree, e);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ }
+ {
+ Tree tree = parse_in_place("src2", src2_);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ size_t e = tree.find_child(tree.root_id(), "e");
+ ASSERT_NE(e, (size_t)NONE);
+ parse_in_place("src1", src1_, &tree, e);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ }
+ {
+ Tree tree = parse_in_place(src2_);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ parse_in_place(src1_, tree["e"]);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ }
+ {
+ Tree tree = parse_in_place("src2", src2_);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ parse_in_place("src1", src1_, tree["e"]);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ }
+}
+
+TEST(parse_in_arena, overloads)
+{
+ csubstr src1 = "{a: b}";
+ csubstr src2 = "{c: d, e: {}}";
+ {
+ Tree tree = parse_in_arena(src1);
+ EXPECT_EQ(tree["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ }
+ {
+ Tree tree = parse_in_arena("src1", src1);
+ EXPECT_EQ(tree["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ }
+ {
+ Tree tree;
+ parse_in_arena(src1, &tree);
+ EXPECT_EQ(tree["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ }
+ {
+ Tree tree;
+ parse_in_arena("src1", src1, &tree);
+ EXPECT_EQ(tree["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ }
+ {
+ Tree tree = parse_in_arena(src2);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ size_t e = tree.find_child(tree.root_id(), "e");
+ ASSERT_NE(e, (size_t)NONE);
+ parse_in_arena(src1, &tree, e);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ }
+ {
+ Tree tree = parse_in_arena("src2", src2);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ size_t e = tree.find_child(tree.root_id(), "e");
+ ASSERT_NE(e, (size_t)NONE);
+ parse_in_arena("src1", src1, &tree, e);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ }
+ {
+ Tree tree = parse_in_arena(src2);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ parse_in_arena(src1, tree["e"]);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ }
+ {
+ Tree tree = parse_in_arena("src2", src2);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].is_map(), true);
+ EXPECT_EQ(tree["e"].has_children(), false);
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ parse_in_arena("src1", src1, tree["e"]);
+ EXPECT_EQ(tree["c"].val(), "d");
+ EXPECT_EQ(tree["e"].has_children(), true);
+ EXPECT_EQ(tree["e"]["a"].val(), "b");
+ EXPECT_FALSE(tree.arena().empty());
+ EXPECT_NE(tree.arena().find(src1), (size_t)npos);
+ EXPECT_NE(tree.arena().find(src2), (size_t)npos);
+ }
+}
+
+TEST(parse_in_place, version_numbers)
+{
+ char src1_[] = "{a: 1.2.3}";
+ {
+ Tree tree = parse_in_place(src1_);
+ EXPECT_EQ(tree["a"].val(), "1.2.3");
+ }
+}
+
+} // namespace yml
+} // namespace c4
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// this is needed to use the test case library
+
+#ifndef RYML_SINGLE_HEADER
+#include "c4/substr.hpp"
+#endif
+
+namespace c4 {
+namespace yml {
+struct Case;
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_plain_scalar.cpp b/thirdparty/ryml/test/test_plain_scalar.cpp
new file mode 100644
index 000000000..ec147c5d8
--- /dev/null
+++ b/thirdparty/ryml/test/test_plain_scalar.cpp
@@ -0,0 +1,800 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(plain_scalar, issue153_seq)
+{
+ Tree t = parse_in_arena("- A\n \n");
+ EXPECT_EQ(t[0].val(), "A");
+}
+
+TEST(plain_scalar, issue153_map)
+{
+ Tree t = parse_in_arena("foo: A\n \n");
+ EXPECT_EQ(t["foo"].val(), "A");
+}
+
+
+TEST(plain_scalar, test_suite_7TMG)
+{
+ csubstr yaml = R"(---
+word1
+# comment
+---
+# first value is NOT a multiline plain scalar
+[ word1
+# comment
+, word2]
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_TRUE(t.rootref().is_stream());
+ ConstNodeRef doc = t.rootref().first_child();
+ ASSERT_TRUE(doc.is_doc());
+ ASSERT_TRUE(doc.is_val());
+ EXPECT_EQ(doc.val(), "word1");
+ doc = t.rootref().child(1);
+ ASSERT_TRUE(doc.is_doc());
+ ASSERT_TRUE(doc.is_seq());
+ EXPECT_EQ(doc[0].val(), "word1");
+ EXPECT_EQ(doc[1].val(), "word2");
+ });
+}
+
+
+TEST(plain_scalar, test_suite_82AN)
+{
+ csubstr yaml = R"(
+---word1
+word2
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_doc());
+ ASSERT_TRUE(t.rootref().is_val());
+ EXPECT_EQ(t.rootref().val(), csubstr("---word1 word2"));
+ });
+}
+
+TEST(plain_scalar, test_suite_EXG3)
+{
+ csubstr yaml = R"(
+---
+---word1
+word2
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_TRUE(t.rootref().first_child().is_doc());
+ ASSERT_TRUE(t.rootref().first_child().is_val());
+ EXPECT_EQ(t.rootref().first_child().val(), csubstr("---word1 word2"));
+ });
+}
+
+
+TEST(plain_scalar, test_suite_9YRD)
+{
+ csubstr yaml = R"(
+a
+b
+ c
+d
+
+e
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_doc());
+ ASSERT_TRUE(t.rootref().is_val());
+ EXPECT_EQ(t.rootref().val(), csubstr("a b c d\ne"));
+ });
+}
+
+TEST(plain_scalar, test_suite_EX5H)
+{
+ csubstr yaml = R"(
+---
+a
+b
+ c
+d
+
+e
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_TRUE(t.rootref().child(0).is_doc());
+ ASSERT_TRUE(t.rootref().child(0).is_val());
+ EXPECT_EQ(t.rootref().child(0).val(), csubstr("a b c d\ne"));
+ });
+}
+
+
+TEST(plain_scalar, test_suite_M7A3)
+{
+ csubstr yaml = R"(
+Bare
+document
+...
+# No document
+...
+|
+%!PS-Adobe-2.0 # Not the first line
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_EQ(t.rootref().num_children(), 2u);
+ EXPECT_EQ(t.rootref().child(0).val(), csubstr("Bare document"));
+ EXPECT_EQ(t.rootref().child(1).val(), csubstr("%!PS-Adobe-2.0 # Not the first line\n"));
+ });
+}
+
+
+TEST(plain_scalar, test_suite_HS5T)
+{
+ csubstr yaml = R"(
+1st non-empty
+
+ 2nd non-empty
+ 3rd non-empty
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_doc());
+ ASSERT_TRUE(t.rootref().is_val());
+ EXPECT_EQ(t.rootref().val(), csubstr("1st non-empty\n2nd non-empty 3rd non-empty"));
+ });
+}
+
+TEST(plain_scalar, test_suite_NB6Z)
+{
+ csubstr yaml = R"(
+key:
+ value
+ with
+
+ tabs
+ tabs
+
+ foo
+
+ bar
+ baz
+
+key1:
+ value
+ with
+
+ tabs
+ tabs
+
+ foo
+
+ bar
+ baz
+
+key2: something
+ else
+key3: something
+ else
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("key"));
+ ASSERT_TRUE(t.rootref().has_child("key1"));
+ ASSERT_TRUE(t.rootref().has_child("key2"));
+ ASSERT_TRUE(t.rootref().has_child("key3"));
+ EXPECT_EQ(t["key"].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
+ EXPECT_EQ(t["key1"].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
+ EXPECT_EQ(t["key2"].val(), csubstr("something else"));
+ EXPECT_EQ(t["key3"].val(), csubstr("something else"));
+ });
+}
+
+TEST(plain_scalar, test_suite_NB6Z_seq)
+{
+ csubstr yaml = R"(
+- value
+ with
+
+ tabs
+ tabs
+
+ foo
+
+ bar
+ baz
+
+- value
+ with
+
+ tabs
+ tabs
+
+ foo
+
+ bar
+ baz
+
+- more
+ value
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_seq());
+ ASSERT_EQ(t.rootref().num_children(), 3u);
+ EXPECT_EQ(t[0].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
+ EXPECT_EQ(t[1].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
+ EXPECT_EQ(t[2].val(), csubstr("more value"));
+ });
+}
+
+TEST(plain_scalar, test_suite_NB6Z_docval)
+{
+ csubstr yaml = R"(
+value
+with
+
+tabs
+tabs
+
+ foo
+
+ bar
+ baz
+
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_doc());
+ ASSERT_TRUE(t.rootref().is_val());
+ EXPECT_EQ(t.rootref().val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
+ });
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(PLAIN_SCALAR)
+{
+//
+ADD_CASE_TO_GROUP("plain scalar, 1 word only",
+R"(a_single_word_scalar_to_test)",
+ N(DOCVAL, "a_single_word_scalar_to_test")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, 1 line with spaces",
+R"(a scalar with spaces in it all in one line)",
+ N(DOCVAL, "a scalar with spaces in it all in one line")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline",
+R"(
+a scalar with several lines in it
+ of course also with spaces but for now there are no quotes
+ and also no blank lines to speak of)",
+ N(DOCVAL, "a scalar with several lines in it of course also with spaces but for now there are no quotes and also no blank lines to speak of")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline, unindented",
+R"(
+a scalar with several lines in it
+ of course also with spaces but for now there are no quotes
+ and also no blank lines to speak of)",
+ N(DOCVAL, "a scalar with several lines in it of course also with spaces but for now there are no quotes and also no blank lines to speak of")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes",
+R"(
+a scalar with several lines in it and also 'single quotes'
+ and "double quotes" and assorted escapes such as \r or \n)",
+ N(DOCVAL, "a scalar with several lines in it and also 'single quotes' and \"double quotes\" and assorted escapes such as \\r or \\n")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines middle",
+R"(
+A scalar with several lines in it and also 'single quotes'.
+ A blank line follows after this one.
+
+ And "double quotes" and assorted escapes such as \r or \n)",
+ N(DOCVAL, "A scalar with several lines in it and also 'single quotes'. A blank line follows after this one.\nAnd \"double quotes\" and assorted escapes such as \\r or \\n")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines first",
+R"(
+A scalar with several lines in it and also 'single quotes'.
+
+ A blank line precedes this one.
+ And "double quotes" and assorted escapes such as \r or \n)",
+ N(DOCVAL, "A scalar with several lines in it and also 'single quotes'.\nA blank line precedes this one. And \"double quotes\" and assorted escapes such as \\r or \\n")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines last",
+R"(
+A scalar with several lines in it and also 'single quotes'.
+ And "double quotes" and assorted escapes such as \r or \n.
+ A blank line follows after this one.
+
+ )",
+ N(DOCVAL, "A scalar with several lines in it and also 'single quotes'. And \"double quotes\" and assorted escapes such as \\r or \\n. A blank line follows after this one.")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, example",
+R"(
+Several lines of text
+ with some "quotes" of various 'types'.
+ Escapes (like \n) don't do anything.
+
+ Newlines can be added by leaving a blank line.
+ Additional leading whitespace is ignored.)",
+ N(DOCVAL, "Several lines of text with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")
+);
+
+ADD_CASE_TO_GROUP("plain scalar, map example 1",
+R"(
+example: Several lines of text,
+ with some "quotes" of various 'types'.
+ Escapes (like \n) don't do anything.
+
+ Newlines can be added by leaving a blank line.
+ Additional leading whitespace is ignored.
+
+another example: Several lines of text,
+
+ but the second line is empty, and _indented_.
+ There are more lines that follow.
+
+yet another example: Several lines of text,
+
+ but the second line is empty, and _unindented_.
+ There are more lines that follow.
+final example: Several lines of text,
+
+
+ but the second line is empty, and _unindented_.
+ There are more lines that follow. And the last line
+ terminates at the end of the file.)",
+ L{
+ N("example", "Several lines of text, with some \"quotes\" of various 'types'. "
+ "Escapes (like \\n) don't do anything.\n"
+ "Newlines can be added by leaving a blank line. "
+ "Additional leading whitespace is ignored."),
+ N("another example", "Several lines of text,\n"
+ "but the second line is empty, and _indented_. "
+ "There are more lines that follow."),
+ N("yet another example", "Several lines of text,\n"
+ "but the second line is empty, and _unindented_. "
+ "There are more lines that follow."),
+ N("final example", "Several lines of text,\n\n"
+ "but the second line is empty, and _unindented_. "
+ "There are more lines that follow. "
+ "And the last line terminates at the end of the file."),
+ }
+);
+
+/*
+ADD_CASE_TO_GROUP("plain scalar, map example 2", IGNORE_LIBYAML_PARSE_FAIL|IGNORE_YAMLCPP_PARSE_FAIL,
+R"(
+example:
+ Several lines of text,
+ with some "quotes" of various 'types'.
+ Escapes (like \n) don't do anything.
+
+ Newlines can be added by leaving a blank line.
+ Additional leading whitespace is ignored.
+)",
+ L{N("example", "Several lines of text, with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")}
+);
+*/
+
+ADD_CASE_TO_GROUP("plain scalar, seq example 1",
+R"(
+- Several lines of text,
+ with some "quotes" of various 'types'.
+ Escapes (like \n) don't do anything.
+
+ Newlines can be added by leaving a blank line.
+ Additional leading whitespace is ignored.)",
+ L{N("Several lines of text, with some \"quotes\" of various 'types'. "
+ "Escapes (like \\n) don't do anything.\n"
+ "Newlines can be added by leaving a blank line. "
+ "Additional leading whitespace is ignored.")}
+);
+
+/*
+ADD_CASE_TO_GROUP("plain scalar, seq example 2", IGNORE_LIBYAML_PARSE_FAIL|IGNORE_YAMLCPP_PARSE_FAIL,
+R"(
+-
+ Several lines of text,
+ with some "quotes" of various 'types'.
+ Escapes (like \n) don't do anything.
+
+ Newlines can be added by leaving a blank line.
+ Additional leading whitespace is ignored.
+)",
+ L{N("Several lines of text, with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")}
+);
+*/
+
+ADD_CASE_TO_GROUP("plain scalar, special characters 1",
+R"(
+- Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+ How about empty lines?
+
+ Can we also have [] or {} inside?
+ Guess we can.
+ And how about at the beginning?
+ { - for example }
+ [ - for example ]
+ - - for example
+ ::- for example
+
+ and now two empty lines -
+
+
+ and now three empty lines -
+
+
+
+ and an empty line, unindented -
+
+ followed by more text
+ and another four at the end -
+
+
+
+
+)",
+ L{N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'. "
+ "How about empty lines?\n"
+ "Can we also have [] or {} inside? Guess we can. "
+ "And how about at the beginning? { - for example } [ - for example ] - - for example ::- for example\n"
+ "and now two empty lines -\n\n"
+ "and now three empty lines -\n\n\n"
+ "and an empty line, unindented -\n"
+ "followed by more text "
+ "and another four at the end -"
+ )}
+);
+
+ADD_CASE_TO_GROUP("plain scalar, special characters 3MYT",
+R"(--- # ZWK4
+a: 1
+? b
+&anchor c: 3 # the anchor is for the scalar 'c'
+? d
+!!str e: 4
+? f
+---
+k:#foo &a !t s
+---
+"k:#foo &a !t s"
+---
+'k:#foo &a !t s'
+
+--- # 3MYT
+k:#foo
+ &a !t s
+---
+k:#foo
+ &a !t s
+---
+k:#foo
+ &a !t s
+---
+k:#foo
+ &a !t s
+
+--- # 3MYT
+k:#foo
+ !t s
+---
+k:#foo
+ !t s
+---
+k:#foo
+ !t s
+---
+k:#foo
+ !t s
+)",
+ N(STREAM, L{
+ N(DOCMAP, L{
+ N("a", "1"),
+ N(KEYVAL, "b", {}),
+ N("c", AR(KEYANCH, "anchor"), "3"),
+ N(KEYVAL, "d", {}),
+ N(TS("!!str", "e"), "4"),
+ N(KEYVAL, "f", {}),
+ }),
+
+ N(DOCVAL, "k:#foo &a !t s"),
+ N(DOCVAL|VALQUO, "k:#foo &a !t s"),
+ N(DOCVAL|VALQUO, "k:#foo &a !t s"),
+
+ N(DOCVAL, "k:#foo &a !t s"),
+ N(DOCVAL, "k:#foo &a !t s"),
+ N(DOCVAL, "k:#foo &a !t s"),
+ N(DOCVAL, "k:#foo &a !t s"),
+
+ N(DOCVAL, "k:#foo !t s"),
+ N(DOCVAL, "k:#foo !t s"),
+ N(DOCVAL, "k:#foo !t s"),
+ N(DOCVAL, "k:#foo !t s"),
+ })
+ );
+
+// make sure there is no ambiguity with this case
+ADD_CASE_TO_GROUP("plain scalar, sequence ambiguity",
+R"(
+- - some text
+ - and this is a sequence
+- some text
+ - and this is /not/ a sequence
+- - some text
+ - and this is a sequence
+- some text
+ - and this is /not/ a sequence
+)",
+ L{
+ N(L{N("some text"), N("and this is a sequence")}),
+ N("some text - and this is /not/ a sequence"),
+ N(L{N("some text"), N("and this is a sequence")}),
+ N("some text - and this is /not/ a sequence"),
+ }
+);
+
+ADD_CASE_TO_GROUP("plain scalar, empty lines at the beginning",
+R"(
+-
+
+
+ Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+-
+
+ Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+-
+ Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+)",
+ L{
+ N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ }
+);
+
+ADD_CASE_TO_GROUP("plain scalar, empty continuation lines",
+R"(
+- the next lines have 2cols, 0cols, 2cols,
+
+
+
+ and this line has some text in it. -> 0
+
+ now 0, 0, 2, 2, 0, 1, 1, 0, 4, 4, 0, 0
+
+
+
+
+
+
+
+
+
+
+
+
+ and finally some more text
+)",
+ L{
+ N("the next lines have 2cols, 0cols, 2cols,"
+ "\n\n\n"
+ "and this line has some text in it. -> 0"
+ "\n"
+ "now 0, 0, 2, 2, 0, 1, 1, 0, 4, 4, 0, 0"
+ "\n\n\n\n\n\n\n\n\n\n\n\n"
+ "and finally some more text"),
+ }
+);
+
+
+ADD_CASE_TO_GROUP("plain scalar, indented first line",
+R"(
+- Several lines of text,
+
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+-
+
+ Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+-
+ Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+)",
+ L{
+ N("Several lines of text,\nwith special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
+ }
+);
+
+ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' mid line", EXPECT_PARSE_ERROR,
+R"(- Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+ But this: must cause a parse error.
+)",
+ LineCol(4, 11)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' start line", EXPECT_PARSE_ERROR,
+R"(
+- Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+ But this must cause a parse error -
+ : foo bar
+)",
+ LineCol(6, 3)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' at line end", EXPECT_PARSE_ERROR,
+R"(- Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+ But this must cause a parse error:
+)",
+ LineCol(4, 36)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, do not accept ':' at line end", EXPECT_PARSE_ERROR,
+R"(- Several lines of text,
+ with special:characters, like:this-or-this -
+ - and some "quotes" of various 'types'.
+ But this must cause a parse error:
+ - well, did it?
+)",
+ LineCol(4, 36)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, accept ' #' at line start",
+R"(- Several lines of text,
+ and this is valid -
+ #with special:characters, like:this-or-this -
+)",
+ L{N("Several lines of text, and this is valid -"),}
+);
+
+ADD_CASE_TO_GROUP("plain scalar, accept ' #' on first line",
+R"(- Several lines of text, and this is valid -
+ #with special:characters, like:this-or-this -
+)",
+ L{N("Several lines of text, and this is valid -")}
+);
+
+ADD_CASE_TO_GROUP("plain scalar, accept ' #' at line end",
+R"(- Several lines of text,
+ with special:characters, #comment at the end
+)",
+ L{N("Several lines of text, with special:characters,")}
+);
+
+ADD_CASE_TO_GROUP("plain scalar, accept '#'",
+R"(
+- Several lines of text, # with a comment
+- Several lines of text,
+ with special#characters, like#this_#_-or-#-:this -
+ - and some "quotes" of various 'types'.
+)",
+ L{
+ N("Several lines of text,"),
+ N("Several lines of text, "
+ "with special#characters, like#this_#_-or-#-:this - "
+ "- and some \"quotes\" of various 'types'."),
+ }
+);
+
+ADD_CASE_TO_GROUP("plain scalar, explicit",
+R"(
+[
+ a plain scalar
+ with several lines
+
+ and blank lines
+
+ as well
+ ,
+ and another plain scalar
+ ,
+ and yet another one
+
+
+
+with many lines
+
+and yet more, deindented
+]
+)",
+ L{
+ N("a plain scalar with several lines\nand blank lines\nas well"),
+ N("and another plain scalar"),
+ N("and yet another one\n\n\nwith many lines\nand yet more"),
+ N("deindented"),
+ }
+);
+
+ADD_CASE_TO_GROUP("plain scalar, explicit, early end, seq", EXPECT_PARSE_ERROR,
+R"([
+ a plain scalar
+ with several lines
+)",
+ LineCol(4, 1)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, explicit, early end, map", EXPECT_PARSE_ERROR,
+R"({foo:
+ a plain scalar
+ with several lines
+)",
+ LineCol(4, 1)
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiple docs",
+R"(---
+- a plain scalar
+ with several lines
+---
+- a second plain scalar
+ with several lines
+)",
+ N(STREAM, L{
+ N(DOCSEQ, L{N("a plain scalar with several lines")}),
+ N(DOCSEQ, L{N("a second plain scalar with several lines")}),
+ })
+);
+
+ADD_CASE_TO_GROUP("plain scalar, multiple docs, termination",
+R"(---
+- a plain scalar
+ with several lines
+...
+---
+- a second plain scalar
+ with several lines
+)",
+ N(STREAM, L{
+ N(DOCSEQ, L{N("a plain scalar with several lines")}),
+ N(DOCSEQ, L{N("a second plain scalar with several lines")}),
+ })
+);
+
+ADD_CASE_TO_GROUP("plain scalar, trailing whitespace",
+ R"(---
+foo
+---
+foo
+
+---
+foo
+
+
+
+)",
+ N(STREAM, L{
+ N(DOCVAL, "foo"),
+ N(DOCVAL, "foo"),
+ N(DOCVAL, "foo"),
+ })
+ );
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_preprocess.cpp b/thirdparty/ryml/test/test_preprocess.cpp
new file mode 100644
index 000000000..7f6719e5f
--- /dev/null
+++ b/thirdparty/ryml/test/test_preprocess.cpp
@@ -0,0 +1,53 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/string.hpp>
+#include <c4/yml/preprocess.hpp>
+#endif
+#include "./test_case.hpp"
+#include <gtest/gtest.h>
+
+namespace c4 {
+namespace yml {
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(preprocess, rxmap_basic)
+{
+ #define _test(val, expected) \
+ EXPECT_EQ(preprocess_rxmap<std::string>(val), expected)
+
+ _test("{}", "{}");
+ _test("a", "{a: 1}");
+ _test("{a}", "{a: 1}");
+ _test("a, b, c", "{a: 1, b: 1, c: 1}");
+ _test("a,b,c", "{a,b,c: 1}");
+ _test("a a a a, b, c", "{a a a a: 1, b: 1, c: 1}");
+ _test(",", "{,}");
+
+ _test("a: [b, c, d]", "{a: [b, c, d]}");
+ _test("a:b: [b, c, d]", "{a:b: [b, c, d]}");
+ _test("a,b: [b, c, d]", "{a,b: [b, c, d]}");
+ _test("a: {b, c, d}", "{a: {b, c, d}}");
+ _test("a: {b: {f, g}, c: {h, i}, d: {j, k}}",
+ "{a: {b: {f, g}, c: {h, i}, d: {j, k}}}");
+ _test("a: {b: {f g}, c: {f g}, d: {j, k}}",
+ "{a: {b: {f g}, c: {f g}, d: {j, k}}}");
+
+ #undef _test
+}
+
+
+
+// The other test executables are written to contain the declarative-style
+// YmlTestCases. This executable does not have any but the build setup
+// assumes it does, and links with the test lib, which requires an existing
+// get_case() function. So this is here to act as placeholder until (if?)
+// proper test cases are added here.
+Case const* get_case(csubstr)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_scalar_names.cpp b/thirdparty/ryml/test/test_scalar_names.cpp
new file mode 100644
index 000000000..b0d420349
--- /dev/null
+++ b/thirdparty/ryml/test/test_scalar_names.cpp
@@ -0,0 +1,94 @@
+#include "./test_group.hpp"
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+//# pragma warning(disable: 4127/*conditional expression is constant*/)
+//# pragma warning(disable: 4389/*'==': signed/unsigned mismatch*/)
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+#endif
+
+namespace c4 {
+namespace yml {
+
+#define _(name) N(#name) // makes it simpler
+#define __(name) N(#name, #name) // makes it simpler
+
+CASE_GROUP(SCALAR_NAMES)
+{
+
+ADD_CASE_TO_GROUP("funny names, seq",
+R"(
+- a
+- b:b
+- c{c
+- cc{
+- c}c
+- cc}
+- c!c
+- cc!
+- .foo
+- .
+- -a
+- +b
+- /b
+- :c
+- $g
+- "*"
+- '*'
+- >-
+ *
+- "*a"
+- '*a'
+- >-
+ *a
+)",
+L{_(a), _(b:b), _(c{c), _(cc{), _(c}c), _(cc}), _(c!c), _(cc!), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g),
+ N(QV, "*"), N(QV, "*"), N(QV, "*"), N(QV, "*a"), N(QV, "*a"), N(QV, "*a")}
+);
+
+ADD_CASE_TO_GROUP("funny names, seq expl",
+R"([a, b, c, .foo, ., -a, +b, /b, :c, $g])",
+L{_(a), _(b), _(c), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g)}
+);
+
+ADD_CASE_TO_GROUP("funny names, map",
+R"(
+a: a
+b: b
+c: c
+.foo: .foo
+.: .
+-a: -a
++b: +b
+/b: /b
+:c: :c
+$g: $g
+'*': '*'
+'*a': '*a'
+)",
+L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
+ N(QKV, "*", "*"), N(QKV, "*a", "*a")}
+);
+
+ADD_CASE_TO_GROUP("funny names, map expl",
+R"({a: a, b: b, c: c, .foo: .foo, .: ., -a: -a, +b: +b, /b: /b, :c: :c, $g: $g,
+ '*': '*', '*a':'*a'})",
+L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
+ N(QKV, "*", "*"), N(QKV, "*a", "*a")}
+);
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_seq_of_map.cpp b/thirdparty/ryml/test/test_seq_of_map.cpp
new file mode 100644
index 000000000..90bbcbd79
--- /dev/null
+++ b/thirdparty/ryml/test/test_seq_of_map.cpp
@@ -0,0 +1,348 @@
+#include "./test_group.hpp"
+#include "test_case.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(seq_of_map, with_anchors)
+{
+ {
+ // this case is vanilla:
+ csubstr yaml = R"(- a0: v0
+ &a1 a1: v1
+ &a2 a2: v2
+ &a3 a3: v3
+- a0: w0
+ *a1: w1
+ *a2: w2
+ *a3: w3
+- &seq
+ a4: v4
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
+ ASSERT_EQ(t.rootref().num_children(), 3u);
+ ASSERT_EQ(t[2].has_val_anchor(), true);
+ ASSERT_EQ(t[2].val_anchor(), "seq");
+ }
+ {
+ // but this case may fail because the indentation
+ // may be set from the scalar instead of the tag:
+ csubstr yaml = R"(- &a1 a1: v1
+ &a2 a2: v2
+ &a3 a3: v3
+- *a1: w1
+ *a2: w2
+ *a3: w3
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
+ }
+}
+
+TEST(seq_of_map, with_tags)
+{
+ {
+ // this case is vanilla:
+ csubstr yaml = R"(- a0: v0
+ !!str a1: v1
+ !!str a2: v2
+ !!str a3: v3
+- a0: w0
+ !!int a1: !!str w1
+ !!int a2: !!str w2
+ !!int a3: !!str w3
+- a0: v1
+ !foo a1: v1
+ !foo a2: v2
+ !foo a3: v3
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
+ }
+ {
+ // but this case may fail because the indentation
+ // may be set from the scalar instead of the tag:
+ csubstr yaml = R"(- !!str a1: v1
+ !!str a2: v2
+ !!str a3: v3
+- !!int a1: !!str w1
+ !!int a2: !!str w2
+ !!int a3: !!str w3
+- !foo a1: v1
+ !foo a2: v2
+ !foo a3: v3
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
+ }
+}
+
+TEST(seq_of_map, missing_scalars_v1)
+{
+ Tree t = parse_in_arena(R"(a:
+ - ~: ~
+ - ~: ~
+)");
+ #ifdef RYML_DBG
+ print_tree(t);
+ #endif
+ ASSERT_EQ(t["a"].num_children(), 2u);
+ ASSERT_EQ(t["a"][0].num_children(), 1u);
+ EXPECT_EQ(t["a"][0].first_child().key(), "~");
+ EXPECT_EQ(t["a"][0].first_child().val(), "~");
+ ASSERT_EQ(t["a"][1].num_children(), 1u);
+ EXPECT_EQ(t["a"][1].first_child().key(), "~");
+ EXPECT_EQ(t["a"][1].first_child().val(), "~");
+}
+
+TEST(seq_of_map, missing_scalars_v2)
+{
+ Tree t = parse_in_arena(R"(a:
+ - :
+ - :
+)");
+ #ifdef RYML_DBG
+ print_tree(t);
+ #endif
+ ASSERT_EQ(t["a"].num_children(), 2u);
+ ASSERT_EQ(t["a"][0].num_children(), 1u);
+ EXPECT_EQ(t["a"][0].first_child().key(), nullptr);
+ EXPECT_EQ(t["a"][0].first_child().val(), nullptr);
+ ASSERT_EQ(t["a"][1].num_children(), 1u);
+ EXPECT_EQ(t["a"][1].first_child().key(), nullptr);
+ EXPECT_EQ(t["a"][1].first_child().val(), nullptr);
+}
+
+TEST(seq_of_map, missing_scalars_v3)
+{
+ Tree t = parse_in_arena(R"(a:
+ - :
+ - :
+)");
+ #ifdef RYML_DBG
+ print_tree(t);
+ #endif
+ ASSERT_EQ(t["a"].num_children(), 2u);
+ ASSERT_EQ(t["a"][0].num_children(), 1u);
+ EXPECT_EQ(t["a"][0].first_child().key(), nullptr);
+ EXPECT_EQ(t["a"][0].first_child().val(), nullptr);
+ ASSERT_EQ(t["a"][1].num_children(), 1u);
+ EXPECT_EQ(t["a"][1].first_child().key(), nullptr);
+ EXPECT_EQ(t["a"][1].first_child().val(), nullptr);
+}
+
+#ifdef RYML_WITH_TAB_TOKENS
+TEST(seq_of_map, test_suite_6BCT)
+{
+ Tree t = parse_in_arena(R"(
+- foo0: bar0
+- foo1 : bar1
+- foo2 : bar2
+)");
+ #ifdef RYML_DBG
+ print_tree(t);
+ #endif
+ ASSERT_TRUE(t[0].is_map());
+ ASSERT_TRUE(t[1].is_map());
+ ASSERT_TRUE(t[2].is_map());
+ EXPECT_EQ(t[0]["foo0"].val(), csubstr("bar0"));
+ EXPECT_EQ(t[1]["foo1"].val(), csubstr("bar1"));
+ EXPECT_EQ(t[2]["foo2"].val(), csubstr("bar2"));
+}
+#endif
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(SEQ_OF_MAP)
+{
+
+ADD_CASE_TO_GROUP("seq of empty maps, one line",
+R"([{}, {}, {}])",
+ L{MAP, MAP, MAP}
+);
+
+ADD_CASE_TO_GROUP("seq of maps, one line",
+R"([{name: John Smith, age: 33}, {name: Mary Smith, age: 27}])",
+ L{
+ N{L{N("name", "John Smith"), N("age", "33")}},
+ N{L{N("name", "Mary Smith"), N("age", "27")}}
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps, implicit seq, explicit maps",
+R"(
+- {name: John Smith, age: 33}
+- {name: Mary Smith, age: 27}
+)",
+ L{
+ N{L{N("name", "John Smith"), N("age", "33")}},
+ N{L{N("name", "Mary Smith"), N("age", "27")}}
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps",
+R"(
+- name: John Smith
+ age: 33
+- name: Mary Smith
+ age: 27
+)",
+ L{
+ N{L{N("name", "John Smith"), N("age", "33")}},
+ N{L{N("name", "Mary Smith"), N("age", "27")}}
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps, next line",
+R"(
+-
+ name:
+ John Smith
+ age:
+ 33
+-
+ name:
+ Mary Smith
+ age:
+ 27
+)",
+ L{
+ N{L{N("name", "John Smith"), N("age", "33")}},
+ N{L{N("name", "Mary Smith"), N("age", "27")}}
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps, bug #32 ex1",
+R"(
+- 'a': 1
+ b: 2
+)",
+ L{
+ N{L{N(QK, "a", "1"), N("b", "2")}}
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps, bug #32 ex2",
+R"(
+- a: 1
+ b: 2
+- b: 2
+ 'a': 1
+- b: 2
+ 'a': 1
+ c: 3
+- {'a': 1, b: 2}
+)",
+ L{
+ N{L{N("a", "1"), N("b", "2")}},
+ N{L{N("b", "2"), N(QK, "a", "1")}},
+ N{L{N("b", "2"), N(QK, "a", "1"), N("c", "3")}},
+ N{L{N(QK, "a", "1"), N("b", "2")}},
+ }
+);
+
+ADD_CASE_TO_GROUP("seq of maps, bug #32 ex3",
+R"(
+'a': 1
+b: 2
+b: 2
+'a': 1
+)",
+L{
+ N(QK, "a", "1"), N("b", "2"), N("b", "2"), N(QK, "a", "1"),
+});
+
+
+ADD_CASE_TO_GROUP("seq of maps, implicit map in seq",
+R"('implicit block key' : [
+ 'implicit flow key 1' : value1,
+ 'implicit flow key 2' : value2,
+ 'implicit flow key 3' : value3,
+ 'implicit flow key m' : {key1: val1, key2: val2},
+ 'implicit flow key s' : [val1, val2],
+])",
+L{N(KEYSEQ|KEYQUO, "implicit block key", L{
+ N(L{N(KEYVAL|KEYQUO, "implicit flow key 1", "value1")}),
+ N(L{N(KEYVAL|KEYQUO, "implicit flow key 2", "value2")}),
+ N(L{N(KEYVAL|KEYQUO, "implicit flow key 3", "value3")}),
+ N(L{N(KEYMAP|KEYQUO, "implicit flow key m", L{N("key1", "val1"), N("key2", "val2")})}),
+ N(L{N(KEYSEQ|KEYQUO, "implicit flow key s", L{N("val1"), N("val2")})}),
+})});
+
+
+ADD_CASE_TO_GROUP("seq of maps, implicit map in seq, missing scalar",
+R"({a : [
+ : foo
+],
+b : [
+ :
+foo
+],
+c : [
+ :
+,
+ :
+]})",
+L{
+ N("a", L{N(MAP, L{N("", "foo")}),}),
+ N("b", L{N(MAP, L{N("", "foo")}),}),
+ N("c", L{N(MAP, L{N(KEYVAL, "", {})}), N(MAP, L{N(KEYVAL, "", {})}),}),
+});
+
+
+ADD_CASE_TO_GROUP("seq of maps, implicit with anchors, unresolved",
+R"(
+- &a1 a1: v1
+ &a2 a2: v2
+ &a3 a3: v3
+- *a1: w1
+ *a2: w2
+ *a3: w3
+)",
+L{
+ N(L{N( "a1", AR(KEYANCH, "a1"), "v1"), N( "a2", AR(KEYANCH, "a2"), "v2"), N( "a3", AR(KEYANCH, "a3"), "v3")}),
+ N(L{N("*a1", AR(KEYREF, "*a1"), "w1"), N("*a2", AR(KEYREF, "*a2"), "w2"), N("*a3", AR(KEYREF, "*a3"), "w3")}),
+});
+
+
+ADD_CASE_TO_GROUP("seq of maps, implicit with anchors, resolved", RESOLVE_REFS,
+R"(
+- &a1 a1: v1
+ &a2 a2: v2
+ &a3 a3: v3
+- *a1: w1
+ *a2: w2
+ *a3: w3
+)",
+L{
+ N(L{N("a1", "v1"), N("a2", "v2"), N("a3", "v3")}),
+ N(L{N("a1", "w1"), N("a2", "w2"), N("a3", "w3")}),
+});
+
+
+ADD_CASE_TO_GROUP("seq of maps, implicit with tags",
+R"(
+- !!str a1: v1
+ !!str a2: v2
+ !!str a3: v3
+- a1: !!str w1
+ a2: !!str w2
+ a3: !!str w3
+- !foo a1: v1
+ !foo a2: v2
+ !foo a3: v3
+)",
+L{
+ N(L{N(TS("!!str", "a1"), "v1"), N(TS("!!str", "a2"), "v2"), N(TS("!!str", "a3"), "v3")}),
+ N(L{N("a1", TS("!!str", "w1")), N("a2", TS("!!str", "w2")), N("a3", TS("!!str", "w3"))}),
+ N(L{N(TS("!foo", "a1"), "v1"), N(TS("!foo", "a2"), "v2"), N(TS("!foo", "a3"), "v3")}),
+});
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_serialize.cpp b/thirdparty/ryml/test/test_serialize.cpp
new file mode 100644
index 000000000..979de7c79
--- /dev/null
+++ b/thirdparty/ryml/test/test_serialize.cpp
@@ -0,0 +1,499 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+
+#include "./test_case.hpp"
+
+#include <gtest/gtest.h>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4389) // signed/unsigned mismatch
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+#endif
+
+namespace foo {
+
+template<class T>
+struct vec2
+{
+ T x, y;
+};
+template<class T>
+struct vec3
+{
+ T x, y, z;
+};
+template<class T>
+struct vec4
+{
+ T x, y, z, w;
+};
+
+template<class T> size_t to_chars(c4::substr buf, vec2<T> v) { return c4::format(buf, "({},{})", v.x, v.y); }
+template<class T> size_t to_chars(c4::substr buf, vec3<T> v) { return c4::format(buf, "({},{},{})", v.x, v.y, v.z); }
+template<class T> size_t to_chars(c4::substr buf, vec4<T> v) { return c4::format(buf, "({},{},{},{})", v.x, v.y, v.z, v.w); }
+
+template<class T> bool from_chars(c4::csubstr buf, vec2<T> *v) { size_t ret = c4::unformat(buf, "({},{})", v->x, v->y); return ret != c4::yml::npos; }
+template<class T> bool from_chars(c4::csubstr buf, vec3<T> *v) { size_t ret = c4::unformat(buf, "({},{},{})", v->x, v->y, v->z); return ret != c4::yml::npos; }
+template<class T> bool from_chars(c4::csubstr buf, vec4<T> *v) { size_t ret = c4::unformat(buf, "({},{},{},{})", v->x, v->y, v->z, v->w); return ret != c4::yml::npos; }
+
+TEST(serialize, type_as_str)
+{
+ c4::yml::Tree t;
+
+ auto r = t.rootref();
+ r |= c4::yml::MAP;
+
+ vec2<int> v2in{10, 11};
+ vec2<int> v2out{1, 2};
+ r["v2"] << v2in;
+ r["v2"] >> v2out;
+ EXPECT_EQ(v2in.x, v2out.x);
+ EXPECT_EQ(v2in.y, v2out.y);
+
+ vec3<int> v3in{100, 101, 102};
+ vec3<int> v3out{1, 2, 3};
+ r["v3"] << v3in;
+ r["v3"] >> v3out;
+ EXPECT_EQ(v3in.x, v3out.x);
+ EXPECT_EQ(v3in.y, v3out.y);
+ EXPECT_EQ(v3in.z, v3out.z);
+
+ vec4<int> v4in{1000, 1001, 1002, 1003};
+ vec4<int> v4out{1, 2, 3, 4};
+ r["v4"] << v4in;
+ r["v4"] >> v4out;
+ EXPECT_EQ(v4in.x, v4out.x);
+ EXPECT_EQ(v4in.y, v4out.y);
+ EXPECT_EQ(v4in.z, v4out.z);
+ EXPECT_EQ(v4in.w, v4out.w);
+
+ char buf[256];
+ c4::csubstr ret = c4::yml::emit_yaml(t, buf);
+ EXPECT_EQ(ret, R"(v2: '(10,11)'
+v3: '(100,101,102)'
+v4: '(1000,1001,1002,1003)'
+)");
+}
+} // namespace foo
+
+
+namespace c4 {
+namespace yml {
+
+//-------------------------------------------
+template<class Container, class... Args>
+void do_test_serialize(Args&& ...args)
+{
+ using namespace c4::yml;
+ Container s(std::forward<Args>(args)...);
+ Container out;
+
+ Tree t;
+ NodeRef n(&t);
+
+ n << s;
+ //print_tree(t);
+ emit_yaml(t);
+ c4::yml::check_invariants(t);
+ n >> out;
+ EXPECT_EQ(s, out);
+}
+
+
+TEST(serialize, std_vector_int)
+{
+ using T = int;
+ using L = std::initializer_list<T>;
+ do_test_serialize<std::vector<T>>(L{1, 2, 3, 4, 5});
+}
+TEST(serialize, std_vector_bool)
+{
+ using T = bool;
+ using L = std::initializer_list<T>;
+ do_test_serialize<std::vector<T>>(L{true, false, true, false, true, true});
+}
+TEST(serialize, std_vector_string)
+{
+ using T = std::string;
+ using L = std::initializer_list<T>;
+ do_test_serialize<std::vector<T>>(L{"0asdadk0", "1sdfkjdfgu1", "2fdfdjkhdfgkjhdfi2", "3e987dfgnfdg83", "4'd0fgºçdfg«4"});
+}
+TEST(serialize, std_vector_std_vector_int)
+{
+ using T = std::vector<int>;
+ using L = std::initializer_list<T>;
+ do_test_serialize<std::vector<T>>(L{{1, 2, 3, 4, 5}, {6, 7, 8, 9, 0}});
+}
+
+
+TEST(serialize, std_map__int_int)
+{
+ using M = std::map<int, int>;
+ using L = std::initializer_list<typename M::value_type>;
+ do_test_serialize<M>(L{{10, 0}, {11, 1}, {22, 2}, {10001, 1000}, {20002, 2000}, {30003, 3000}});
+}
+TEST(serialize, std_map__std_string_int)
+{
+ using M = std::map<std::string, int>;
+ using L = std::initializer_list<typename M::value_type>;
+ do_test_serialize<M>(L{{"asdsdf", 0}, {"dfgdfgdfg", 1}, {"dfgjdfgkjh", 2}});
+}
+TEST(serialize, std_map__string_vectori)
+{
+ using M = std::map<std::string, std::vector<int>>;
+ using L = std::initializer_list<typename M::value_type>;
+ do_test_serialize<M>(L{{"asdsdf", {0, 1, 2, 3}}, {"dfgdfgdfg", {4, 5, 6, 7}}, {"dfgjdfgkjh", {8, 9, 10, 11}}});
+}
+TEST(serialize, std_vector__map_string_int)
+{
+ using V = std::vector< std::map<std::string, int>>;
+ using M = typename V::value_type;
+ using L = std::initializer_list<M>;
+ do_test_serialize<V>(L{
+ M{{"asdasf", 0}, {"dfgkjhdfg", 1}, {"fghffg", 2}, {"r5656kjnh9b'dfgwg+*", 3}},
+ M{{"asdasf", 10}, {"dfgkjhdfg", 11}, {"fghffg", 12}, {"r5656kjnh9b'dfgwg+*", 13}},
+ M{{"asdasf", 20}, {"dfgkjhdfg", 21}, {"fghffg", 22}, {"r5656kjnh9b'dfgwg+*", 23}},
+ M{{"asdasf", 30}, {"dfgkjhdfg", 31}, {"fghffg", 32}, {"r5656kjnh9b'dfgwg+*", 33}},
+ });
+}
+
+
+TEST(serialize, bool)
+{
+ Tree t = parse_in_arena("{a: 0, b: false, c: 1, d: true}");
+ bool v, w;
+ t["a"] >> v;
+ EXPECT_EQ(v, false);
+ t["b"] >> v;
+ EXPECT_EQ(v, false);
+ t["c"] >> v;
+ EXPECT_EQ(v, true);
+ t["d"] >> v;
+ EXPECT_EQ(v, true);
+
+ t["e"] << true;
+ EXPECT_EQ(t["e"].val(), "1");
+ t["e"] >> w;
+ EXPECT_EQ(w, true);
+
+ t["e"] << false;
+ EXPECT_EQ(t["e"].val(), "0");
+ t["e"] >> w;
+ EXPECT_EQ(w, false);
+
+ t["e"] << fmt::boolalpha(true);
+ EXPECT_EQ(t["e"].val(), "true");
+ t["e"] >> w;
+ EXPECT_EQ(w, true);
+
+ t["e"] << fmt::boolalpha(false);
+ EXPECT_EQ(t["e"].val(), "false");
+ t["e"] >> w;
+ EXPECT_EQ(w, false);
+}
+
+TEST(serialize, nan)
+{
+ Tree t = parse_in_arena(R"(
+good:
+ - .nan
+ - .nan
+ - .NaN
+ - .NAN
+ - nan
+ -
+ .nan
+set:
+ - nothing
+ - nothing
+})");
+ t["set"][0] << std::numeric_limits<float>::quiet_NaN();
+ t["set"][1] << std::numeric_limits<double>::quiet_NaN();
+ EXPECT_EQ(t["set"][0].val(), ".nan");
+ EXPECT_EQ(t["set"][1].val(), ".nan");
+ EXPECT_EQ(t["good"][0].val(), ".nan");
+ EXPECT_EQ(t["good"][1].val(), ".nan");
+ EXPECT_EQ(t["good"][2].val(), ".NaN");
+ EXPECT_EQ(t["good"][3].val(), ".NAN");
+ EXPECT_EQ(t["good"][4].val(), "nan");
+ EXPECT_EQ(t["good"][5].val(), ".nan");
+ float f;
+ double d;
+ f = 0.f;
+ d = 0.;
+ t["good"][0] >> f;
+ t["good"][0] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+ f = 0.f;
+ d = 0.;
+ t["good"][1] >> f;
+ t["good"][1] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+ f = 0.f;
+ d = 0.;
+ t["good"][2] >> f;
+ t["good"][2] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+ f = 0.f;
+ d = 0.;
+ t["good"][3] >> f;
+ t["good"][3] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+ f = 0.f;
+ d = 0.;
+ t["good"][4] >> f;
+ t["good"][4] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+ f = 0.f;
+ d = 0.;
+ t["good"][5] >> f;
+ t["good"][5] >> d;
+ EXPECT_TRUE(std::isnan(f));
+ EXPECT_TRUE(std::isnan(d));
+}
+
+TEST(serialize, inf)
+{
+ C4_SUPPRESS_WARNING_GCC_CLANG_WITH_PUSH("-Wfloat-equal");
+ Tree t = parse_in_arena(R"(
+good:
+ - .inf
+ - .inf
+ - .Inf
+ - .INF
+ - inf
+ - infinity
+ -
+ .inf
+set:
+ - nothing
+ - nothing
+})");
+ float finf = std::numeric_limits<float>::infinity();
+ double dinf = std::numeric_limits<double>::infinity();
+ t["set"][0] << finf;
+ t["set"][1] << dinf;
+ EXPECT_EQ(t["set"][0].val(), ".inf");
+ EXPECT_EQ(t["set"][1].val(), ".inf");
+ EXPECT_EQ(t["good"][0].val(), ".inf");
+ EXPECT_EQ(t["good"][1].val(), ".inf");
+ EXPECT_EQ(t["good"][2].val(), ".Inf");
+ EXPECT_EQ(t["good"][3].val(), ".INF");
+ EXPECT_EQ(t["good"][4].val(), "inf");
+ EXPECT_EQ(t["good"][5].val(), "infinity");
+ EXPECT_EQ(t["good"][6].val(), ".inf");
+ float f;
+ double d;
+ f = 0.f;
+ d = 0.;
+ t["good"][0] >> f;
+ t["good"][0] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][1] >> f;
+ t["good"][1] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][2] >> f;
+ t["good"][2] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][3] >> f;
+ t["good"][3] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][4] >> f;
+ t["good"][4] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][5] >> f;
+ t["good"][5] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][6] >> f;
+ t["good"][6] >> d;
+ EXPECT_TRUE(f == finf);
+ EXPECT_TRUE(d == dinf);
+
+ t = parse_in_arena(R"(
+good:
+ - -.inf
+ - -.inf
+ - -.Inf
+ - -.INF
+ - -inf
+ - -infinity
+ -
+ -.inf
+set:
+ - nothing
+ - nothing
+})");
+ t["set"][0] << -finf;
+ t["set"][1] << -dinf;
+ EXPECT_EQ(t["set"][0].val(), "-.inf");
+ EXPECT_EQ(t["set"][1].val(), "-.inf");
+ EXPECT_EQ(t["good"][0].val(), "-.inf");
+ EXPECT_EQ(t["good"][1].val(), "-.inf");
+ EXPECT_EQ(t["good"][2].val(), "-.Inf");
+ EXPECT_EQ(t["good"][3].val(), "-.INF");
+ EXPECT_EQ(t["good"][4].val(), "-inf");
+ EXPECT_EQ(t["good"][5].val(), "-infinity");
+ EXPECT_EQ(t["good"][6].val(), "-.inf");
+ f = 0.f;
+ d = 0.;
+ t["good"][0] >> f;
+ t["good"][0] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][1] >> f;
+ t["good"][1] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][2] >> f;
+ t["good"][2] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][3] >> f;
+ t["good"][3] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][4] >> f;
+ t["good"][4] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][5] >> f;
+ t["good"][5] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ f = 0.f;
+ d = 0.;
+ t["good"][6] >> f;
+ t["good"][6] >> d;
+ EXPECT_TRUE(f == -finf);
+ EXPECT_TRUE(d == -dinf);
+ C4_SUPPRESS_WARNING_GCC_CLANG_POP
+}
+
+TEST(serialize, std_string)
+{
+ auto t = parse_in_arena("{foo: bar}");
+ std::string s;
+ EXPECT_NE(s, "bar");
+ t["foo"] >> s;
+ EXPECT_EQ(s, "bar");
+}
+
+TEST(serialize, anchor_and_ref_round_trip)
+{
+ const char yaml[] = R"(anchor_objects:
+ - &id001
+ name: id001
+ - &id002
+ name: id002
+ - name: id003
+ - &id004
+ name: id004
+references:
+ reference_key: *id001
+ reference_list:
+ - *id002
+ - *id004
+)";
+
+ Tree t = parse_in_arena(yaml);
+ std::string cmpbuf;
+ emitrs_yaml(t, &cmpbuf);
+ EXPECT_EQ(cmpbuf, yaml);
+}
+
+TEST(serialize, create_anchor_ref_trip)
+{
+ const char expected_yaml[] = R"(anchor_objects:
+ - &id001
+ name: a_name
+reference_list:
+ - *id001
+)";
+
+ Tree tree;
+ auto root_id = tree.root_id();
+ tree.to_map(root_id);
+
+ auto anchor_list_id = tree.append_child(root_id);
+ tree.to_seq(anchor_list_id, "anchor_objects");
+
+ auto anchor_map0 = tree.append_child(anchor_list_id);
+ tree.to_map(anchor_map0);
+ tree.set_val_anchor(anchor_map0, "id001");
+
+ auto anchor_elem0 = tree.append_child(anchor_map0);
+ tree.to_keyval(anchor_elem0, "name", "a_name");
+
+ auto ref_list_id = tree.append_child(root_id);
+ tree.to_seq(ref_list_id, "reference_list");
+
+ auto elem0_id = tree.append_child(ref_list_id);
+ tree.set_val_ref(elem0_id, "id001");
+
+ std::string cmpbuf;
+ emitrs_yaml(tree, &cmpbuf);
+ EXPECT_EQ(cmpbuf, expected_yaml);
+}
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_simple_anchor.cpp b/thirdparty/ryml/test/test_simple_anchor.cpp
new file mode 100644
index 000000000..0ee4a629f
--- /dev/null
+++ b/thirdparty/ryml/test/test_simple_anchor.cpp
@@ -0,0 +1,1405 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(anchors, circular)
+{
+ Tree t = parse_in_arena(R"(&x
+- *x
+)");
+ ASSERT_TRUE(t.rootref().is_val_anchor());
+ ASSERT_TRUE(t[0].is_val_ref());
+ EXPECT_EQ(t.rootref().val_anchor(), "x");
+ EXPECT_EQ(t[0].val_ref(), "x");
+}
+
+TEST(anchors, node_scalar_set_ref_when_empty)
+{
+ {
+ NodeScalar ns;
+ ns.set_ref_maybe_replacing_scalar("foo", /*has_scalar*/false);
+ EXPECT_EQ(ns.scalar, "foo");
+ EXPECT_EQ(ns.anchor, "foo");
+ }
+ {
+ NodeScalar ns;
+ ns.set_ref_maybe_replacing_scalar("*foo", /*has_scalar*/false);
+ EXPECT_EQ(ns.scalar, "*foo");
+ EXPECT_EQ(ns.anchor, "foo");
+ }
+}
+
+TEST(anchors, node_scalar_set_ref_when_non_empty)
+{
+ {
+ NodeScalar ns;
+ ns.scalar = "whatever";
+ ns.set_ref_maybe_replacing_scalar("foo", /*has_scalar*/true);
+ EXPECT_EQ(ns.scalar, "foo");
+ EXPECT_EQ(ns.anchor, "foo");
+ }
+ {
+ NodeScalar ns;
+ ns.scalar = "whatever";
+ ns.set_ref_maybe_replacing_scalar("*foo", /*has_scalar*/true);
+ EXPECT_EQ(ns.scalar, "*foo");
+ EXPECT_EQ(ns.anchor, "foo");
+ ns.set_ref_maybe_replacing_scalar("foo", /*has_scalar*/true);
+ EXPECT_EQ(ns.scalar, "*foo"); // keep the previous as it is well formed
+ EXPECT_EQ(ns.anchor, "foo");
+ ns.set_ref_maybe_replacing_scalar("bar", /*has_scalar*/true);
+ EXPECT_EQ(ns.scalar, "bar"); // replace previous as it is not well formed
+ EXPECT_EQ(ns.anchor, "bar");
+ }
+}
+
+TEST(anchors, no_ambiguity_when_key_scalars_begin_with_star)
+{
+ Tree t = parse_in_arena("{foo: &foo 1, *foo: 2, '*foo': 3}");
+
+ EXPECT_TRUE(t[1].is_key_ref());
+ EXPECT_FALSE(t[2].is_key_ref());
+
+ EXPECT_FALSE(t[1].is_key_quoted());
+ EXPECT_TRUE(t[2].is_key_quoted());
+
+ EXPECT_EQ(t[1].key(), "*foo");
+ EXPECT_EQ(t[1].key_ref(), "foo");
+ EXPECT_EQ(t[2].key(), "*foo");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo: &foo 1
+*foo: 2
+'*foo': 3
+)");
+
+ t.resolve();
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo: 1
+1: 2
+'*foo': 3
+)");
+}
+
+TEST(anchors, no_ambiguity_when_val_scalars_begin_with_star)
+{
+ Tree t = parse_in_arena("{foo: &foo 1, ref: *foo, quo: '*foo'}");
+
+ EXPECT_TRUE(t["ref"].is_val_ref());
+ EXPECT_FALSE(t["quo"].is_val_ref());
+
+ EXPECT_FALSE(t["ref"].is_val_quoted());
+ EXPECT_TRUE(t["quo"].is_val_quoted());
+
+ EXPECT_EQ(t["ref"].val_ref(), "foo");
+ EXPECT_EQ(t["ref"].val(), "*foo");
+ EXPECT_EQ(t["quo"].val(), "*foo");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo: &foo 1
+ref: *foo
+quo: '*foo'
+)");
+
+ t.resolve();
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo: 1
+ref: 1
+quo: '*foo'
+)");
+}
+
+TEST(anchors, no_ambiguity_with_inheritance)
+{
+ Tree t = parse_in_arena("{foo: &foo {a: 1, b: 2}, bar: {<<: *foo}, sq: {'<<': haha}, dq: {\"<<\": hehe}}");
+
+ EXPECT_TRUE(t["bar"].has_child("<<"));
+ EXPECT_TRUE(t["bar"]["<<"].is_key_ref());
+ EXPECT_TRUE(t["bar"]["<<"].is_val_ref());
+ EXPECT_TRUE(t["sq"]["<<"].is_key_quoted());
+ EXPECT_TRUE(t["dq"]["<<"].is_key_quoted());
+ EXPECT_FALSE(t["sq"]["<<"].is_key_ref());
+ EXPECT_FALSE(t["dq"]["<<"].is_key_ref());
+ EXPECT_EQ(t["sq"]["<<"].key(), "<<");
+ EXPECT_EQ(t["dq"]["<<"].key(), "<<");
+ EXPECT_EQ(t["bar"]["<<"].key(), "<<");
+ EXPECT_EQ(t["bar"]["<<"].val(), "*foo");
+ EXPECT_EQ(t["bar"]["<<"].key_ref(), "<<");
+ EXPECT_EQ(t["bar"]["<<"].val_ref(), "foo");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo: &foo
+ a: 1
+ b: 2
+bar:
+ <<: *foo
+sq:
+ '<<': haha
+dq:
+ '<<': hehe
+)");
+ t.resolve();
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(foo:
+ a: 1
+ b: 2
+bar:
+ a: 1
+ b: 2
+sq:
+ '<<': haha
+dq:
+ '<<': hehe
+)");
+}
+
+TEST(anchors, programatic_key_ref)
+{
+ Tree t = parse_in_arena("{}");
+ NodeRef r = t.rootref();
+ r["kanchor"] = "2";
+ r["kanchor"].set_key_anchor("kanchor");
+ r["vanchor"] = "3";
+ r["vanchor"].set_val_anchor("vanchor");
+ r["*kanchor"] = "4";
+ r["*vanchor"] = "5";
+ NodeRef ch = r.append_child();
+ ch.set_key_ref("kanchor");
+ ch.set_val("6");
+ ch = r.append_child();
+ ch.set_key_ref("vanchor");
+ ch.set_val("7");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(&kanchor kanchor: 2
+vanchor: &vanchor 3
+'*kanchor': 4
+'*vanchor': 5
+*kanchor: 6
+*vanchor: 7
+)");
+ t.resolve();
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(kanchor: 2
+vanchor: 3
+'*kanchor': 4
+'*vanchor': 5
+kanchor: 6
+3: 7
+)");
+}
+
+TEST(anchors, programatic_val_ref)
+{
+ Tree t = parse_in_arena("{}");
+ t["kanchor"] = "2";
+ t["kanchor"].set_key_anchor("kanchor");
+ t["vanchor"] = "3";
+ t["vanchor"].set_val_anchor("vanchor");
+
+ t["kref"].create();
+ t["vref"].create();
+ t["kref"].set_val_ref("kanchor");
+ t["vref"].set_val_ref("vanchor");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(&kanchor kanchor: 2
+vanchor: &vanchor 3
+kref: *kanchor
+vref: *vanchor
+)");
+ t.resolve();
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(kanchor: 2
+vanchor: 3
+kref: kanchor
+vref: 3
+)");
+}
+
+TEST(anchors, programatic_inheritance)
+{
+ Tree t = parse_in_arena("{orig: &orig {foo: bar, baz: bat}, copy: {}, notcopy: {}, notref: {}}");
+
+ t["copy"]["<<"] = "*orig";
+ t["copy"]["<<"].set_key_ref("<<");
+ t["copy"]["<<"].set_val_ref("orig");
+
+ t["notcopy"]["test"] = "*orig";
+ t["notcopy"]["test"].set_val_ref("orig");
+ t["notcopy"]["<<"] = "*orig";
+ t["notcopy"]["<<"].set_val_ref("orig");
+
+ t["notref"]["<<"] = "*orig";
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(orig: &orig
+ foo: bar
+ baz: bat
+copy:
+ <<: *orig
+notcopy:
+ test: *orig
+ '<<': *orig
+notref:
+ '<<': '*orig'
+)");
+ t.resolve();
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(orig:
+ foo: bar
+ baz: bat
+copy:
+ foo: bar
+ baz: bat
+notcopy:
+ test:
+ foo: bar
+ baz: bat
+ '<<':
+ foo: bar
+ baz: bat
+notref:
+ '<<': '*orig'
+)");
+}
+
+TEST(anchors, programatic_multiple_inheritance)
+{
+ Tree t = parse_in_arena("{orig1: &orig1 {foo: bar}, orig2: &orig2 {baz: bat}, orig3: &orig3 {and: more}, copy: {}}");
+
+ t["copy"]["<<"] |= SEQ;
+ t["copy"]["<<"].set_key_ref("<<");
+ NodeRef ref1 = t["copy"]["<<"].append_child();
+ NodeRef ref2 = t["copy"]["<<"].append_child();
+ NodeRef ref3 = t["copy"]["<<"].append_child();
+ ref1 = "*orig1";
+ ref2 = "*orig2";
+ ref3 = "*orig3";
+ ref1.set_val_ref("orig1");
+ ref2.set_val_ref("orig2");
+ ref3.set_val_ref("orig3");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(orig1: &orig1
+ foo: bar
+orig2: &orig2
+ baz: bat
+orig3: &orig3
+ and: more
+copy:
+ <<:
+ - *orig1
+ - *orig2
+ - *orig3
+)");
+ t.resolve();
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(orig1:
+ foo: bar
+orig2:
+ baz: bat
+orig3:
+ and: more
+copy:
+ foo: bar
+ baz: bat
+ and: more
+)");
+}
+
+TEST(anchors, set_anchor_leading_ampersand_is_optional)
+{
+ Tree t = parse_in_arena("{without: 0, with: 1}");
+
+ t["without"].set_key_anchor("without");
+ t["with"].set_key_anchor("&with");
+ EXPECT_EQ(t["without"].key_anchor(), "without");
+ EXPECT_EQ(t["with"].key_anchor(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(&without without: 0
+&with with: 1
+)");
+
+ t["without"].set_val_anchor("without");
+ t["with"].set_val_anchor("&with");
+ EXPECT_EQ(t["without"].val_anchor(), "without");
+ EXPECT_EQ(t["with"].val_anchor(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(&without without: &without 0
+&with with: &with 1
+)");
+}
+
+TEST(anchors, set_ref_leading_star_is_optional)
+{
+ Tree t = parse_in_arena("{}");
+
+ t["*without"] = "0";
+ t["*with"] = "1";
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"('*without': 0
+'*with': 1
+)");
+
+ t["*without"].set_key_ref("without");
+ t["*with"].set_key_ref("*with");
+ EXPECT_EQ(t["*without"].key_ref(), "without");
+ EXPECT_EQ(t["*with"].key_ref(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(*without: 0
+*with: 1
+)");
+
+ t["*without"].set_val_ref("without");
+ t["*with"].set_val_ref("*with");
+ EXPECT_EQ(t["*without"].val_ref(), "without");
+ EXPECT_EQ(t["*with"].val_ref(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(*without: *without
+*with: *with
+)");
+}
+
+TEST(anchors, set_key_ref_also_sets_the_key_when_none_exists)
+{
+ Tree t = parse_in_arena("{}");
+ NodeRef root = t.rootref();
+ NodeRef without = root.append_child();
+ NodeRef with = root.append_child();
+ ASSERT_FALSE(without.has_key());
+ ASSERT_FALSE(with.has_key());
+ without.set_key_ref("without");
+ with.set_key_ref("*with");
+ without.set_val("0");
+ with.set_val("1");
+ ASSERT_TRUE(without.has_key());
+ ASSERT_TRUE(with.has_key());
+ EXPECT_EQ(without.key(), "without");
+ EXPECT_EQ(with.key(), "*with");
+ EXPECT_EQ(without.key_ref(), "without");
+ EXPECT_EQ(with.key_ref(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(*without: 0
+*with: 1
+)");
+}
+
+TEST(anchors, set_val_ref_also_sets_the_val_when_none_exists)
+{
+ Tree t = parse_in_arena("{}");
+ NodeRef root = t.rootref();
+ NodeRef without = root.append_child();
+ NodeRef with = root.append_child();
+ without.set_key("without");
+ with.set_key("with");
+ ASSERT_FALSE(without.has_val());
+ ASSERT_FALSE(with.has_val());
+ without.set_val_ref("without");
+ with.set_val_ref("*with");
+ ASSERT_TRUE(without.has_val());
+ ASSERT_TRUE(with.has_val());
+ EXPECT_EQ(without.val(), "without");
+ EXPECT_EQ(with.val(), "*with");
+ EXPECT_EQ(without.val_ref(), "without");
+ EXPECT_EQ(with.val_ref(), "with");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(without: *without
+with: *with
+)");
+}
+
+TEST(anchors, set_key_ref_replaces_existing_key)
+{
+ Tree t = parse_in_arena("{*foo: bar}");
+ NodeRef root = t.rootref();
+ EXPECT_TRUE(root.has_child("*foo"));
+ root["*foo"].set_key_ref("notfoo");
+ EXPECT_FALSE(root.has_child("*foo"));
+ EXPECT_FALSE(root.has_child("*notfoo"));
+ EXPECT_TRUE(root.has_child("notfoo"));
+ EXPECT_EQ(emitrs_yaml<std::string>(t), "*notfoo: bar\n");
+}
+
+TEST(anchors, set_val_ref_replaces_existing_key)
+{
+ Tree t = parse_in_arena("{foo: *bar}");
+ NodeRef root = t.rootref();
+ root["foo"].set_val_ref("notbar");
+ EXPECT_EQ(root["foo"].val(), "notbar");
+ root["foo"].set_val_ref("*notfoo");
+ EXPECT_EQ(root["foo"].val(), "*notfoo");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), "foo: *notfoo\n");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(weird_anchor_cases_from_suite, 2SXE)
+{
+ Tree t = parse_in_arena(R"(&a: key: &a value
+foo:
+ *a:
+)");
+ t.resolve();
+ #ifdef THIS_IS_A_KNOWN_LIMITATION // since we do not allow colon in anchors, this would fail:
+ EXPECT_EQ(emitrs<std::string>(t), R"(key: value
+foo: key
+)");
+ #endif
+ // so we get this instead:
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(key: value
+foo:
+ value:
+)");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// SIMPLE_ANCHOR/YmlTestCase.parse_using_ryml/0
+
+C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
+
+/** verify that the reference class is working correctly (yay, testing the tests) */
+TEST(CaseNode, anchors)
+{
+ const NodeType mask = KEYREF|KEYANCH|VALREF|VALANCH;
+
+ {
+ auto n = N("*vref", AR(KEYREF, "vref"), "c");
+ EXPECT_EQ(n.key, "*vref");
+ EXPECT_EQ(n.val, "c");
+ EXPECT_EQ((type_bits)(n.type & mask), (type_bits)KEYREF);
+ EXPECT_EQ((type_bits)n.key_anchor.type, (type_bits)KEYREF);
+ EXPECT_EQ((type_bits)n.val_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ(n.key_anchor.str, "vref");
+ EXPECT_EQ(n.val_anchor.str, "");
+ }
+
+ {
+ CaseNode n("<<", "*base", AR(VALANCH, "base"));
+ EXPECT_EQ(n.key, "<<");
+ EXPECT_EQ(n.val, "*base");
+ EXPECT_EQ((type_bits)(n.type & mask), (type_bits)VALANCH);
+ EXPECT_EQ((type_bits)n.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)n.val_anchor.type, (type_bits)VALANCH);
+ EXPECT_EQ(n.key_anchor.str, "");
+ EXPECT_EQ(n.val_anchor.str, "base");
+ }
+
+ {
+ CaseNode n("base", L{N("name", "Everyone has same name")}, AR(VALANCH, "base"));
+ EXPECT_EQ(n.key, "base");
+ EXPECT_EQ(n.val, "");
+ EXPECT_NE(n.type.is_seq(), true);
+ EXPECT_EQ((type_bits)(n.type & mask), (type_bits)VALANCH);
+ EXPECT_EQ((type_bits)n.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)n.val_anchor.type, (type_bits)VALANCH);
+ EXPECT_EQ(n.key_anchor.str, "");
+ EXPECT_EQ(n.val_anchor.str, "base");
+ }
+
+ {
+ L l{N("<<", "*base", AR(VALREF, "base"))};
+ CaseNode const& base = *l.begin();
+ EXPECT_EQ(base.key, "<<");
+ EXPECT_EQ(base.val, "*base");
+ EXPECT_EQ(base.type.is_keyval(), true);
+ EXPECT_EQ((type_bits)(base.type & mask), (type_bits)VALREF);
+ EXPECT_EQ((type_bits)base.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)base.val_anchor.type, (type_bits)VALREF);
+ EXPECT_EQ(base.key_anchor.str, "");
+ EXPECT_EQ(base.val_anchor.str, "base");
+ }
+
+ {
+ L l{N("<<", "*base", AR(VALREF, "base")), N("age", "10")};
+ CaseNode const& base = *l.begin();
+ CaseNode const& age = *(&base + 1);
+ EXPECT_EQ(base.key, "<<");
+ EXPECT_EQ(base.val, "*base");
+ EXPECT_EQ(base.type.is_keyval(), true);
+ EXPECT_EQ((type_bits)(base.type & mask), (type_bits)VALREF);
+ EXPECT_EQ((type_bits)base.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)base.val_anchor.type, (type_bits)VALREF);
+ EXPECT_EQ(base.key_anchor.str, "");
+ EXPECT_EQ(base.val_anchor.str, "base");
+
+ EXPECT_EQ(age.key, "age");
+ EXPECT_EQ(age.val, "10");
+ EXPECT_EQ(age.type.is_keyval(), true);
+ EXPECT_EQ((type_bits)(age.type & mask), (type_bits)0);
+ EXPECT_EQ((type_bits)age.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)age.val_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ(age.key_anchor.str, "");
+ EXPECT_EQ(age.val_anchor.str, "");
+ }
+
+ {
+ CaseNode n("foo", L{N("<<", "*base", AR(VALREF, "base")), N("age", "10")}, AR(VALANCH, "foo"));
+ EXPECT_EQ(n.key, "foo");
+ EXPECT_EQ(n.val, "");
+ EXPECT_EQ(n.type.has_key(), true);
+ EXPECT_EQ(n.type.is_map(), true);
+ EXPECT_EQ((type_bits)(n.type & mask), (type_bits)VALANCH);
+ EXPECT_EQ((type_bits)n.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)n.val_anchor.type, (type_bits)VALANCH);
+ EXPECT_EQ(n.key_anchor.str, "");
+ EXPECT_EQ(n.val_anchor.str, "foo");
+
+ CaseNode const& base = n.children[0];
+ EXPECT_EQ(base.key, "<<");
+ EXPECT_EQ(base.val, "*base");
+ EXPECT_EQ(base.type.has_key() && base.type.has_val(), true);
+ EXPECT_EQ((type_bits)(base.type & mask), (type_bits)VALREF);
+ EXPECT_EQ((type_bits)base.key_anchor.type, (type_bits)NOTYPE);
+ EXPECT_EQ((type_bits)base.val_anchor.type, (type_bits)VALREF);
+ EXPECT_EQ(base.key_anchor.str, "");
+ EXPECT_EQ(base.val_anchor.str, "base");
+ }
+
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(simple_anchor, resolve_works_on_an_empty_tree)
+{
+ Tree t;
+ t.resolve();
+ EXPECT_TRUE(t.empty());
+}
+
+TEST(simple_anchor, resolve_works_on_a_tree_without_refs)
+{
+ Tree t = parse_in_arena("[a, b, c, d, e, f]");
+ size_t size_before = t.size();
+ t.resolve();
+ EXPECT_EQ(t.size(), size_before);
+}
+
+TEST(simple_anchor, resolve_works_on_keyrefvalref)
+{
+ Tree t = parse_in_arena("{&a a: &b b, *b: *a}");
+ EXPECT_EQ(t["a"].has_key_anchor(), true);
+ EXPECT_EQ(t["a"].has_val_anchor(), true);
+ EXPECT_EQ(t["a"].key_anchor(), "a");
+ EXPECT_EQ(t["a"].val_anchor(), "b");
+ EXPECT_EQ(t["*b"].is_key_ref(), true);
+ EXPECT_EQ(t["*b"].is_val_ref(), true);
+ EXPECT_EQ(t["*b"].key_ref(), "b");
+ EXPECT_EQ(t["*b"].val_ref(), "a");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(&a a: &b b
+*b: *a
+)");
+ t.resolve();
+ EXPECT_EQ(t["a"].val(), "b");
+ EXPECT_EQ(t["b"].val(), "a");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(a: b
+b: a
+)");
+}
+
+TEST(simple_anchor, anchors_of_first_child_key_implicit)
+{
+ csubstr yaml = R"(&anchor0
+&anchor4 top4:
+ key4: scalar4
+top5: &anchor5
+ key5: scalar5
+top6:
+ &anchor6 key6: scalar6
+top61:
+ &anchor61 key61:
+ scalar61
+top62:
+ &anchor62
+ key62:
+ scalar62
+)";
+ Tree t = parse_in_arena(yaml);
+ EXPECT_EQ(t.rootref().has_val_anchor(), true);
+ EXPECT_EQ(t.rootref().val_anchor(), "anchor0");
+ EXPECT_EQ(t["top4"].has_key_anchor(), true);
+ EXPECT_EQ(t["top4"].has_val_anchor(), false);
+ EXPECT_EQ(t["top4"].key_anchor(), "anchor4");
+ EXPECT_EQ(t["top4"]["key4"].val(), "scalar4");
+ EXPECT_EQ(t["top4"]["key4"].has_key_anchor(), false);
+ EXPECT_EQ(t["top5"].has_key_anchor(), false);
+ EXPECT_EQ(t["top5"].has_val_anchor(), true);
+ EXPECT_EQ(t["top5"].val_anchor(), "anchor5");
+ EXPECT_EQ(t["top5"]["key5"].val(), "scalar5");
+ EXPECT_EQ(t["top5"]["key5"].has_key_anchor(), false);
+ EXPECT_EQ(t["top6"].has_key_anchor(), false);
+ EXPECT_EQ(t["top6"].has_val_anchor(), false);
+ EXPECT_EQ(t["top6"]["key6"].val(), "scalar6");
+ ASSERT_EQ(t["top6"]["key6"].has_key_anchor(), true);
+ EXPECT_EQ(t["top6"]["key6"].key_anchor(), "anchor6");
+ EXPECT_EQ(t["top61"].has_key_anchor(), false);
+ EXPECT_EQ(t["top61"].has_val_anchor(), false);
+ EXPECT_EQ(t["top61"]["key61"].val(), "scalar61");
+ ASSERT_EQ(t["top61"]["key61"].has_key_anchor(), true);
+ EXPECT_EQ(t["top61"]["key61"].key_anchor(), "anchor61");
+ EXPECT_EQ(t["top62"].has_key_anchor(), false);
+ EXPECT_EQ(t["top62"].has_val_anchor(), true);
+ EXPECT_EQ(t["top62"].val_anchor(), "anchor62");
+ EXPECT_EQ(t["top62"]["key62"].val(), "scalar62");
+ ASSERT_EQ(t["top62"]["key62"].has_key_anchor(), false);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(SIMPLE_ANCHOR)
+{
+
+ADD_CASE_TO_GROUP("merge example, unresolved",
+R"(# https://yaml.org/type/merge.html
+- &CENTER { x: 1, y: 2 }
+- &LEFT { x: 0, y: 2 }
+- &BIG { r: 10 }
+- &SMALL { r: 1 }
+
+# All the following maps are equal:
+
+- # Explicit keys
+ x: 1
+ y: 2
+ r: 10
+ label: center/big
+
+- # Merge one map
+ << : *CENTER
+ r: 10
+ label: center/big
+
+- # Merge multiple maps
+ << : [ *CENTER, *BIG ]
+ label: center/big
+
+- # Override
+ << : [ *BIG, *LEFT, *SMALL ]
+ x: 1
+ label: center/big
+)",
+L{
+ N(L{N("x", "1" ), N("y", "2")}, AR(VALANCH, "CENTER")),
+ N(L{N("x", "0" ), N("y", "2")}, AR(VALANCH, "LEFT" )),
+ N(L{N("r", "10") }, AR(VALANCH, "BIG" )),
+ N(L{N("r", "1" ) }, AR(VALANCH, "SMALL" )),
+ N(L{N("x", "1" ), N("y", "2"), N("r", "10"), N("label", "center/big")}),
+ N(L{N("<<", AR(KEYREF, "<<"), "*CENTER", AR(VALREF, "*CENTER")), N("r", "10"), N("label", "center/big")}),
+ N(L{N("<<", AR(KEYREF, "<<"), L{N("*CENTER", AR(VALREF, "*CENTER")), N("*BIG", AR(VALREF, "*BIG"))}), N("label", "center/big")}),
+ N(L{N("<<", AR(KEYREF, "<<"), L{N("*BIG", AR(VALREF, "*BIG")), N("*LEFT", AR(VALREF, "*LEFT")), N("*SMALL", AR(VALREF, "*SMALL"))}), N("x", "1"), N("label", "center/big")}),
+});
+
+ADD_CASE_TO_GROUP("merge example, resolved", RESOLVE_REFS,
+R"(# https://yaml.org/type/merge.html
+- &CENTER { x: 1, y: 2 }
+- &LEFT { x: 0, y: 2 }
+- &BIG { r: 10 }
+- &SMALL { r: 1 }
+
+# All the following maps are equal:
+
+- # Explicit keys
+ x: 1
+ y: 2
+ r: 10
+ label: center/big
+
+- # Merge one map
+ << : *CENTER
+ r: 10
+ label: center/big
+
+- # Merge multiple maps
+ << : [ *CENTER, *BIG ]
+ label: center/big
+
+- # Override
+ << : [ *SMALL, *LEFT, *BIG ]
+ x: 1
+ label: center/big
+)",
+L{
+ N(L{N("x", "1" ), N("y", "2")}),
+ N(L{N("x", "0" ), N("y", "2")}),
+ N(L{N("r", "10") }),
+ N(L{N("r", "1" ) }),
+ N(L{N("x", "1" ), N("y", "2"), N("r", "10"), N("label", "center/big")}),
+ N(L{N("x", "1" ), N("y", "2"), N("r", "10"), N("label", "center/big")}),
+ N(L{N("x", "1" ), N("y", "2"), N("r", "10"), N("label", "center/big")}),
+ N(L{N("x", "1" ), N("y", "2"), N("r", "10"), N("label", "center/big")}),
+});
+
+ADD_CASE_TO_GROUP("simple anchor 1, implicit, unresolved", // block-style map; anchors/refs kept as nodes, not resolved
+R"(
+anchored_content: &anchor_name This string will appear as the value of two keys.
+other_anchor: *anchor_name
+anchors_in_seqs:
+  - &anchor_in_seq this value appears in both elements of the sequence
+  - *anchor_in_seq
+base: &base
+    name: Everyone has same name
+foo: &foo
+    <<: *base
+    age: 10
+bar: &bar
+    <<: *base
+    age: 20
+)",
+  L{
+      N("anchored_content", "This string will appear as the value of two keys.", AR(VALANCH, "anchor_name")),
+      N("other_anchor", "*anchor_name", AR(VALREF, "anchor_name")), // unresolved ref: val keeps the literal "*anchor_name" text
+      N("anchors_in_seqs", L{
+              N("this value appears in both elements of the sequence", AR(VALANCH, "anchor_in_seq")),
+              N("*anchor_in_seq", AR(VALREF, "anchor_in_seq")),
+          }),
+      N("base", L{N("name", "Everyone has same name")}, AR(VALANCH, "base")),
+      N("foo", L{N("<<", AR(KEYREF, "<<"), "*base", AR(VALREF, "base")), N("age", "10")}, AR(VALANCH, "foo")),
+      N("bar", L{N("<<", AR(KEYREF, "<<"), "*base", AR(VALREF, "base")), N("age", "20")}, AR(VALANCH, "bar")),
+  }
+);
+
+ADD_CASE_TO_GROUP("simple anchor 1, explicit, unresolved", // same document in flow (explicit) style; must yield the same tree
+R"({
+anchored_content: &anchor_name This string will appear as the value of two keys.,
+other_anchor: *anchor_name,
+anchors_in_seqs: [
+  &anchor_in_seq this value appears in both elements of the sequence,
+  *anchor_in_seq
+  ],
+base: &base {
+    name: Everyone has same name
+  },
+foo: &foo {
+  <<: *base,
+  age: 10
+  },
+bar: &bar {
+  <<: *base,
+  age: 20
+  }
+})",
+  L{
+      N("anchored_content", "This string will appear as the value of two keys.", AR(VALANCH, "anchor_name")),
+      N("other_anchor", "*anchor_name", AR(VALREF, "anchor_name")),
+      N("anchors_in_seqs", L{
+              N("this value appears in both elements of the sequence", AR(VALANCH, "anchor_in_seq")),
+              N("*anchor_in_seq", AR(VALREF, "anchor_in_seq")),
+          }),
+      N("base", L{N("name", "Everyone has same name")}, AR(VALANCH, "base")),
+      N("foo", L{N("<<", AR(KEYREF, "<<"), "*base", AR(VALREF, "base")), N("age", "10")}, AR(VALANCH, "foo")),
+      N("bar", L{N("<<", AR(KEYREF, "<<"), "*base", AR(VALREF, "base")), N("age", "20")}, AR(VALANCH, "bar")),
+  }
+);
+
+ADD_CASE_TO_GROUP("simple anchor 1, implicit, resolved", RESOLVE_REFS, // same input as the unresolved case; refs replaced by anchored content
+R"(
+anchored_content: &anchor_name This string will appear as the value of two keys.
+other_anchor: *anchor_name
+anchors_in_seqs:
+  - &anchor_in_seq this value appears in both elements of the sequence
+  - *anchor_in_seq
+base: &base
+    name: Everyone has same name
+foo: &foo
+    <<: *base
+    age: 10
+bar: &bar
+    <<: *base
+    age: 20
+)",
+  L{
+      N("anchored_content", "This string will appear as the value of two keys."),
+      N("other_anchor", "This string will appear as the value of two keys."),
+      N("anchors_in_seqs", L{
+              N("this value appears in both elements of the sequence"),
+              N("this value appears in both elements of the sequence"),
+          }),
+      N("base", L{N("name", "Everyone has same name")}),
+      N("foo", L{N("name", "Everyone has same name"), N("age", "10")}), // << merge inlines base's entries
+      N("bar", L{N("name", "Everyone has same name"), N("age", "20")}),
+  }
+);
+
+ADD_CASE_TO_GROUP("simple anchor 1, explicit, resolved", RESOLVE_REFS, // flow-style twin of the previous case
+R"({
+anchored_content: &anchor_name This string will appear as the value of two keys.,
+other_anchor: *anchor_name,
+anchors_in_seqs: [
+  &anchor_in_seq this value appears in both elements of the sequence,
+  *anchor_in_seq
+  ],
+base: &base {
+    name: Everyone has same name
+  },
+foo: &foo {
+  <<: *base,
+  age: 10
+  },
+bar: &bar {
+  <<: *base,
+  age: 20
+  }
+})",
+  L{
+      N("anchored_content", "This string will appear as the value of two keys."),
+      N("other_anchor", "This string will appear as the value of two keys."),
+      N("anchors_in_seqs", L{
+              N("this value appears in both elements of the sequence"),
+              N("this value appears in both elements of the sequence"),
+          }),
+      N("base", L{N("name", "Everyone has same name")}),
+      N("foo", L{N("name", "Everyone has same name"), N("age", "10")}),
+      N("bar", L{N("name", "Everyone has same name"), N("age", "20")}),
+  }
+);
+
+
+ADD_CASE_TO_GROUP("anchor example 2, unresolved", // yaml.org invoice example: anchor on a map value, plus literal (|) and folded (>) block scalars
+R"(
+receipt: Oz-Ware Purchase Invoice
+date: 2012-08-06
+customer:
+  first_name: Dorothy
+  family_name: Gale
+items:
+  - part_no: A4786
+    descrip: Water Bucket (Filled)
+    price: 1.47
+    quantity: 4
+  - part_no: E1628
+    descrip: High Heeled "Ruby" Slippers
+    size: 8
+    price: 133.7
+    quantity: 1
+bill-to: &id001
+  street: |
+    123 Tornado Alley
+    Suite 16
+  city: East Centerville
+  state: KS
+ship-to: *id001
+specialDelivery: >
+  Follow the Yellow Brick
+  Road to the Emerald City.
+  Pay no attention to the
+  man behind the curtain.
+)",
+L{
+  N{"receipt", "Oz-Ware Purchase Invoice"},
+  N{"date", "2012-08-06"},
+  N{"customer", L{N{"first_name", "Dorothy"}, N{"family_name", "Gale"}}},
+  N{"items", L{
+      N{L{N{"part_no", "A4786"},
+          N{"descrip", "Water Bucket (Filled)"},
+          N{"price", "1.47"},
+          N{"quantity", "4"},}},
+      N{L{N{"part_no", "E1628"},
+          N{"descrip", "High Heeled \"Ruby\" Slippers"},
+          N{"size", "8"},
+          N{"price", "133.7"},
+          N{"quantity", "1"},}}}},
+  N{"bill-to", L{
+      N{QV, "street", "123 Tornado Alley\nSuite 16\n"}, // literal block scalar keeps newlines; QV marks the val as quoted/styled
+      N{"city", "East Centerville"},
+      N{"state", "KS"},}, AR(VALANCH, "id001")},
+  N{"ship-to", "*id001", AR(VALREF, "id001")}, // unresolved: ship-to stays a ref node
+  N{QV, "specialDelivery", "Follow the Yellow Brick Road to the Emerald City. Pay no attention to the man behind the curtain.\n"} // folded scalar joins lines with spaces
+  }
+);
+
+
+ADD_CASE_TO_GROUP("anchor example 2, resolved", RESOLVE_REFS, // same input; ship-to becomes a full copy of the bill-to map
+R"(
+receipt: Oz-Ware Purchase Invoice
+date: 2012-08-06
+customer:
+  first_name: Dorothy
+  family_name: Gale
+items:
+  - part_no: A4786
+    descrip: Water Bucket (Filled)
+    price: 1.47
+    quantity: 4
+  - part_no: E1628
+    descrip: High Heeled "Ruby" Slippers
+    size: 8
+    price: 133.7
+    quantity: 1
+bill-to: &id001
+  street: |
+    123 Tornado Alley
+    Suite 16
+  city: East Centerville
+  state: KS
+ship-to: *id001
+specialDelivery: >
+  Follow the Yellow Brick
+  Road to the Emerald City.
+  Pay no attention to the
+  man behind the curtain.
+)",
+L{
+  N{"receipt", "Oz-Ware Purchase Invoice"},
+  N{"date", "2012-08-06"},
+  N{"customer", L{N{"first_name", "Dorothy"}, N{"family_name", "Gale"}}},
+  N{"items", L{
+      N{L{N{"part_no", "A4786"},
+          N{"descrip", "Water Bucket (Filled)"},
+          N{"price", "1.47"},
+          N{"quantity", "4"},}},
+      N{L{N{"part_no", "E1628"},
+          N{"descrip", "High Heeled \"Ruby\" Slippers"},
+          N{"size", "8"},
+          N{"price", "133.7"},
+          N{"quantity", "1"},}}}},
+  N{"bill-to", L{
+      N{QV, "street", "123 Tornado Alley\nSuite 16\n"},
+      N{"city", "East Centerville"},
+      N{"state", "KS"},}},
+  N{"ship-to", L{ // resolved duplicate of bill-to; anchor/ref annotations are gone
+      N{QV, "street", "123 Tornado Alley\nSuite 16\n"},
+      N{"city", "East Centerville"},
+      N{"state", "KS"},}},
+  N{QV, "specialDelivery", "Follow the Yellow Brick Road to the Emerald City. Pay no attention to the man behind the curtain.\n"}
+  }
+);
+
+ADD_CASE_TO_GROUP("anchor example 3, unresolved", // seq of single-key maps whose values are anchored/referenced, plus a << merge
+R"(
+- step: &id001 # defines anchor label &id001
+  instrument: Lasik 2000
+  pulseEnergy: 5.4
+  pulseDuration: 12
+  repetition: 1000
+  spotSize: 1mm
+- step: &id002
+  instrument: Lasik 2000
+  pulseEnergy: 5.0
+  pulseDuration: 10
+  repetition: 500
+  spotSize: 2mm
+- step: *id001 # refers to the first step (with anchor &id001)
+- step: *id002 # refers to the second step
+- step:
+  <<: *id001
+  spotSize: 2mm # redefines just this key, refers rest from &id001
+- step: *id002
+)",
+L{N(L{
+N("step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.4"},
+    N{"pulseDuration", "12"},
+    N{"repetition", "1000"},
+    N{"spotSize", "1mm"},
+    }, AR(VALANCH, "id001")),
+    }), N(L{
+N("step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.0"},
+    N{"pulseDuration", "10"},
+    N{"repetition", "500"},
+    N{"spotSize", "2mm"},
+    }, AR(VALANCH, "id002")),
+    }), N(L{
+N{"step", "*id001", AR(VALREF, "id001")},
+    }), N(L{
+N{"step", "*id002", AR(VALREF, "id002")},
+    }), N(L{
+N{"step", L{
+    N{"<<", AR(KEYREF, "<<"), "*id001", AR(VALREF, "id002")}, // NOTE(review): ref target "id002" disagrees with the YAML "<<: *id001" above — expected "id001"; confirm against upstream ryml
+    N{"spotSize", "2mm"},
+    }},
+    }), N(L{
+N{"step", "*id002", AR(VALREF, "id002")},
+    }),
+    }
+);
+
+ADD_CASE_TO_GROUP("anchor example 3, resolved", RESOLVE_REFS, // refs and the << merge expand to full copies of the anchored maps
+R"(
+- step: &id001 # defines anchor label &id001
+  instrument: Lasik 2000
+  pulseEnergy: 5.4
+  pulseDuration: 12
+  repetition: 1000
+  spotSize: 1mm
+- step: &id002
+  instrument: Lasik 2000
+  pulseEnergy: 5.0
+  pulseDuration: 10
+  repetition: 500
+  spotSize: 2mm
+- step: *id001 # refers to the first step (with anchor &id001)
+- step: *id002 # refers to the second step
+- step:
+  <<: *id001
+  spotSize: 2mm # redefines just this key, refers rest from &id001
+- step: *id002
+)",
+  L{N(L{
+N{"step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.4"},
+    N{"pulseDuration", "12"},
+    N{"repetition", "1000"},
+    N{"spotSize", "1mm"},
+    }},
+    }), N(L{
+N{"step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.0"},
+    N{"pulseDuration", "10"},
+    N{"repetition", "500"},
+    N{"spotSize", "2mm"},
+    }},
+    }), N(L{
+N{"step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.4"},
+    N{"pulseDuration", "12"},
+    N{"repetition", "1000"},
+    N{"spotSize", "1mm"},
+    }},
+    }), N(L{
+N{"step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.0"},
+    N{"pulseDuration", "10"},
+    N{"repetition", "500"},
+    N{"spotSize", "2mm"},
+    }},
+    }), N(L{
+N{"step", L{ // << merge of id001 with spotSize overridden to 2mm
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.4"},
+    N{"pulseDuration", "12"},
+    N{"repetition", "1000"},
+    N{"spotSize", "2mm"},
+    }},
+    }), N(L{
+N{"step", L{
+    N{"instrument", "Lasik 2000"},
+    N{"pulseEnergy", "5.0"},
+    N{"pulseDuration", "10"},
+    N{"repetition", "500"},
+    N{"spotSize", "2mm"},
+    }},
+    }),
+    }
+);
+
+ADD_CASE_TO_GROUP("tagged doc with anchors 9KAX", // yaml-test-suite 9KAX: every tag/anchor ordering must parse the same
+R"(
+---
+&a1
+!!str
+scalar1
+--- &a1 !!str scalar1
+---
+!!str
+&a1
+scalar1
+--- !!str &a1 scalar1
+---
+!!str
+&a2
+scalar2
+--- &a2 !!str scalar2
+---
+&a3
+!!str scalar3
+--- &a3 !!str scalar3
+---
+&a4 !!map
+&a5 !!str key5: value4
+--- &a4 !!map
+&a5 !!str key5: value4
+---
+a6: 1
+&anchor6 b6: 2
+---
+!!map
+&a8 !!str key8: value7
+--- !!map
+&a8 !!str key8: value7
+---
+!!map
+!!str &a10 key10: value9
+--- !!map
+&a10 !!str key10: value9
+---
+!!str &a11
+value11
+--- &a11 !!str value11
+)",
+N(STREAM, L{ // one DOCVAL/DOCMAP per document; anchor+tag order must not matter
+    N(DOCVAL, TS("!!str", "scalar1"), AR(VALANCH, "a1")),
+    N(DOCVAL, TS("!!str", "scalar1"), AR(VALANCH, "a1")),
+    N(DOCVAL, TS("!!str", "scalar1"), AR(VALANCH, "a1")),
+    N(DOCVAL, TS("!!str", "scalar1"), AR(VALANCH, "a1")),
+    N(DOCVAL, TS("!!str", "scalar2"), AR(VALANCH, "a2")),
+    N(DOCVAL, TS("!!str", "scalar2"), AR(VALANCH, "a2")),
+    N(DOCVAL, TS("!!str", "scalar3"), AR(VALANCH, "a3")),
+    N(DOCVAL, TS("!!str", "scalar3"), AR(VALANCH, "a3")),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key5"), AR(KEYANCH, "a5"), "value4")}), AR(VALANCH, "a4")),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key5"), AR(KEYANCH, "a5"), "value4")}), AR(VALANCH, "a4")),
+    N(DOCMAP, L{N("a6", "1"), N("b6", AR(KEYANCH, "anchor6"), "2")}),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key8"), AR(KEYANCH, "a8"), "value7")})),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key8"), AR(KEYANCH, "a8"), "value7")})),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key10"), AR(KEYANCH, "a10"), "value9")})),
+    N(DOCMAP, TL("!!map", L{N(TS("!!str", "key10"), AR(KEYANCH, "a10"), "value9")})),
+    N(DOCVAL, TS("!!str", "value11"), AR(VALANCH, "a11")),
+    N(DOCVAL, TS("!!str", "value11"), AR(VALANCH, "a11")),
+})
+);
+
+ADD_CASE_TO_GROUP("github131 1, unresolved", // regression for ryml issue #131: anchors/refs appearing on keys
+R"(
+a: &vref b
+*vref: c
+&kref aa: bb
+aaa: &kvref bbb
+foo:
+  *kref: cc
+  *kvref: cc
+)",
+L{
+    N("a", "b", AR(VALANCH, "vref")),
+    N("*vref", AR(KEYREF, "vref"), "c"), // ref used as a key
+    N("aa", AR(KEYANCH, "kref"), "bb"), // anchor placed on a key
+    N("aaa", "bbb", AR(VALANCH, "kvref")),
+    N("foo", L{
+        N("*kref", AR(KEYREF, "kref"), "cc"),
+        N("*kvref", AR(KEYREF, "kvref"), "cc"),
+    })
+});
+
+ADD_CASE_TO_GROUP("github131 1, resolved", RESOLVE_REFS, // key refs substitute the anchored scalar as the key
+R"(
+a: &vref b
+*vref: c
+&kref aa: bb
+aaa: &kvref bbb
+foo:
+  *kref: cc
+  *kvref: cc
+)",
+L{
+    N("a", "b"),
+    N("b", "c"), // *vref resolved to "b" and used as key
+    N("aa", "bb"),
+    N("aaa", "bbb"),
+    N("foo", L{N("aa", "cc"), N("bbb", "cc")})
+});
+
+
+ADD_CASE_TO_GROUP("anchors+refs on key+val, unresolved", // anchor on both key and val, then a ref on both key and val
+R"({&a0 a0: &b0 b0, *b0: *a0})",
+L{
+    N("a0", AR(KEYANCH, "a0"), "b0", AR(VALANCH, "b0")),
+    N(AR(KEYREF, "*b0"), AR(VALREF, "*a0")),
+});
+
+ADD_CASE_TO_GROUP("anchors+refs on key+val, resolved", RESOLVE_REFS, // resolution swaps the anchored scalars into key and val
+R"({&a0 a0: &b0 b0, *b0: *a0})",
+L{
+    N("a0", "b0"),
+    N("b0", "a0"),
+});
+
+
+ADD_CASE_TO_GROUP("ambiguous anchor, unresolved", // disambiguation: whether an anchor binds to the key, the val, or the parent map
+R"(&rootanchor
+&a0 a0: &b0 b0
+*b0: *a0
+map1:
+  &a1 a1: &b1 b1 # &a1 must be a KEY anchor on a1, not a VAL anchor on map1
+  *b1: *a1
+map2:
+  *b0: *a0 # ensure the anchor is enough to establish the indentation
+  &a2 a2: &b2 b2
+  *b2: *a2
+map3: &a3 # &a3 must be a VAL anchor on map3, not a KEY anchor on a3
+  a3: &b3 b3
+  *b3: *b0
+map4: *a0
+map5:
+  &map5
+  &a5 a5: &b5 b5
+  *b5: *a5
+map6:
+  &map6
+  a6: &b6 b6
+  *b6: *b6
+)",
+N(L{
+    N("a0", AR(KEYANCH, "a0"), "b0", AR(VALANCH, "b0")),
+    N(AR(KEYREF, "*b0"), AR(VALREF, "*a0")),
+    N("map1", L{N("a1", AR(KEYANCH, "a1"), "b1", AR(VALANCH, "b1")), N(AR(KEYREF, "*b1"), AR(VALREF, "*a1")),}),
+    N("map2", L{N(AR(KEYREF, "*b0"), AR(VALREF, "*a0")), N("a2", AR(KEYANCH, "a2"), "b2", AR(VALANCH, "b2")), N(AR(KEYREF, "*b2"), AR(VALREF, "*a2")),}),
+    N("map3", L{N("a3", "b3", AR(VALANCH, "b3")), N(AR(KEYREF, "*b3"), AR(VALREF, "*b0")),}, AR(VALANCH, "a3")),
+    N("map4", "*a0", AR(VALREF, "a0")),
+    N("map5", L{N("a5", AR(KEYANCH, "a5"), "b5", AR(VALANCH, "b5")), N(AR(KEYREF, "*b5"), AR(VALREF, "*a5")),}, AR(VALANCH, "map5")),
+    N("map6", L{N("a6", "b6", AR(VALANCH, "b6")), N(AR(KEYREF, "*b6"), AR(VALREF, "*b6")),}, AR(VALANCH, "map6")),
+}, AR(VALANCH, "rootanchor"))); // anchor on its own first line attaches to the root map
+
+ADD_CASE_TO_GROUP("ambiguous anchor, resolved", RESOLVE_REFS, // same document (minus root anchor) with all refs substituted
+R"(
+&a0 a0: &b0 b0
+*b0: *a0
+map1:
+  &a1 a1: &b1 b1 # &a1 must be a KEY anchor on a1, not a VAL anchor on map1
+  *b1: *a1
+map2:
+  *b0: *a0 # ensure the anchor is enough to establish the indentation
+  &a2 a2: &b2 b2
+  *b2: *a2
+map3: &a3 # &a3 must be a VAL anchor on map3, not a KEY anchor on a3
+  a3: &b3 b3
+  *b3: *b0
+map4: *a0
+map5:
+  &map5
+  &a5 a5: &b5 b5
+  *b5: *a5
+map6:
+  &map6
+  a6: &b6 b6
+  *b6: *b6
+)",
+L{
+    N("a0", "b0"), N("b0", "a0"),
+    N("map1", L{N("a1", "b1"), N("b1", "a1"),}),
+    N("map2", L{N("b0", "a0"), N("a2", "b2"), N("b2", "a2"),}),
+    N("map3", L{N("a3", "b3"), N("b3", "b0"),}),
+    N("map4", "a0"),
+    N("map5", L{N("a5", "b5"), N("b5", "a5"),}),
+    N("map6", L{N("a6", "b6"), N("b6", "b6"),}),
+});
+
+
+ADD_CASE_TO_GROUP("ambiguous anchor in seq, unresolved", // anchors at seq-item level vs key level inside child maps
+R"(
+&seq
+- &a0
+  &a1 k1: v1
+  &a2 k2: v2
+  &a3 k3: v3
+- &a4 k4: v4
+  &a5 k5: v5
+  &a6 k6: v6
+- &a7
+  &a8 k8: v8
+- &a9
+  k10: v10
+- *a1: w1
+  *a2: w2
+  *a3: w3
+  *a4: w4
+  *a5: w5
+  *a6: w6
+  *a8: w8
+- *a0
+- *a7
+- *a9
+)",
+N(L{
+    N(L{N("k1", AR(KEYANCH, "a1"), "v1"), N("k2", AR(KEYANCH, "a2"), "v2"), N("k3", AR(KEYANCH, "a3"), "v3")}, AR(VALANCH, "a0")), // &a0 on its own line -> anchors the seq item (the map)
+    N(L{N("k4", AR(KEYANCH, "a4"), "v4"), N("k5", AR(KEYANCH, "a5"), "v5"), N("k6", AR(KEYANCH, "a6"), "v6")}), // &a4 on the same line as k4 -> key anchor, no map anchor
+    N(L{N("k8", AR(KEYANCH, "a8"), "v8")}, AR(VALANCH, "a7")),
+    N(L{N("k10", "v10")}, AR(VALANCH, "a9")),
+    N(L{
+        N("*a1", AR(KEYREF, "*a1"), "w1"),
+        N("*a2", AR(KEYREF, "*a2"), "w2"),
+        N("*a3", AR(KEYREF, "*a3"), "w3"),
+        N("*a4", AR(KEYREF, "*a4"), "w4"),
+        N("*a5", AR(KEYREF, "*a5"), "w5"),
+        N("*a6", AR(KEYREF, "*a6"), "w6"),
+        N("*a8", AR(KEYREF, "*a8"), "w8"),
+    }),
+    N("*a0", AR(VALREF, "*a0")),
+    N("*a7", AR(VALREF, "*a7")),
+    N("*a9", AR(VALREF, "*a9")),
+}, AR(VALANCH, "seq")));
+
+ADD_CASE_TO_GROUP("ambiguous anchor in seq, resolved", RESOLVE_REFS,
+R"(
+&seq
+- &a0
+  &a1 k1: v1
+  &a2 k2: v2
+  &a3 k3: v3
+- &a4 k4: v4
+  &a5 k5: v5
+  &a6 k6: v6
+- &a7
+  &a8 k8: v8
+- &a9
+  k10: v10
+- *a1: w1
+  *a2: w2
+  *a3: w3
+  *a4: w4
+  *a5: w5
+  *a6: w6
+  *a8: w8
+- *a0
+- *a7
+- *a9
+)",
+L{
+    N(L{N("k1", "v1"), N("k2", "v2"), N("k3", "v3")}),
+    N(L{N("k4", "v4"), N("k5", "v5"), N("k6", "v6")}),
+    N(L{N("k8", "v8")}),
+    N(L{N("k10", "v10")}),
+    N(L{
+        N("k1", "w1"),
+        N("k2", "w2"),
+        N("k3", "w3"),
+        N("k4", "w4"),
+        N("k5", "w5"),
+        N("k6", "w6"),
+        N("k8", "w8"),
+    }),
+    N(L{N("k1", AR(KEYANCH, "a1"), "v1"), N("k2", AR(KEYANCH, "a2"), "v2"), N("k3", AR(KEYANCH, "a3"), "v3")}), // NOTE(review): resolved copies still carry the key anchors — presumably intentional in ryml's resolve(); confirm
+    N(L{N("k8", AR(KEYANCH, "a8"), "v8")}),
+    N(L{N("k10", "v10")}),
+});
+
+ADD_CASE_TO_GROUP("anchor after complex key without value ZWK4", // yaml-test-suite ZWK4: "? b" complex key with no value, then an anchored key
+R"(
+a: 1
+? b
+&anchor c: 3
+)",
+    L{
+        N("a", "1"), N(KEYVAL, "b", {}), N("c", AR(KEYANCH, "anchor"), "3") // "? b" yields key "b" with empty val
+    }
+);
+
+ADD_CASE_TO_GROUP("anchor mixed with tag HMQ5, unresolved", // yaml-test-suite HMQ5: tag and anchor on the same (quoted) key
+R"(
+!!str &a1 "foo":
+  !!str bar
+&a2 baz : *a1
+)",
+    L{
+        N(KEYVAL|KEYQUO, TS("!!str", "foo"), AR(KEYANCH, "a1"), TS("!!str", "bar")),
+        N("baz", AR(KEYANCH, "a2"), "*a1", AR(VALREF, "*a1")),
+    }
+);
+
+ADD_CASE_TO_GROUP("anchor mixed with tag HMQ5, resolved", RESOLVE_REFS, // *a1 resolves to the key scalar "foo"
+R"(
+!!str &a1 "foo":
+  !!str bar
+&a2 baz : *a1
+)",
+    L{
+        N(KEYVAL|KEYQUO, TS("!!str", "foo"), TS("!!str", "bar")),
+        N("baz", "foo"),
+    }
+);
+}
+
+
+C4_SUPPRESS_WARNING_GCC_POP
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_simple_doc.cpp b/thirdparty/ryml/test/test_simple_doc.cpp
new file mode 100644
index 000000000..9e47c6b1e
--- /dev/null
+++ b/thirdparty/ryml/test/test_simple_doc.cpp
@@ -0,0 +1,526 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+TEST(simple_doc, issue_251) // a bare "..." must parse as an empty doc, and emit as a quoted scalar when it is a value
+{
+    {
+        csubstr yaml = R"(
+...
+)";
+        test_check_emit_check(yaml, [](Tree const &t){
+            EXPECT_EQ(t.rootref().type(), NOTYPE); // document-end marker alone: empty tree
+            ASSERT_EQ(t.rootref().num_children(), 0u);
+        });
+    }
+    {
+        Tree tree;
+        NodeRef root = tree.rootref();
+        root |= MAP;
+        root["test"] = "...";
+        root["test"] |= VALQUO; // force quoting so "..." is not mistaken for a doc-end marker on emit
+
+        std::string s = emitrs_yaml<std::string>(tree);
+        test_check_emit_check(to_csubstr(s), [](Tree const &t){ // round-trip: emit then re-parse
+            EXPECT_EQ(t["test"].val(), "...");
+        });
+    }
+}
+
+
+TEST(simple_doc, test_suite_XLQ9) // yaml-test-suite XLQ9: a %YAML line after "---" is document content, not a directive
+{
+    csubstr yaml = R"(
+---
+scalar
+%YAML 1.2
+)";
+    test_check_emit_check(yaml, [](Tree const &t){
+        ASSERT_TRUE(t.rootref().is_stream());
+        ASSERT_EQ(t.rootref().num_children(), 1u);
+        ASSERT_TRUE(t.rootref().first_child().is_doc());
+        ASSERT_TRUE(t.rootref().first_child().is_val());
+        EXPECT_EQ(t.rootref().first_child().val(), csubstr("scalar %YAML 1.2")); // folded into one plain scalar
+    });
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(SIMPLE_DOC) // document/stream structure cases: empty docs, doc markers, indentation, multi-doc streams
+{
+
+ADD_CASE_TO_GROUP("one empty doc",
+R"(---
+)",
+    N(STREAM, L{DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("one empty doc, explicit termination",
+R"(---
+...
+)",
+    N(STREAM, L{DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("two empty docs",
+R"(---
+---
+)",
+    N(STREAM, L{DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("two empty docs, with termination",
+R"(---
+...
+---
+)",
+    N(STREAM, L{DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("doc with single scalar", // no "---": the doc node is implicit, no STREAM wrapper
+R"(a scalar
+)",
+N(DOCVAL, "a scalar")
+);
+
+ADD_CASE_TO_GROUP("doc with single scalar, explicit", // with "---": wrapped in a STREAM node
+R"(--- a scalar
+)",
+N(STREAM, L{N(DOCVAL, "a scalar")})
+);
+
+ADD_CASE_TO_GROUP("simple doc, empty docs",
+R"(---
+---
+---
+---
+)",
+    N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("simple doc, empty docs, indented",
+R"(    ---
+    ---
+    ---
+    ---
+)",
+    N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("simple doc, empty docs, term",
+R"(---
+...
+
+
+---
+...
+---
+...
+---
+...
+)",
+    N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("simple doc, empty docs, term, indented",
+R"(
+    ---
+    ...
+
+
+    ---
+    ...
+    ---
+    ...
+    ---
+    ...
+)",
+    N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
+);
+
+ADD_CASE_TO_GROUP("simple doc, plain scalar, multiple docs, implicit 2nd doc", // "..." alone starts a new implicit document
+R"(---
+- a plain scalar
+  with several lines
+...
+- a second plain scalar
+  with several lines
+)",
+N(STREAM, L{
+    N(DOCSEQ, L{N("a plain scalar with several lines")}),
+    N(DOCSEQ, L{N("a second plain scalar with several lines")}),
+}));
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, implicit doc",
+R"(a scalar with some spaces inside
+)",
+    N(DOCVAL, "a scalar with some spaces inside")
+);
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, implicit doc, indented",
+R"(    a scalar with some spaces inside
+)",
+    N(DOCVAL,"a scalar with some spaces inside")
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi scalar, implicit doc", // commas at top level make a seq of plain scalars
+R"(a scalar with some spaces inside,
+and yet another one with more spaces inside,
+and it doesn't really stop
+)",
+    N(L{
+      N("a scalar with some spaces inside"),
+      N("and yet another one with more spaces inside"),
+      N("and it doesn't really stop"),
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi scalar, implicit doc, indented",
+R"(
+    a scalar with some spaces inside,
+    and yet another one with more spaces inside,
+    and it doesn't really stop
+)",
+    N(L{
+      N("a scalar with some spaces inside"),
+      N("and yet another one with more spaces inside"),
+      N("and it doesn't really stop"),
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, implicit termination",
+R"(---
+a scalar with some spaces inside
+)",
+    N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
+);
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, implicit termination, indented",
+R"(    ---
+    a scalar with some spaces inside
+)",
+    N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
+);
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, explicit termination",
+R"(---
+a scalar with some spaces inside
+...
+)",
+    N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
+);
+
+ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, explicit termination, indented",
+R"(    ---
+    a scalar with some spaces inside
+    ...
+)",
+    N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map", // the 4 following groups permute {seq,map} x {term,no term} x {indented,not}
+R"(---
+- a
+- b
+- c
+...
+---
+a: 0
+b: 1
+c: 2
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, indented",
+R"(    ---
+    - a
+    - b
+    - c
+    ...
+    ---
+    a: 0
+    b: 1
+    c: 2
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, no term",
+R"(---
+- a
+- b
+- c
+---
+a: 0
+b: 1
+c: 2
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, no term, indented",
+R"(
+    ---
+    - a
+    - b
+    - c
+    ---
+    a: 0
+    b: 1
+    c: 2
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq",
+R"(---
+a: 0
+b: 1
+c: 2
+...
+---
+- a
+- b
+- c
+...
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, indented",
+R"(
+    ---
+    a: 0
+    b: 1
+    c: 2
+    ...
+    ---
+    - a
+    - b
+    - c
+    ...
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, no term",
+R"(---
+a: 0
+b: 1
+c: 2
+---
+- a
+- b
+- c
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, no term, indented",
+R"(
+    ---
+    a: 0
+    b: 1
+    c: 2
+    ---
+    - a
+    - b
+    - c
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map", // flow-style ("impl") twins of the block-style groups above
+R"(---
+[a, b, c]
+...
+---
+{a: 0, b: 1, c: 2}
+...
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, indented",
+R"(
+    ---
+    [a, b, c]
+    ...
+    ---
+    {a: 0, b: 1, c: 2}
+    ...
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, no term",
+R"(---
+[a, b, c]
+---
+{a: 0, b: 1, c: 2}
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, no term, indented",
+R"(
+    ---
+    [a, b, c]
+    ---
+    {a: 0, b: 1, c: 2}
+)",
+    N(STREAM, L{
+        N(DOCSEQ, L{N("a"), N("b"), N("c")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq",
+R"(---
+{a: 0, b: 1, c: 2}
+...
+---
+[a, b, c]
+...
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, indented",
+R"(
+    ---
+    {a: 0, b: 1, c: 2}
+    ...
+    ---
+    [a, b, c]
+    ...
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, no term",
+R"(---
+{a: 0, b: 1, c: 2}
+---
+[a, b, c]
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, no term, indented",
+R"(
+    ---
+    {a: 0, b: 1, c: 2}
+    ---
+    [a, b, c]
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
+        N(DOCSEQ, L{N("a"), N("b"), N("c")})
+    })
+);
+
+ADD_CASE_TO_GROUP("simple doc, indented with empty lines", // blank lines inside flow map / block map must not break parsing
+R"(
+    ---
+    {a: 0, b: 1, c: 2,
+
+
+
+
+
+
+
+    d:
+       some scalar
+    }
+    ---
+    a: 0
+    b: 1
+    c: 2
+
+
+
+
+
+
+
+    d:
+       some scalar
+)",
+    N(STREAM, L{
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2"), N("d", "some scalar")}),
+        N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2"), N("d", "some scalar")}),
+    })
+);
+
+
+ADD_CASE_TO_GROUP("simple doc, tags at global scope, 9WXW", // yaml-test-suite 9WXW: %TAG directive scoping across documents
+R"(# Private
+!foo "bar"
+...
+# Global
+%TAG ! tag:example.com,2000:app/
+---
+!foo "bar"
+)",
+N(STREAM, L{
+    N(DOCVAL|VALQUO, TS("!foo", "bar")),
+    // strict YAML should result in this for the second doc:
+    //N(DOCVAL|VALQUO, TS("<tag:example.com,2000:app/foo>", "bar")),
+    // but since we don't do lookup, it should result in:
+    N(DOCVAL|VALQUO, TS("!foo", "bar")),
+})
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_simple_map.cpp b/thirdparty/ryml/test/test_simple_map.cpp
new file mode 100644
index 000000000..3e814e279
--- /dev/null
+++ b/thirdparty/ryml/test/test_simple_map.cpp
@@ -0,0 +1,1050 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(simple_map, issue274)
+{
+ Tree tree = parse_in_arena(R"(
+foo:
+- bar
+-
+baz: qux
+foo2:
+- bar
+-
+baz2: qux
+)");
+ std::cout << tree;
+ ASSERT_EQ(tree.rootref().num_children(), 4u);
+ ASSERT_EQ(tree["foo"].num_children(), 2u);
+ EXPECT_EQ(tree["foo"][0].val(), "bar");
+ EXPECT_EQ(tree["foo"][1].val(), "");
+ EXPECT_EQ(tree["baz"].val(), "qux");
+ ASSERT_EQ(tree["foo2"].num_children(), 2u);
+ EXPECT_EQ(tree["foo2"][0].val(), "bar");
+ EXPECT_EQ(tree["foo2"][1].val(), "");
+ EXPECT_EQ(tree["baz2"].val(), "qux");
+}
+
+TEST(simple_map, keys_with_leading_colon)
+{
+ Tree tree = parse_in_arena(R"(
+:foo:
+ :bar: a
+ :barbar: b
+ :barbarbar: c
+)");
+ EXPECT_EQ(tree[":foo"][":bar"].val(), "a");
+ EXPECT_EQ(tree[":foo"][":barbar"].val(), "b");
+ EXPECT_EQ(tree[":foo"][":barbarbar"].val(), "c");
+}
+
+TEST(simple_map, open_on_new_doc_without_space)
+{
+ Tree tree = parse_in_arena(R"(
+foo: bar
+---
+foo: bar
+---
+foo: bar
+---
+foo:
+...
+foo:
+---
+)");
+ EXPECT_EQ(tree.docref(0)["foo"].val(), "bar");
+ EXPECT_EQ(tree.docref(1)["foo"].val(), "bar");
+ EXPECT_EQ(tree.docref(2)["foo"].val(), "bar");
+ EXPECT_EQ(tree.docref(3)["foo"].val(), "");
+ EXPECT_EQ(tree.docref(4)["foo"].val(), "");
+}
+
+TEST(simple_map, open_on_new_doc_with_space_before_colon)
+{
+ Tree tree = parse_in_arena(R"(
+foo0 : bar
+---
+foo1 : bar # the " :" was causing an assert
+---
+foo2 : bar
+---
+foo3 : bar
+---
+foo4 : bar
+)");
+ EXPECT_EQ(tree.docref(0)["foo0"].val(), "bar");
+ EXPECT_EQ(tree.docref(1)["foo1"].val(), "bar");
+ EXPECT_EQ(tree.docref(2)["foo2"].val(), "bar");
+ EXPECT_EQ(tree.docref(3)["foo3"].val(), "bar");
+ EXPECT_EQ(tree.docref(4)["foo4"].val(), "bar");
+}
+
+
+TEST(simple_map, test_suite_UT92)
+{
+ csubstr yaml = R"(
+- { matches
+% : 20 }
+- { matches
+%: 20 }
+- { matches
+%:
+ 20 }
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t[0].has_child("matches %"));
+ EXPECT_EQ(t[0]["matches %"].val(), "20");
+ ASSERT_TRUE(t[0].has_child("matches %"));
+ ASSERT_TRUE(t[1].has_child("matches %"));
+ EXPECT_EQ(t[1]["matches %"].val(), "20");
+ ASSERT_TRUE(t[1].has_child("matches %"));
+ ASSERT_TRUE(t[2].has_child("matches %"));
+ EXPECT_EQ(t[2]["matches %"].val(), "20");
+ ASSERT_TRUE(t[2].has_child("matches %"));
+ });
+}
+
+TEST(simple_map, two_nested_flow_maps_not_accepted_because_of_container_key)
+{
+ Tree tree;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_arena("{{}}", &tree);
+ });
+}
+
+TEST(simple_map, many_unmatched_brackets)
+{
+ std::string src;
+ src.reserve(10000000u);
+ for(size_t num_brackets : {4u, 8u, 32u, 256u, 4096u, 1024u})
+ {
+ SCOPED_TRACE(num_brackets);
+ for(size_t i = src.size(); i < num_brackets; ++i)
+ src += '{';
+ Tree tree;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_place(to_substr(src), &tree);
+ });
+ }
+}
+
+TEST(simple_map, missing_quoted_key)
+{
+ csubstr yaml = R"(
+"top1" :
+ "key1" : scalar1
+'top2' :
+ 'key2' : scalar2
+---
+"top1" :
+ "key1" : scalar1
+'top2' :
+ 'key2' : scalar2
+---
+'x2': {'y': z}
+---
+'x3':
+ 'y': z
+---
+x4:
+ 'y': z
+---
+'x5':
+'y': z
+---
+x6:
+'y': z
+---
+'x7' : [
+ 'y' : z,
+ ]
+---
+"x8" :
+ "y" : value,
+ "x" : value
+"y" :
+ "y" : value,
+ "x" : value
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ size_t doc = 0;
+ EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"]["key1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"]["key2"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"]["key1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"]["key2"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["x2"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x2"]["y"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["x3"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x3"]["y"].is_key_quoted());
+ ++doc;
+ EXPECT_FALSE(t.docref(doc)["x4"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x4"]["y"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["x5"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["y"].is_key_quoted());
+ ++doc;
+ EXPECT_FALSE(t.docref(doc)["x6"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["y"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["x7"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x7"][0]["y"].is_key_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["x8"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x8"]["y"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["x8"]["x"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["y"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["y"]["y"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["y"]["x"].is_key_quoted());
+ });
+}
+
+#ifdef JAVAI
+void verify_error_is_reported(csubstr case_name, csubstr yaml, size_t col={})
+{
+ SCOPED_TRACE(case_name);
+ SCOPED_TRACE(yaml);
+ Tree tree;
+ Location loc = {};
+ loc.col = col;
+ ExpectError::do_check(&tree, [&](){
+ parse_in_arena(yaml, &tree);
+ }, loc);
+}
+
+TEST(simple_map, no_map_key_flow)
+{
+ verify_error_is_reported("map key", R"({ first: Sammy, last: Sosa }: foo)", 28u);
+}
+
+TEST(simple_map, no_map_key_block)
+{
+ verify_error_is_reported("map key", R"(?
+ first: Sammy
+ last: Sosa
+:
+ foo
+)");
+}
+
+TEST(simple_map, no_seq_key_flow)
+{
+ verify_error_is_reported("seq key", R"([Sammy, Sosa]: foo)", 28u);
+}
+
+TEST(simple_map, no_seq_key_block)
+{
+ verify_error_is_reported("map key", R"(?
+ - Sammy
+ - Sosa
+:
+ foo
+)");
+}
+#endif
+
+#ifdef RYML_WITH_TAB_TOKENS
+TEST(simple_map, block_tab_tokens)
+{
+ Tree tree = parse_in_arena(R"(
+--- # block, spaces only
+a: 0
+b: 1
+c: 2
+--- # block, tabs after token
+a: 0
+b: 1
+c: 2
+--- # block, tabs before and after token
+a : 0
+b : 1
+c : 2
+--- # block, tabs before token
+a : 0
+b : 1
+c : 2
+--- # block, tabs before newline
+a : 0
+b : 1
+c : 2
+)");
+ EXPECT_EQ(tree.docref(0)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(0)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(0)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(1)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(1)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(1)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(2)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(2)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(2)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(3)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(3)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(3)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(4)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(4)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(4)["c"].val(), csubstr("2"));
+}
+
+TEST(simple_map, flow_tab_tokens)
+{
+ Tree tree = parse_in_arena(R"(
+--- # flow, no tabs
+{a: 0, b: 1, c: 2}
+--- # flow, tabs after token
+{a: 0, b: 1, c: 2}
+--- # flow, tabs before and after token
+{a : 0, b : 1, c : 2}
+--- # flow, tabs before token
+{a : 0, b : 1, c : 2}
+--- # flow, tabs after val
+{a : 0 , b : 1 , c : 2 }
+--- # flow, tabs after val and comma
+{a : 0 , b : 1 , c : 2 }
+--- # flow, tabs everywhere
+ {
+ a : 0 ,
+ b : 1 ,
+ c : 2
+ }
+ )");
+ EXPECT_EQ(tree.docref(0)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(0)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(0)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(1)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(1)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(1)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(2)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(2)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(2)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(3)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(3)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(3)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(4)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(4)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(4)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(5)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(5)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(5)["c"].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(6)["a"].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(6)["b"].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(6)["c"].val(), csubstr("2"));
+}
+#endif // RYML_WITH_TAB_TOKENS
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(SIMPLE_MAP)
+{
+//
+ADD_CASE_TO_GROUP("empty map",
+"{}",
+ MAP
+);
+
+ADD_CASE_TO_GROUP("empty map, multiline",
+R"({
+}
+)",
+ MAP
+);
+
+ADD_CASE_TO_GROUP("empty map, multilines",
+R"({
+# foo bar baz akjasdkj
+}
+)",
+ MAP
+);
+
+ADD_CASE_TO_GROUP("simple map, explicit, single line",
+"{foo: 0, bar: 1, baz: 2, bat: 3}",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, explicit, multiline, unindented",
+R"({
+foo: 0,
+bar: 1,
+baz: 2,
+bat: 3
+})",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, explicit, multiline, indented",
+R"({
+ foo: 0,
+ bar: 1,
+ baz: 2,
+ bat: 3
+})",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map",
+R"(
+foo: 0
+bar: 1
+baz: 2
+bat: 3
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, values on next line",
+R"(
+foo:
+ 0
+bar:
+ 1
+baz:
+ 2
+bat:
+ 3
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, with comments",
+R"(
+foo: 0 # this is a foo
+bar: 1 # this is a bar
+baz: 2 # this is a baz
+bat: 3 # this is a bat
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, with comments interspersed",
+R"(
+# this is a foo
+foo: 0
+# this is a bar
+bar: 1
+# this is a baz
+baz: 2
+# this is a bat
+bat: 3
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, with comments interspersed implicit key X8DW",
+R"(
+? key
+# comment
+: value
+)",
+ L{N("key", "value")}
+);
+
+ADD_CASE_TO_GROUP("simple map, with indented comments interspersed, before",
+R"(
+ # this is a foo
+foo: 0
+ # this is a bar
+bar: 1
+ # this is a baz
+baz: 2
+ # this is a bat
+bat: 3
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, with indented comments interspersed, after",
+R"(
+foo: 0
+ # this is a foo
+bar: 1
+ # this is a bar
+baz: 2
+ # this is a baz
+bat: 3
+ # this is a bat
+)",
+ L{N{"foo", "0"}, N{"bar", "1"}, N{"baz", "2"}, N{"bat", "3"}}
+);
+
+ADD_CASE_TO_GROUP("simple map, null values",
+R"(
+key: val
+a:
+b:
+c:
+d:
+e:
+f:
+g:
+foo: bar
+)",
+L{N("key", "val"), N(KEYVAL, "a", /*"~"*/{}), N(KEYVAL, "b", /*"~"*/{}), N(KEYVAL, "c", /*"~"*/{}), N(KEYVAL, "d", /*"~"*/{}), N(KEYVAL, "e", /*"~"*/{}), N(KEYVAL, "f", /*"~"*/{}), N(KEYVAL, "g", /*"~"*/{}), N("foo", "bar"),}
+);
+
+ADD_CASE_TO_GROUP("simple map expl, null values 1",
+R"({key: val, a, b, c, d, e: , f: , g: , foo: bar})",
+L{N("key", "val"), N(KEYVAL, "a", /*"~"*/{}), N(KEYVAL, "b", /*"~"*/{}), N(KEYVAL, "c", /*"~"*/{}), N(KEYVAL, "d", /*"~"*/{}), N(KEYVAL, "e", /*"~"*/{}), N(KEYVAL, "f", /*"~"*/{}), N(KEYVAL, "g", /*"~"*/{}), N("foo", "bar"),}
+);
+
+ADD_CASE_TO_GROUP("simple map expl, null values 2",
+R"(
+- {a}
+- {a, b, c}
+- {a: 1, b: 2, c}
+- {a: 1, b, c: 2}
+- {a, b: 1, c: 2}
+)",
+L{
+ N(L{N(KEYVAL, "a", /*"~"*/{})}),
+ N(L{N(KEYVAL, "a", /*"~"*/{}), N(KEYVAL, "b", /*"~"*/{}), N(KEYVAL, "c", /*"~"*/{})}),
+ N(L{N("a", "1"), N("b", "2"), N(KEYVAL, "c", /*"~"*/{})}),
+ N(L{N("a", "1"), N(KEYVAL, "b", /*"~"*/{}), N("c", "2")}),
+ N(L{N(KEYVAL, "a", /*"~"*/{}), N("b", "1"), N("c", "2")}),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple map expl, null values 3, 4ABK",
+R"(
+- {foo: , bar: , baz: }
+- {foo:, bar:, baz:}
+- {foo:foo: , bar:bar: , baz:baz: }
+- {foo:foo:, bar:bar:, baz:baz:}
+)",
+L{
+ N(L{N(KEYVAL, "foo", {}), N(KEYVAL, "bar", {}), N(KEYVAL, "baz", {})}),
+ N(L{N(KEYVAL, "foo", {}), N(KEYVAL, "bar", {}), N(KEYVAL, "baz", {})}),
+ N(L{N(KEYVAL, "foo:foo", {}), N(KEYVAL, "bar:bar", {}), N(KEYVAL, "baz:baz", {})}),
+ N(L{N(KEYVAL, "foo:foo", {}), N(KEYVAL, "bar:bar", {}), N(KEYVAL, "baz:baz", {})}),
+});
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, comma",
+R"(
+a,b: val,000
+c,d: val, 000
+e,f: val , 000
+h,i: val ,000
+a, b: val,000
+c, d: val, 000
+e, f: val , 000
+h, i: val ,000
+a , b: val,000
+c , d: val, 000
+e , f: val , 000
+h , i: val ,000
+a ,b: val,000
+c ,d: val, 000
+e ,f: val , 000
+h ,i: val ,000
+)",
+ L{
+ N{"a,b", "val,000"}, N{"c,d", "val, 000"}, N{"e,f", "val , 000"}, N{"h,i", "val ,000"},
+ N{"a, b", "val,000"}, N{"c, d", "val, 000"}, N{"e, f", "val , 000"}, N{"h, i", "val ,000"},
+ N{"a , b", "val,000"}, N{"c , d", "val, 000"}, N{"e , f", "val , 000"}, N{"h , i", "val ,000"},
+ N{"a ,b", "val,000"}, N{"c ,d", "val, 000"}, N{"e ,f", "val , 000"}, N{"h ,i", "val ,000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, semicolon",
+R"(
+a:b: val:000
+c:d: "val: 000"
+e:f: "val : 000"
+h:i: val :000
+"a: b": val:000
+"c: d": "val: 000"
+"e: f": "val : 000"
+"h: i": val :000
+"a : b": val:000
+"c : d": "val: 000"
+"e : f": "val : 000"
+"h : i": val :000
+a :b: val:000
+c :d: "val: 000"
+e :f: "val : 000"
+h :i: val :000
+)",
+ L{
+ N{ "a:b", "val:000"}, N{QV, "c:d", "val: 000"}, N{QV, "e:f", "val : 000"}, N{ "h:i", "val :000"},
+ N{QK, "a: b", "val:000"}, N{QKV, "c: d", "val: 000"}, N{QKV, "e: f", "val : 000"},N{QK, "h: i", "val :000"},
+ N{QK, "a : b", "val:000"},N{QKV, "c : d", "val: 000"},N{QKV, "e : f", "val : 000"},N{QK, "h : i", "val :000"},
+ N{ "a :b", "val:000"}, N{QV, "c :d", "val: 000"}, N{QV, "e :f", "val : 000"}, N{ "h :i", "val :000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, cardinal",
+R"(
+a#b: val#000
+c#d: val# 000
+e#f: "val # 000"
+h#i: "val #000"
+a# b: val#000
+c# d: val# 000
+e# f: "val # 000"
+h# i: "val #000"
+"a # b": val#000
+"c # d": val# 000
+"e # f": "val # 000"
+"h # i": "val #000"
+"a #b": val#000
+"c #d": val# 000
+"e #f": "val # 000"
+"h #i": "val #000"
+)",
+ L{
+ N{ "a#b", "val#000"}, N{ "c#d", "val# 000"}, N{QV, "e#f", "val # 000"}, N{QV, "h#i", "val #000"},
+ N{ "a# b", "val#000"}, N{ "c# d", "val# 000"}, N{QV, "e# f", "val # 000"}, N{QV, "h# i", "val #000"},
+ N{QK, "a # b", "val#000"}, N{QK, "c # d", "val# 000"}, N{QKV, "e # f", "val # 000"}, N{QKV, "h # i", "val #000"},
+ N{QK, "a #b", "val#000"}, N{QK, "c #d", "val# 000"}, N{QKV, "e #f", "val # 000"}, N{QKV, "h #i", "val #000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, dash",
+R"(
+a-b: val-000
+c-d: val- 000
+e-f: val - 000
+h-i: val -000
+a- b: val-000
+c- d: val- 000
+e- f: val - 000
+h- i: val -000
+a - b: val-000
+c - d: val- 000
+e - f: val - 000
+h - i: val -000
+a -b: val-000
+c -d: val- 000
+e -f: val - 000
+h -i: val -000
+)",
+ L{
+ N{"a-b", "val-000"}, N{"c-d", "val- 000"}, N{"e-f", "val - 000"}, N{"h-i", "val -000"},
+ N{"a- b", "val-000"}, N{"c- d", "val- 000"}, N{"e- f", "val - 000"}, N{"h- i", "val -000"},
+ N{"a - b", "val-000"}, N{"c - d", "val- 000"}, N{"e - f", "val - 000"}, N{"h - i", "val -000"},
+ N{"a -b", "val-000"}, N{"c -d", "val- 000"}, N{"e -f", "val - 000"}, N{"h -i", "val -000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, left-bracket",
+R"(
+a[b: val[000
+c[d: val[ 000
+e[f: val [ 000
+h[i: val [000
+a[ b: val[000
+c[ d: val[ 000
+e[ f: val [ 000
+h[ i: val [000
+a [ b: val[000
+c [ d: val[ 000
+e [ f: val [ 000
+h [ i: val [000
+a [b: val[000
+c [d: val[ 000
+e [f: val [ 000
+h [i: val [000
+)",
+ L{
+ N{"a[b", "val[000"}, N{"c[d", "val[ 000"}, N{"e[f", "val [ 000"}, N{"h[i", "val [000"},
+ N{"a[ b", "val[000"}, N{"c[ d", "val[ 000"}, N{"e[ f", "val [ 000"}, N{"h[ i", "val [000"},
+ N{"a [ b", "val[000"}, N{"c [ d", "val[ 000"}, N{"e [ f", "val [ 000"}, N{"h [ i", "val [000"},
+ N{"a [b", "val[000"}, N{"c [d", "val[ 000"}, N{"e [f", "val [ 000"}, N{"h [i", "val [000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, right-bracket",
+R"(
+a]b: val]000
+c]d: val] 000
+e]f: val ] 000
+h]i: val ]000
+a] b: val]000
+c] d: val] 000
+e] f: val ] 000
+h] i: val ]000
+a ] b: val]000
+c ] d: val] 000
+e ] f: val ] 000
+h ] i: val ]000
+a ]b: val]000
+c ]d: val] 000
+e ]f: val ] 000
+h ]i: val ]000
+)",
+ L{
+ N{"a]b", "val]000"}, N{"c]d", "val] 000"}, N{"e]f", "val ] 000"}, N{"h]i", "val ]000"},
+ N{"a] b", "val]000"}, N{"c] d", "val] 000"}, N{"e] f", "val ] 000"}, N{"h] i", "val ]000"},
+ N{"a ] b", "val]000"}, N{"c ] d", "val] 000"}, N{"e ] f", "val ] 000"}, N{"h ] i", "val ]000"},
+ N{"a ]b", "val]000"}, N{"c ]d", "val] 000"}, N{"e ]f", "val ] 000"}, N{"h ]i", "val ]000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, left-curly",
+R"(
+a{b: val{000
+c{d: val{ 000
+e{f: val { 000
+h{i: val {000
+a{ b: val{000
+c{ d: val{ 000
+e{ f: val { 000
+h{ i: val {000
+a { b: val{000
+c { d: val{ 000
+e { f: val { 000
+h { i: val {000
+a {b: val{000
+c {d: val{ 000
+e {f: val { 000
+h {i: val {000
+)",
+ L{
+ N{"a{b", "val{000"}, N{"c{d", "val{ 000"}, N{"e{f", "val { 000"}, N{"h{i", "val {000"},
+ N{"a{ b", "val{000"}, N{"c{ d", "val{ 000"}, N{"e{ f", "val { 000"}, N{"h{ i", "val {000"},
+ N{"a { b", "val{000"}, N{"c { d", "val{ 000"}, N{"e { f", "val { 000"}, N{"h { i", "val {000"},
+ N{"a {b", "val{000"}, N{"c {d", "val{ 000"}, N{"e {f", "val { 000"}, N{"h {i", "val {000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, scalars with special chars, right-curly",
+R"(
+a}b: val}000
+c}d: val} 000
+e}f: val } 000
+h}i: val }000
+a} b: val}000
+c} d: val} 000
+e} f: val } 000
+h} i: val }000
+a } b: val}000
+c } d: val} 000
+e } f: val } 000
+h } i: val }000
+a }b: val}000
+c }d: val} 000
+e }f: val } 000
+h }i: val }000
+)",
+ L{
+ N{"a}b", "val}000"}, N{"c}d", "val} 000"}, N{"e}f", "val } 000"}, N{"h}i", "val }000"},
+ N{"a} b", "val}000"}, N{"c} d", "val} 000"}, N{"e} f", "val } 000"}, N{"h} i", "val }000"},
+ N{"a } b", "val}000"}, N{"c } d", "val} 000"}, N{"e } f", "val } 000"}, N{"h } i", "val }000"},
+ N{"a }b", "val}000"}, N{"c }d", "val} 000"}, N{"e }f", "val } 000"}, N{"h }i", "val }000"},
+}
+);
+
+ADD_CASE_TO_GROUP("simple map expl, scalars with special chars, comma",
+R"({
+a0,b0: val0,0000
+c0,d0: val0, 0000
+e0,f0: val0 , 0000
+h0,i0: val0 ,0000
+a1, b1: val1,0001
+c1, d1: val1, 0001
+e1, f1: val1 , 0001
+h1, i1: val1 ,0001
+a2 , b2: val2,0002
+c2 , d2: val2, 0002
+e2 , f2: val2 , 0002
+h2 , i2: val2 ,0002
+a3 ,b3: val3,0003
+c3 ,d3: val3, 0003
+e3 ,f3: val3 , 0003
+h3 ,i3: val3 ,0003
+})",
+ L{ // this is crazy...
+ N(KEYVAL, "a0", /*"~"*/{}),
+ N("b0", "val0"),
+ N(KEYVAL, "0000 c0", /*"~"*/{}),
+ N("d0", "val0"), N(KEYVAL, "0000 e0", /*"~"*/{}),
+ N("f0", "val0"), N(KEYVAL, "0000 h0", /*"~"*/{}),
+ N("i0", "val0"), N(KEYVAL, "0000 a1", /*"~"*/{}),
+ N("b1", "val1"), N(KEYVAL, "0001 c1", /*"~"*/{}),
+ N("d1", "val1"), N(KEYVAL, "0001 e1", /*"~"*/{}),
+ N("f1", "val1"), N(KEYVAL, "0001 h1", /*"~"*/{}),
+ N("i1", "val1"), N(KEYVAL, "0001 a2", /*"~"*/{}),
+ N("b2", "val2"), N(KEYVAL, "0002 c2", /*"~"*/{}),
+ N("d2", "val2"), N(KEYVAL, "0002 e2", /*"~"*/{}),
+ N("f2", "val2"), N(KEYVAL, "0002 h2", /*"~"*/{}),
+ N("i2", "val2"), N(KEYVAL, "0002 a3", /*"~"*/{}),
+ N("b3", "val3"), N(KEYVAL, "0003 c3", /*"~"*/{}),
+ N("d3", "val3"), N(KEYVAL, "0003 e3", /*"~"*/{}),
+ N("f3", "val3"), N(KEYVAL, "0003 h3", /*"~"*/{}),
+ N("i3", "val3"), N(KEYVAL, "0003", /*"~"*/{}),
+}
+);
+
+
+ADD_CASE_TO_GROUP("simple map, spaces before semicolon, issue54",
+R"(
+foo : crl
+keyA :
+ keyA.B : test value
+"key C" : val C
+'key D' : val D
+elm2 :
+ "key C" : val C
+ 'key D' : val D
+ key E : val E
+elm3 :
+ 'key D' : val D
+ "key C" : val C
+ key E : val E
+elm4 :
+ key E : val E
+ 'key D' : val D
+ "key C" : val C
+)",
+L{
+ N("foo", "crl"),
+ N("keyA", L{N("keyA.B", "test value")}),
+ N(QK, "key C", "val C"),
+ N(QK, "key D", "val D"),
+ N("elm2", L{N(QK, "key C", "val C"), N(QK, "key D", "val D"), N("key E", "val E"),}),
+ N("elm3", L{N(QK, "key D", "val D"), N(QK, "key C", "val C"), N("key E", "val E"),}),
+ N("elm4", L{N("key E", "val E"), N(QK, "key D", "val D"), N(QK, "key C", "val C"),}),
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, spaces before semicolon, issue65, v0",
+R"({a : b})",
+L{
+ N("a", "b"),
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, spaces before semicolon, issue65, v1",
+R"(a : b)",
+L{
+ N("a", "b"),
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, spaces before semicolon, issue65, v2",
+R"(
+is it ok : let's see
+ok : {a : b, c : d, e : f,}
+must be ok :
+ c0 : d
+ c1 : d
+ c2 : d
+)",
+L{
+ N("is it ok", "let's see"),
+ N("ok", L{N("a", "b"), N("c", "d"), N("e", "f")}),
+ N("must be ok", L{N("c0", "d"), N("c1", "d"), N("c2", "d")}),
+}
+);
+
+ADD_CASE_TO_GROUP("simple map, spaces before semicolon, issue65, v3",
+R"({
+oka: {a : b},
+is it ok : let's see,
+okb: {a : b},
+ok : {a : b},
+must be ok : {
+ c0 : d,
+ c1 : d,
+ c2 : d,
+}
+})",
+L{
+ N("oka", L{N("a", "b")}),
+ N("is it ok", "let's see"),
+ N("okb", L{N("a", "b")}),
+ N("ok", L{N("a", "b")}),
+ N("must be ok", L{N("c0", "d"), N("c1", "d"), N("c2", "d")}),
+});
+
+ADD_CASE_TO_GROUP("simple map, empty keys 2JQS, v1",
+R"(
+: a
+: b
+)",
+N(MAP, L{
+ N("", "a"),
+ N("", "b"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, empty keys 2JQS, v2",
+R"(
+:
+ a
+:
+ b
+)",
+N(MAP, L{
+ N("", "a"),
+ N("", "b"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, empty keys 4ABK, v1",
+R"({
+: a,
+: b,
+})",
+N(MAP, L{
+ N("", "a"),
+ N("", "b"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, empty keys 4ABK, v2",
+R"({
+:
+ a,
+:
+ b,
+})",
+N(MAP, L{
+ N("", "a"),
+ N("", "b"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, values on next line 4MUZ, v1",
+R"({foo
+: bar,
+baz
+: bat
+})",
+N(MAP, L{
+ N("foo", "bar"),
+ N("baz", "bat"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, values on next line 4MUZ, v2",
+R"({foo
+:
+ bar,
+baz
+:
+ bat
+})",
+N(MAP, L{
+ N("foo", "bar"),
+ N("baz", "bat"),
+}));
+
+/* this is not valid YAML: plain scalars can't have ':' as a token
+ADD_CASE_TO_GROUP("simple map, values on next line 4MUZ, v3",
+R"(foo
+: bar
+baz
+: bat
+)",
+N(MAP, L{
+ N("foo", "bar"),
+ N("baz", "bat"),
+}));
+
+ADD_CASE_TO_GROUP("simple map, values on next line 4MUZ, v4",
+R"(foo
+:
+ bar
+baz
+:
+ bat
+)",
+N(MAP, L{
+ N("foo", "bar"),
+ N("baz", "bat"),
+}));
+*/
+
+ADD_CASE_TO_GROUP("json compact",
+R"(---
+{
+"foo0":"bar",
+"foo1":bar,
+"foo3":{"a":map},
+"foo5":[a,seq],
+}
+--- {"foo0":"bar","foo1":bar,"foo3":{"a":map},"foo5":[a,seq],}
+)",
+N(STREAM,
+ L{
+ N(DOCMAP, L{
+ N(KEYVAL|KEYQUO|VALQUO,"foo0","bar"),
+ N(KEYVAL|KEYQUO,"foo1","bar"),
+ N(KEYMAP|KEYQUO,"foo3", L{N(KEYVAL|KEYQUO,"a","map")}),
+ N(KEYSEQ|KEYQUO,"foo5", L{N("a"),N("seq")}),
+ }),
+ N(DOCMAP, L{
+ N(KEYVAL|KEYQUO|VALQUO,"foo0","bar"),
+ N(KEYVAL|KEYQUO,"foo1","bar"),
+ N(KEYMAP|KEYQUO,"foo3", L{N(KEYVAL|KEYQUO,"a","map")}),
+ N(KEYSEQ|KEYQUO,"foo5", L{N("a"),N("seq")}),
+ }),
+ })
+);
+
+
+ADD_CASE_TO_GROUP("issue223 0 fails",
+R"(
+ A:
+ - 1
+ - 4
+ B:
+ - 2
+ - 3
+ )",
+N(L{
+ N("A", L{N("1"), N("4")}),
+ N("B", L{N("2"), N("3")}),
+ })
+);
+
+ADD_CASE_TO_GROUP("issue223 1 passes",
+R"(A:
+ - 1
+ - 4
+B:
+ - 2
+ - 3
+)",
+N(L{
+ N("A", L{N("1"), N("4")}),
+ N("B", L{N("2"), N("3")}),
+ })
+);
+
+ADD_CASE_TO_GROUP("issue223 2 passes",
+R"(A:
+ - 1
+ - 4
+B:
+ - 2
+ - 3)",
+N(L{
+ N("A", L{N("1"), N("4")}),
+ N("B", L{N("2"), N("3")}),
+ })
+);
+ADD_CASE_TO_GROUP("issue223 3 fails",
+R"(A:
+ - 1
+ - 4
+B:
+ - 2
+ - 3
+ )",
+N(L{
+ N("A", L{N("1"), N("4")}),
+ N("B", L{N("2"), N("3")}),
+ })
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_simple_seq.cpp b/thirdparty/ryml/test/test_simple_seq.cpp
new file mode 100644
index 000000000..a24f5d4f7
--- /dev/null
+++ b/thirdparty/ryml/test/test_simple_seq.cpp
@@ -0,0 +1,695 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(simple_seq, bad_seq1)
+{
+ Tree tree;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_arena(R"(
+---
+[ a, b, c ] ]
+)", &tree);
+ });
+}
+
+TEST(simple_seq, bad_seq2)
+{
+ Tree tree;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_arena(R"(
+---
+[ [ a, b, c ]
+)", &tree);
+ });
+}
+
+TEST(simple_seq, two_nested_flow_seqs)
+{
+ Tree tree = parse_in_arena("[[]]");
+ EXPECT_TRUE(tree.rootref().is_seq());
+ ASSERT_TRUE(tree.rootref().has_children());
+ EXPECT_TRUE(tree.rootref().first_child().is_seq());
+ ASSERT_FALSE(tree.rootref().first_child().has_children());
+}
+
+TEST(simple_seq, many_unmatched_brackets)
+{
+ std::string src;
+ src.reserve(10000000u);
+ for(size_t num_brackets : {4u, 8u, 32u})
+ {
+ SCOPED_TRACE(num_brackets);
+ for(size_t i = src.size(); i < num_brackets; ++i)
+ src += '[';
+ Tree tree;
+ Location loc = {};
+ loc.line = 1;
+ loc.col = num_brackets + 1u;
+ ExpectError::do_check(&tree, [&]{
+ parse_in_place(to_substr(src), &tree);
+ }, loc);
+ }
+}
+
+TEST(simple_seq, missing_quoted_key)
+{
+ csubstr yaml = R"(
+"top1" :
+ ["0", "1", ]
+'top2' :
+ ["0", "1", ]
+---
+"top1" :
+ - "0"
+ - "1"
+'top2' :
+ - "0"
+ - "1"
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ size_t doc = 0;
+ EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"][0].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"][1].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"][0].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"][1].is_val_quoted());
+ ++doc;
+ EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"][0].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top1"][1].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"][0].is_val_quoted());
+ EXPECT_TRUE(t.docref(doc)["top2"][1].is_val_quoted());
+ });
+}
+
+TEST(simple_seq, deeply_nested_to_cover_parse_stack_resizes)
+{
+ csubstr yaml = R"(
+[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[0, 1, 2, 3, 4, 5, 6, 7]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
+)";
+ Tree t = parse_in_arena(yaml);
+ size_t id = t.root_id();
+ while(t.has_children(id))
+ id = t.first_child(id);
+ ASSERT_TRUE(t.ref(id).has_parent());
+ NodeRef seq = t.ref(id).parent();
+ ASSERT_TRUE(seq.is_seq());
+ EXPECT_EQ(seq[0].val(), csubstr("0"));
+ EXPECT_EQ(seq[1].val(), csubstr("1"));
+ EXPECT_EQ(seq[2].val(), csubstr("2"));
+ EXPECT_EQ(seq[3].val(), csubstr("3"));
+ EXPECT_EQ(seq[4].val(), csubstr("4"));
+ EXPECT_EQ(seq[5].val(), csubstr("5"));
+ EXPECT_EQ(seq[6].val(), csubstr("6"));
+ EXPECT_EQ(seq[7].val(), csubstr("7"));
+}
+
+
+#ifdef RYML_WITH_TAB_TOKENS
+TEST(simple_seq, block_tab_tokens)
+{
+ Tree tree = parse_in_arena(R"(
+--- # block, spaces only
+- 0
+- 1
+- 2
+--- # block, tabs after
+- 0
+- 1
+- 2
+--- # block, tabs after token, and after val
+- 0
+- 1
+- 2
+)");
+ EXPECT_EQ(tree.docref(0)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(0)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(0)[2].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(1)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(1)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(1)[2].val(), csubstr("2"));
+}
+
+TEST(simple_seq, flow_tab_tokens)
+{
+ Tree tree = parse_in_arena(R"(
+--- # flow, no tabs
+[0, 1, 2]
+--- # flow, tabs after
+[0, 1, 2]
+--- # flow, tabs before and after
+[0 , 1 , 2]
+--- # flow, tabs everywhere
+ [
+ 0 ,
+ 1 ,
+ 2 ,
+ ]
+)");
+ EXPECT_EQ(tree.docref(0)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(0)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(0)[2].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(1)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(1)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(1)[2].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(2)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(2)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(2)[2].val(), csubstr("2"));
+ EXPECT_EQ(tree.docref(3)[0].val(), csubstr("0"));
+ EXPECT_EQ(tree.docref(3)[1].val(), csubstr("1"));
+ EXPECT_EQ(tree.docref(3)[2].val(), csubstr("2"));
+}
+#endif // RYML_WITH_TAB_TOKENS
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+CASE_GROUP(SIMPLE_SEQ)
+{
+
+ADD_CASE_TO_GROUP("simple seq",
+R"(- 0
+- 1
+- 2
+- 3
+)",
+L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+
+ADD_CASE_TO_GROUP("simple seq, explicit, single line",
+"[0, 1, 2, 3]",
+L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, single line, trailcomma",
+"[0, 1, 2, 3,]",
+L{N{"0"}, N{"1"}, N{"2"}, N{"3"},}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, multiline, unindented",
+R"([
+0,
+1,
+2,
+3
+])",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, multiline, unindented, trailcomma",
+R"([
+0,
+1,
+2,
+3,
+])",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, multiline, comments inline",
+R"([
+0, # bla0
+1, # bla1
+2, # bla2
+3 # bla3
+])",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, multiline, comments prev line",
+R"([
+# bla0
+0,
+# bla1
+1,
+# bla2
+2,
+# bla3
+3
+])",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, explicit, multiline, indented",
+R"([
+ 0,
+ 1,
+ 2,
+ 3
+])",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, comments inline",
+R"(
+- 0 # this is a foo
+- 1 # this is a bar
+- 2 # this is a bar
+- 3 # this is a bar
+)",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, comments prev line",
+R"(
+# this is a foo
+- 0
+# this is a bar
+- 1
+# this is a baz
+- 2
+# this is a bat
+- 3
+)",
+ L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, comma",
+R"(
+- a,b
+- c,d
+- e,f
+- a, b
+- c, d
+- e, f
+- a , b
+- c , d
+- e , f
+- a ,b
+- c ,d
+- e ,f
+)",
+L{N{"a,b"}, N{"c,d"}, N{"e,f"},
+ N{"a, b"}, N{"c, d"}, N{"e, f"},
+ N{"a , b"}, N{"c , d"}, N{"e , f"},
+ N{"a ,b"}, N{"c ,d"}, N{"e ,f"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, colon",
+R"(
+- a:b
+- "c:d"
+- 'e:f'
+- a :b
+- "c :d"
+- 'e :f'
+- a : b # THIS IS A KEY-VAL!!!
+- "c : d"
+- 'e : f'
+- a: b # THIS IS A KEY-VAL!!!
+- "c: d"
+- 'e: f'
+)",
+L{
+ N("a:b"), N(QV, "c:d"), N(QV, "e:f"),
+ N("a :b"), N(QV, "c :d"), N(QV, "e :f"),
+ N(L{N("a", "b")}), N(QV, "c : d"), N(QV, "e : f"),
+ N(L{N("a", "b")}), N(QV, "c: d"), N(QV, "e: f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, cardinal",
+R"(
+- a#b
+- "a#b"
+- 'a#b'
+- a# b
+- "a# b"
+- 'a# b'
+- a # b
+- "a # b"
+- 'a # b'
+- a #b
+- "a #b"
+- 'a #b'
+)",
+L{
+ N{"a#b"}, N{QV, "a#b"}, N{QV, "a#b"},
+ N{"a# b"}, N{QV, "a# b"}, N{QV, "a# b"},
+ N{"a"}, N{QV, "a # b"}, N{QV, "a # b"},
+ N{"a"}, N{QV, "a #b"}, N{QV, "a #b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, dash",
+R"(
+- a-b
+- "a-b"
+- 'a-b'
+- a- b
+- "a- b"
+- 'a- b'
+- a - b
+- "a - b"
+- 'a - b'
+- a -b
+- "a -b"
+- 'a -b'
+)",
+L{
+ N{"a-b"}, N{QV, "a-b"}, N{QV, "a-b"},
+ N{"a- b"}, N{QV, "a- b"}, N{QV, "a- b"},
+ N{"a - b"}, N{QV, "a - b"}, N{QV, "a - b"},
+ N{"a -b"}, N{QV, "a -b"}, N{QV, "a -b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, left-curly",
+R"(
+- a{b
+- "a{b"
+- 'a{b'
+- a{ b
+- "a{ b"
+- 'a{ b'
+- a { b
+- "a { b"
+- 'a { b'
+- a {b
+- "a {b"
+- 'a {b'
+)",
+L{
+ N{"a{b"}, N{QV, "a{b"}, N{QV, "a{b"},
+ N{"a{ b"}, N{QV, "a{ b"}, N{QV, "a{ b"},
+ N{"a { b"}, N{QV, "a { b"}, N{QV, "a { b"},
+ N{"a {b"}, N{QV, "a {b"}, N{QV, "a {b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, right-curly",
+R"(
+- a}b
+- "a}b"
+- 'a}b'
+- a} b
+- "a} b"
+- 'a} b'
+- a } b
+- "a } b"
+- 'a } b'
+- a }b
+- "a }b"
+- 'a }b'
+)",
+L{
+ N{"a}b"}, N{QV, "a}b"}, N{QV, "a}b"},
+ N{"a} b"}, N{QV, "a} b"}, N{QV, "a} b"},
+ N{"a } b"}, N{QV, "a } b"}, N{QV, "a } b"},
+ N{"a }b"}, N{QV, "a }b"}, N{QV, "a }b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, left-bracket",
+R"(
+- a[b
+- "a[b"
+- 'a[b'
+- a[ b
+- "a[ b"
+- 'a[ b'
+- a [ b
+- "a [ b"
+- 'a [ b'
+- a [b
+- "a [b"
+- 'a [b'
+)",
+L{
+ N{"a[b"}, N{QV, "a[b"}, N{QV, "a[b"},
+ N{"a[ b"}, N{QV, "a[ b"}, N{QV, "a[ b"},
+ N{"a [ b"}, N{QV, "a [ b"}, N{QV, "a [ b"},
+ N{"a [b"}, N{QV, "a [b"}, N{QV, "a [b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, scalars with special chars, right-bracket",
+R"(
+- a]b
+- "a]b"
+- 'a]b'
+- a] b
+- "a] b"
+- 'a] b'
+- a ] b
+- "a ] b"
+- 'a ] b'
+- a ]b
+- "a ]b"
+- 'a ]b'
+)",
+L{
+ N{"a]b"}, N{QV, "a]b"}, N{QV, "a]b"},
+ N{"a] b"}, N{QV, "a] b"}, N{QV, "a] b"},
+ N{"a ] b"}, N{QV, "a ] b"}, N{QV, "a ] b"},
+ N{"a ]b"}, N{QV, "a ]b"}, N{QV, "a ]b"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, comma",
+R"([
+ a,b, "c,d", 'e,f',
+ a, b, "c, d", 'e, f',
+ a , b, "c , d", 'e , f',
+ a ,b, "c ,d", 'e ,f',
+])",
+L{
+ N{"a"}, N("b"), N(QV, "c,d"), N(QV, "e,f"),
+ N{"a"}, N("b"), N(QV, "c, d"), N(QV, "e, f"),
+ N{"a"}, N("b"), N(QV, "c , d"), N(QV, "e , f"),
+ N{"a"}, N("b"), N(QV, "c ,d"), N(QV, "e ,f"),
+ }
+);
+
+#ifdef RYML_WITH_TAB_TOKENS
+#define _ryml_with_or_without_tabs(with, without) with
+#else
+#define _ryml_with_or_without_tabs(with, without) without
+#endif
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, colon",
+R"(
+- [[], :@]
+- [[], :%]
+- [[], :^]
+- [[], :$]
+#- [[], ::]
+- [[], : ]
+- [[], :`]
+)",
+L{
+ N(L{N(SEQ), N(":@")}),
+ N(L{N(SEQ), N(":%")}),
+ N(L{N(SEQ), N(":^")}),
+ N(L{N(SEQ), N(":$")}),
+ //N(L{N(SEQ), N("::")}), TODO: yaml playground
+ N(L{N(SEQ), _ryml_with_or_without_tabs(N(MAP, L{N("", "")}), N(": "))}),
+ N(L{N(SEQ), N(":`")}),
+}
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, colon 2",
+R"([
+# a:b, # not legal
+ "c:d", 'e:f',
+# a: b, # not legal
+ "c: d", 'e: f',
+# a : b, # not legal
+ "c : d", 'e : f',
+# a :b, # not legal
+ "c :d", 'e :f',
+])",
+L{/*...not legal...*/
+ /*N{"a"}, N("b"),*/ N(QV, "c:d"), N(QV, "e:f"),
+ /*N{"a"}, N("b"),*/ N(QV, "c: d"), N(QV, "e: f"),
+ /*N{"a"}, N("b"),*/ N(QV, "c : d"), N(QV, "e : f"),
+ /*N{"a"}, N("b"),*/ N(QV, "c :d"), N(QV, "e :f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, cardinal",
+R"([
+ a#b, "c#d", 'e#f',
+ a# b, "c# d", 'e# f',
+ a # b, "c # d", 'e # f',
+, # this is needed because of the comment above
+ a #b, "c #d", 'e #f',
+])",
+L{
+ N{"a#b"}, N(QV, "c#d"), N(QV, "e#f"),
+ N{"a# b"}, N(QV, "c# d"), N(QV, "e# f"),
+ N{"a"},
+ N{"a"},
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, dash",
+R"([
+ a-b, "c-d", 'e-f',
+ a- b, "c- d", 'e- f',
+ a - b, "c - d", 'e - f',
+ a -b, "c -d", 'e -f',
+])",
+L{
+ N{"a-b"}, N(QV, "c-d"), N(QV, "e-f"),
+ N{"a- b"}, N(QV, "c- d"), N(QV, "e- f"),
+ N{"a - b"}, N(QV, "c - d"), N(QV, "e - f"),
+ N{"a -b"}, N(QV, "c -d"), N(QV, "e -f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, left-bracket",
+R"([
+# a[b,
+ "c[d", 'e[f',
+# a[ b,
+ "c[ d", 'e[ f',
+# a [ b,
+ "c [ d", 'e [ f',
+# a [b,
+ "c [d", 'e [f',
+])",
+L{
+ /*N{"a[b"}, */ N(QV, "c[d"), N(QV, "e[f"),
+ /*N{"a[ b"}, */ N(QV, "c[ d"), N(QV, "e[ f"),
+ /*N{"a [ b"},*/ N(QV, "c [ d"), N(QV, "e [ f"),
+ /*N{"a [b"}, */ N(QV, "c [d"), N(QV, "e [f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, right-bracket",
+R"([
+# a]b,
+ "c]d", 'e]f',
+# a] b,
+ "c] d", 'e] f',
+# a ] b,
+ "c ] d", 'e ] f',
+# a ]b,
+ "c ]d", 'e ]f',
+])",
+L{
+ /*N{"a]b"}, */ N(QV, "c]d"), N(QV, "e]f"),
+ /*N{"a] b"}, */ N(QV, "c] d"), N(QV, "e] f"),
+ /*N{"a ] b"},*/ N(QV, "c ] d"), N(QV, "e ] f"),
+ /*N{"a ]b"}, */ N(QV, "c ]d"), N(QV, "e ]f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, left-curly",
+R"([
+# a{b,
+ "c{d", 'e{f',
+# a{ b,
+ "c{ d", 'e{ f',
+# a { b,
+ "c { d", 'e { f',
+# a {b,
+ "c {d", 'e {f',
+])",
+L{
+ /*N{"a{b"}, */ N(QV, "c{d"), N(QV, "e{f"),
+ /*N{"a{ b"}, */ N(QV, "c{ d"), N(QV, "e{ f"),
+ /*N{"a { b"},*/ N(QV, "c { d"), N(QV, "e { f"),
+ /*N{"a {b"}, */ N(QV, "c {d"), N(QV, "e {f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, right-curly",
+R"([
+# a}b,
+ "c}d", 'e}f',
+# a} b,
+ "c} d", 'e} f',
+# a } b,
+ "c } d", 'e } f',
+# a }b,
+ "c }d", 'e }f',
+])",
+L{
+ /*N{"a}b"}, */ N(QV, "c}d"), N(QV, "e}f"),
+ /*N{"a} b"}, */ N(QV, "c} d"), N(QV, "e} f"),
+ /*N{"a } b"},*/ N(QV, "c } d"), N(QV, "e } f"),
+ /*N{"a }b"}, */ N(QV, "c }d"), N(QV, "e }f"),
+ }
+);
+
+ADD_CASE_TO_GROUP("simple seq, issue 28",
+R"(# was failing on https://github.com/biojppm/rapidyaml/issues/28
+enemy:
+- actors:
+ - {name: Enemy_Bokoblin_Junior, value: 4.0}
+ - {name: Enemy_Bokoblin_Middle, value: 16.0}
+ - {name: Enemy_Bokoblin_Senior, value: 32.0}
+ - {name: Enemy_Bokoblin_Dark, value: 48.0}
+ species: BokoblinSeries
+enemy2:
+- actors:
+ - {name: Enemy_Bokoblin_Junior, value: 4.0}
+ - {name: Enemy_Bokoblin_Middle, value: 16.0}
+ - {name: Enemy_Bokoblin_Senior, value: 32.0}
+ - {name: Enemy_Bokoblin_Dark, value: 48.0}
+ species: BokoblinSeries
+)",
+L{
+ N("enemy", L{N(L{
+ N("actors", L{
+ N(L{N("name", "Enemy_Bokoblin_Junior"), N("value", "4.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Middle"), N("value", "16.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Senior"), N("value", "32.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Dark"), N("value", "48.0"),}),
+ }),
+ N("species", "BokoblinSeries"),
+ })
+ }),
+ N("enemy2", L{N(L{
+ N("actors", L{
+ N(L{N("name", "Enemy_Bokoblin_Junior"), N("value", "4.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Middle"), N("value", "16.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Senior"), N("value", "32.0"),}),
+ N(L{N("name", "Enemy_Bokoblin_Dark"), N("value", "48.0"),}),
+ }),
+ N("species", "BokoblinSeries"),
+ })
+ }),
+});
+
+ADD_CASE_TO_GROUP("simple seq, invalid character 1", EXPECT_PARSE_ERROR,
+R"(- 0 # this is a foo
+}
+)",
+ LineCol(2, 1)
+);
+
+ADD_CASE_TO_GROUP("simple seq, invalid character 2", EXPECT_PARSE_ERROR,
+R"(- 0 # this is a foo
+]
+)",
+ LineCol(2, 1)
+);
+
+ADD_CASE_TO_GROUP("simple seq, invalid character 3", EXPECT_PARSE_ERROR,
+R"(- 0 # this is a foo
+:
+)",
+ LineCol(2, 1)
+);
+
+ADD_CASE_TO_GROUP("simple seq, invalid character 4", EXPECT_PARSE_ERROR,
+R"(- 0 # this is a foo
+abcdef!
+)",
+ LineCol(2, 1)
+);
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_simple_set.cpp b/thirdparty/ryml/test/test_simple_set.cpp
new file mode 100644
index 000000000..f209c79c7
--- /dev/null
+++ b/thirdparty/ryml/test/test_simple_set.cpp
@@ -0,0 +1,144 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+TEST(simple_set, emit)
+{
+ const char yaml[] = R"(!!set
+? aa
+? bb
+? cc
+)";
+ std::string expected = R"(!!set
+aa:
+bb:
+cc:
+)";
+ Tree t = parse_in_arena(yaml);
+ auto s = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(expected, s);
+}
+
+
+TEST(simple_set, emit_doc)
+{
+ const char yaml[] = R"(--- !!set
+? aa
+? bb
+? cc
+)";
+ std::string expected = R"(--- !!set
+aa:
+bb:
+cc:
+)";
+ Tree t = parse_in_arena(yaml);
+ auto s = emitrs_yaml<std::string>(t);
+ EXPECT_EQ(expected, s);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(SIMPLE_SET)
+{
+
+ADD_CASE_TO_GROUP("doc as set, missing value",
+R"(!!set
+? a
+? b
+?
+)",
+N(TL("!!set", L{N(KEYVAL, "a", {}), N(KEYVAL, "b", {}), N(KEYVAL, {}, "")}))
+);
+
+ADD_CASE_TO_GROUP("doc as set, implicit",
+R"(!!set
+? a
+? b
+)",
+N(TL("!!set", L{N(KEYVAL, "a", {}), N(KEYVAL, "b", {})}))
+);
+
+ADD_CASE_TO_GROUP("doc as set",
+R"(--- !!set
+? aa
+? bb
+? cc
+)",
+N(STREAM, L{N(DOCMAP, TL("!!set", L{
+ N(KEYVAL, "aa", /*"~"*/{}),
+ N(KEYVAL, "bb", /*"~"*/{}),
+ N(KEYVAL, "cc", /*"~"*/{})})
+)}));
+
+ADD_CASE_TO_GROUP("sets 2XXW",
+R"(
+--- !!set
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+)",
+N(STREAM, L{N(DOCMAP, TL("!!set", L{
+ N(KEYVAL, "Mark McGwire", /*"~"*/{}),
+ N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
+ N(KEYVAL, "Ken Griff", /*"~"*/{}),})
+)}));
+
+ADD_CASE_TO_GROUP("sets 2XXW, indented",
+R"(
+ --- !!set
+ ? Mark McGwire
+ ? Sammy Sosa
+ ? Ken Griff
+)",
+N(STREAM, L{N(DOCMAP, TL("!!set", L{
+ N(KEYVAL, "Mark McGwire", /*"~"*/{}),
+ N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
+ N(KEYVAL, "Ken Griff", /*"~"*/{}),})
+)}));
+
+ADD_CASE_TO_GROUP("sets 2XXW, no set",
+R"(
+---
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+)",
+N(STREAM, L{N(DOCMAP, L{
+ N(KEYVAL, "Mark McGwire", /*"~"*/{}),
+ N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
+ N(KEYVAL, "Ken Griff", /*"~"*/{}),}
+)}));
+
+ADD_CASE_TO_GROUP("sets 2XXW, no doc",
+R"(!!set
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+)",
+TL("!!set", L{
+ N(KEYVAL, "Mark McGwire", /*"~"*/{}),
+ N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
+ N(KEYVAL, "Ken Griff", /*"~"*/{}),
+}));
+
+ADD_CASE_TO_GROUP("sets 2XXW, no doc, no set",
+R"(
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+)",
+L{
+ N(KEYVAL, "Mark McGwire", /*"~"*/{}),
+ N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
+ N(KEYVAL, "Ken Griff", /*"~"*/{}),
+});
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_single_quoted.cpp b/thirdparty/ryml/test/test_single_quoted.cpp
new file mode 100644
index 000000000..d27fdb6e0
--- /dev/null
+++ b/thirdparty/ryml/test/test_single_quoted.cpp
@@ -0,0 +1,356 @@
+#include "./test_group.hpp"
+
+namespace c4 {
+namespace yml {
+
+TEST(single_quoted, test_suite_KSS4)
+{
+ csubstr yaml = R"(
+---
+'quoted
+string'
+--- 'quoted
+string'
+---
+- 'quoted
+ string'
+---
+- 'quoted
+string'
+---
+'quoted
+ string': 'quoted
+ string'
+---
+'quoted
+string': 'quoted
+string'
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t.docref(0).val(), "quoted string");
+ EXPECT_EQ(t.docref(1).val(), "quoted string");
+ EXPECT_EQ(t.docref(2)[0].val(), "quoted string");
+ EXPECT_EQ(t.docref(3)[0].val(), "quoted string");
+ EXPECT_EQ(t.docref(4)["quoted string"].val(), "quoted string");
+ EXPECT_EQ(t.docref(5)["quoted string"].val(), "quoted string");
+ });
+}
+
+
+TEST(single_quoted, test_suite_R4YG)
+{
+ csubstr yaml = R"(
+- '
+
+detected
+
+'
+
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t[0].val(), csubstr("\t\ndetected\n"));
+ });
+}
+
+
+TEST(single_quoted, test_suite_PRH3)
+{
+ csubstr yaml = R"(
+- ' 1st non-empty
+
+ 2nd non-empty
+ 3rd non-empty '
+- ' 1st non-empty
+
+ 2nd non-empty
+ 3rd non-empty '
+- ' 1st non-empty
+
+ 2nd non-empty
+ 3rd non-empty '
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t[0].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
+ EXPECT_EQ(t[1].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
+ EXPECT_EQ(t[2].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
+ });
+}
+
+
+TEST(single_quoted, test_suite_T4YY)
+{
+ csubstr yaml = R"(
+---
+' 1st non-empty
+
+ 2nd non-empty
+ 3rd non-empty '
+---
+'
+
+detected
+
+'
+
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_stream());
+ ASSERT_TRUE(t.rootref().first_child().is_doc());
+ EXPECT_EQ(t.rootref().first_child().val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
+ });
+}
+
+TEST(single_quoted, test_suite_G4RS)
+{
+ csubstr yaml = R"(
+single: '"Howdy!" he cried.'
+quoted: ' # Not a ''comment''.'
+tie-fighter: '|\-*-/|'
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ EXPECT_EQ(t["single"].val() , csubstr(R"("Howdy!" he cried.)"));
+ EXPECT_EQ(t["quoted"].val() , csubstr(R"( # Not a 'comment'.)"));
+ EXPECT_EQ(t["tie-fighter"].val(), csubstr(R"(|\-*-/|)"));
+ });
+}
+
+TEST(single_quoted, quotes_are_preserved)
+{
+ csubstr yaml = R"(
+'%ROOT': '%VAL'
+'%ROOT2':
+ - '%VAL'
+ - '%VAL'
+)";
+ test_check_emit_check(yaml, [](Tree const &t){
+ ASSERT_TRUE(t.rootref().is_map());
+ ASSERT_TRUE(t.rootref().has_child("%ROOT"));
+ ASSERT_TRUE(t.rootref().has_child("%ROOT2"));
+ ASSERT_EQ(t["%ROOT2"].num_children(), 2u);
+ EXPECT_TRUE(t["%ROOT"].is_key_quoted());
+ EXPECT_TRUE(t["%ROOT"].is_val_quoted());
+ EXPECT_TRUE(t["%ROOT2"].is_key_quoted());
+ EXPECT_TRUE(t["%ROOT2"][0].is_val_quoted());
+ EXPECT_TRUE(t["%ROOT2"][1].is_val_quoted());
+ });
+}
+
+
+//-----------------------------------------------------------------------------
+
+void verify_error_is_reported(csubstr case_name, csubstr yaml, Location loc={})
+{
+ SCOPED_TRACE(case_name);
+ SCOPED_TRACE(yaml);
+ Tree tree;
+ ExpectError::do_check(&tree, [&](){
+ parse_in_arena(yaml, &tree);
+ }, loc);
+}
+
+TEST(single_quoted, error_on_unmatched_quotes)
+{
+ verify_error_is_reported("map block", R"(foo: '"
+bar: '')");
+ verify_error_is_reported("seq block", R"(- '"
+- '')");
+ verify_error_is_reported("map flow", R"({foo: '", bar: ''})");
+ verify_error_is_reported("seq flow", R"(['", ''])");
+}
+
+TEST(single_quoted, error_on_unmatched_quotes_with_escapes)
+{
+ verify_error_is_reported("map block", R"(foo: '''"
+bar: '')");
+ verify_error_is_reported("seq block", R"(- '''"
+- '')");
+ verify_error_is_reported("map flow", R"({foo: '''", bar: ''})");
+ verify_error_is_reported("seq flow", R"(['''", ''])");
+}
+
+TEST(single_quoted, error_on_unmatched_quotes_at_end)
+{
+ verify_error_is_reported("map block", R"(foo: ''
+bar: '")");
+ verify_error_is_reported("seq block", R"(- ''
+- '")");
+ verify_error_is_reported("map flow", R"({foo: '', bar: '"})");
+ verify_error_is_reported("seq flow", R"(['', '"])");
+}
+
+TEST(single_quoted, error_on_unmatched_quotes_at_end_with_escapes)
+{
+ verify_error_is_reported("map block", R"(foo: ''
+bar: '''")");
+ verify_error_is_reported("seq block", R"(- ''
+- '''")");
+ verify_error_is_reported("map flow", R"({foo: '', bar: '''"})");
+ verify_error_is_reported("seq flow", R"(['', '''"])");
+}
+
+TEST(single_quoted, error_on_unclosed_quotes)
+{
+ verify_error_is_reported("map block", R"(foo: ',
+bar: what)");
+ verify_error_is_reported("seq block", R"(- '
+- what)");
+ verify_error_is_reported("map flow", R"({foo: ', bar: what})");
+ verify_error_is_reported("seq flow", R"([', what])");
+}
+
+TEST(single_quoted, error_on_unclosed_quotes_with_escapes)
+{
+ verify_error_is_reported("map block", R"(foo: ''',
+bar: what)");
+ verify_error_is_reported("seq block", R"(- '''
+- what)");
+ verify_error_is_reported("map flow", R"({foo: ''', bar: what})");
+ verify_error_is_reported("seq flow", R"([''', what])");
+}
+
+TEST(single_quoted, error_on_unclosed_quotes_at_end)
+{
+ verify_error_is_reported("map block", R"(foo: what
+bar: ')");
+ verify_error_is_reported("seq block", R"(- what
+- ')");
+ verify_error_is_reported("map flow", R"({foo: what, bar: '})");
+ verify_error_is_reported("seq flow", R"([what, '])");
+}
+
+TEST(single_quoted, error_on_unclosed_quotes_at_end_with_escapes)
+{
+ verify_error_is_reported("map block", R"(foo: what
+bar: ''')");
+ verify_error_is_reported("seq block", R"(- what
+- ''')");
+ verify_error_is_reported("map flow", R"({foo: what, bar: '''})");
+ verify_error_is_reported("seq flow", R"([what, '''])");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(SINGLE_QUOTED)
+{
+
+ADD_CASE_TO_GROUP("squoted, only text",
+R"('Some text without any quotes.'
+)",
+ N(DOCVAL | VALQUO, "Some text without any quotes.")
+);
+
+ADD_CASE_TO_GROUP("squoted, with double quotes",
+R"('Some "text" "with double quotes"')",
+ N(DOCVAL | VALQUO, "Some \"text\" \"with double quotes\"")
+);
+
+ADD_CASE_TO_GROUP("squoted, with single quotes",
+R"('Some text ''with single quotes''')",
+ N(DOCVAL | VALQUO, "Some text 'with single quotes'")
+);
+
+ADD_CASE_TO_GROUP("squoted, with single and double quotes",
+R"('Some text ''with single quotes'' "and double quotes".')",
+ N(DOCVAL | VALQUO, "Some text 'with single quotes' \"and double quotes\".")
+);
+
+ADD_CASE_TO_GROUP("squoted, with escapes",
+R"('Some text with escapes \n \r \t')",
+ N(DOCVAL | VALQUO, "Some text with escapes \\n \\r \\t")
+);
+
+ADD_CASE_TO_GROUP("squoted, all",
+R"('Several lines of text,
+containing ''single quotes'' and "double quotes". Escapes (like \n) don''t do anything.
+
+Newlines can be added by leaving a blank line.
+ Leading whitespace on lines is ignored.'
+)",
+ N(DOCVAL | VALQUO, "Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Leading whitespace on lines is ignored.")
+);
+
+ADD_CASE_TO_GROUP("squoted, empty",
+R"('')",
+ N(DOCVAL | VALQUO, "")
+);
+
+ADD_CASE_TO_GROUP("squoted, blank",
+R"(
+- ''
+- ' '
+- ' '
+- ' '
+- ' '
+)",
+ L{N(QV, ""), N(QV, " "), N(QV, " "), N(QV, " "), N(QV, " ")}
+);
+
+ADD_CASE_TO_GROUP("squoted, numbers", // these should not be quoted when emitting
+R"(
+- -1
+- -1.0
+- +1.0
+- 1e-2
+- 1e+2
+)",
+ L{N("-1"), N("-1.0"), N("+1.0"), N("1e-2"), N("1e+2")}
+);
+
+ADD_CASE_TO_GROUP("squoted, trailing space",
+R"('a aaaa ')",
+ N(DOCVAL | VALQUO, "a aaaa ")
+);
+
+ADD_CASE_TO_GROUP("squoted, leading space",
+R"(' a aaaa')",
+ N(DOCVAL | VALQUO, " a aaaa")
+);
+
+ADD_CASE_TO_GROUP("squoted, trailing and leading space",
+R"(' 012345 ')",
+ N(DOCVAL | VALQUO, " 012345 ")
+);
+
+ADD_CASE_TO_GROUP("squoted, 1 squote",
+R"('''')",
+ N(DOCVAL | VALQUO, "'")
+);
+
+ADD_CASE_TO_GROUP("squoted, 2 squotes",
+R"('''''')",
+ N(DOCVAL | VALQUO, "''")
+);
+
+ADD_CASE_TO_GROUP("squoted, 3 squotes",
+R"('''''''')",
+ N(DOCVAL | VALQUO, "'''")
+);
+
+ADD_CASE_TO_GROUP("squoted, 4 squotes",
+R"('''''''''')",
+ N(DOCVAL | VALQUO, "''''")
+);
+
+ADD_CASE_TO_GROUP("squoted, 5 squotes",
+R"('''''''''''')",
+ N(DOCVAL | VALQUO, "'''''")
+);
+
+/*
+ADD_CASE_TO_GROUP("squoted, example 2",
+R"('This is a key
+
+that has multiple lines
+
+': and this is its value
+)",
+ L{N("This is a key\nthat has multiple lines\n", "and this is its value")}
+);
+*/
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_singleheader/libryml_singleheader.cpp b/thirdparty/ryml/test/test_singleheader/libryml_singleheader.cpp
new file mode 100644
index 000000000..b5f27d1ed
--- /dev/null
+++ b/thirdparty/ryml/test/test_singleheader/libryml_singleheader.cpp
@@ -0,0 +1,3 @@
+#define RYML_SINGLE_HDR_DEFINE_NOW
+#define C4CORE_SINGLE_HDR_DEFINE_NOW
+#include <ryml_all.hpp>
diff --git a/thirdparty/ryml/test/test_stack.cpp b/thirdparty/ryml/test/test_stack.cpp
new file mode 100644
index 000000000..73d7b2cea
--- /dev/null
+++ b/thirdparty/ryml/test/test_stack.cpp
@@ -0,0 +1,857 @@
+#ifdef RYML_SINGLE_HEADER
+#include "ryml_all.hpp"
+#else
+#include "c4/yml/detail/stack.hpp"
+#endif
+#include <gtest/gtest.h>
+#include "./callbacks_tester.hpp"
+
+
+//-------------------------------------------
+
+namespace c4 {
+namespace yml {
+
+namespace detail {
+
+template<size_t N>
+using istack = stack<int, N>;
+using ip = int const*;
+
+template<size_t N>
+void to_large(istack<N> *s)
+{
+ size_t sz = 3u * N;
+ s->reserve(sz);
+ EXPECT_NE(s->m_stack, s->m_buf);
+}
+
+template<size_t N>
+void fill_to_large(istack<N> *s)
+{
+ size_t sz = 3u * N;
+ s->reserve(sz);
+ for(int i = 0, e = (int)sz; i < e; ++i)
+ s->push(i);
+ EXPECT_NE(s->m_stack, s->m_buf);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_stack_small_vs_large()
+{
+ istack<N> s;
+ for(size_t i = 0; i < N; ++i)
+ {
+ s.push(static_cast<int>(i));
+ EXPECT_EQ(s.size(), i+1);
+ }
+ EXPECT_EQ(s.size(), N);
+ EXPECT_EQ(s.m_stack, s.m_buf);
+ for(size_t i = 0; i < N; ++i)
+ {
+ EXPECT_EQ(s.top(N-1-i), static_cast<int>(i));
+ }
+ s.push(N);
+ EXPECT_NE(s.m_stack, s.m_buf);
+ EXPECT_EQ(s.top(), static_cast<int>(N));
+ EXPECT_EQ(s.pop(), static_cast<int>(N));
+ EXPECT_NE(s.m_stack, s.m_buf);
+ for(size_t i = 0; i < N; ++i)
+ {
+ EXPECT_EQ(s.top(N-1-i), static_cast<int>(i));
+ }
+}
+
+TEST(stack, small_vs_large)
+{
+ test_stack_small_vs_large<8>();
+ test_stack_small_vs_large<16>();
+ test_stack_small_vs_large<32>();
+ test_stack_small_vs_large<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_copy_ctor()
+{
+ istack<N> src;
+
+ // small
+ for(size_t i = 0; i < N; ++i)
+ {
+ src.push((int)i);
+ }
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ ip b = src.begin();
+ {
+ istack<N> dst(src);
+ EXPECT_EQ(dst.size(), src.size());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ((ip)src.begin(), b);
+ EXPECT_NE((ip)dst.begin(), (ip)src.begin());
+ }
+
+ // large
+ for(size_t i = 0; i < 2*N; ++i)
+ {
+ src.push((int)i); // large
+ }
+ EXPECT_NE(src.m_stack, src.m_buf);
+ b = src.begin();
+ {
+ istack<N> dst(src);
+ EXPECT_EQ(dst.size(), src.size());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ((ip)src.begin(), b);
+ EXPECT_NE((ip)dst.begin(), (ip)src.begin());
+ }
+}
+
+TEST(stack, copy_ctor)
+{
+ test_copy_ctor<4>();
+ test_copy_ctor<8>();
+ test_copy_ctor<64>();
+ test_copy_ctor<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_move_ctor()
+{
+ istack<N> src;
+
+ // small
+ for(size_t i = 0; i < N; ++i)
+ {
+ src.push((int)i);
+ }
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ ip b = src.begin();
+ size_t sz = src.size();
+ {
+ istack<N> dst(std::move(src));
+ EXPECT_EQ(dst.size(), sz);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_NE(dst.m_stack, b);
+ EXPECT_EQ(src.size(), size_t(0));
+ EXPECT_EQ((ip)src.begin(), src.m_buf);
+ EXPECT_NE((ip)dst.begin(), b);
+ }
+ EXPECT_EQ(src.size(), size_t(0));
+ EXPECT_EQ(src.capacity(), N);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+
+ // redo
+ for(size_t i = 0; i < N; ++i)
+ {
+ src.push((int)i);
+ }
+ EXPECT_EQ(src.size(), N);
+ EXPECT_EQ(src.capacity(), N);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ // large
+ for(size_t i = 0; i < 2*N; ++i)
+ {
+ src.push((int)i); // large
+ }
+ EXPECT_EQ(src.size(), 3*N);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ b = src.begin();
+ sz = src.size();
+ {
+ istack<N> dst(std::move(src));
+ EXPECT_EQ(dst.size(), sz);
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(dst.m_stack, b);
+ EXPECT_EQ(src.capacity(), N);
+ EXPECT_EQ(src.size(), size_t(0));
+ EXPECT_EQ((ip)src.begin(), src.m_buf);
+ EXPECT_EQ((ip)dst.begin(), b);
+ }
+}
+
+TEST(stack, move_ctor)
+{
+ test_move_ctor<4>();
+ test_move_ctor<8>();
+ test_move_ctor<64>();
+ test_move_ctor<128>();
+}
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_copy_assign()
+{
+ istack<N> dst;
+ istack<N> srcs; // small
+ istack<N> srcl; // large
+
+ for(size_t i = 0; i < N; ++i)
+ {
+ srcs.push((int)i); // small
+ srcl.push((int)i); // large
+ }
+ for(size_t i = 0; i < 2*N; ++i)
+ {
+ srcl.push((int)i); // large
+ }
+ EXPECT_EQ(srcs.m_stack, srcs.m_buf);
+ EXPECT_NE(srcl.m_stack, srcl.m_buf);
+
+ ip bs = srcs.begin(), bl = srcl.begin();
+
+ {
+ dst = srcs;
+ EXPECT_EQ(dst.size(), srcs.size());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ((ip)srcs.begin(), bs);
+ EXPECT_NE((ip)dst.begin(), (ip)srcs.begin());
+ }
+
+ {
+ dst = srcl;
+ EXPECT_EQ(dst.size(), srcl.size());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ((ip)srcl.begin(), bl);
+ EXPECT_NE((ip)dst.begin(), (ip)srcl.begin());
+ }
+
+ {
+ dst = srcs;
+ EXPECT_EQ(dst.size(), srcs.size());
+ EXPECT_NE(dst.m_stack, dst.m_buf); // it stays in long mode (it's not trimmed when assigned from a short-mode stack)
+ EXPECT_EQ((ip)srcs.begin(), bs);
+ EXPECT_NE((ip)dst.begin(), (ip)srcs.begin());
+ }
+}
+
+TEST(stack, copy_assign)
+{
+ test_copy_assign<4>();
+ test_copy_assign<8>();
+ test_copy_assign<64>();
+ test_copy_assign<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_move_assign()
+{
+ istack<N> srcs, srcl, dst;
+
+ for(size_t i = 0; i < N; ++i)
+ {
+ srcs.push((int)i); // small
+ srcl.push((int)i); // large
+ }
+ for(size_t i = 0; i < 2*N; ++i)
+ {
+ srcl.push((int)i); // large
+ }
+ EXPECT_EQ(srcs.m_stack, srcs.m_buf);
+ EXPECT_NE(srcl.m_stack, srcl.m_buf);
+
+ ip bs = srcs.begin()/*, bl = srcl.begin()*/;
+ size_t szs = srcs.size(), szl = srcl.size();
+
+ for(int i = 0; i < 10; ++i)
+ {
+ EXPECT_FALSE(srcs.empty());
+ EXPECT_TRUE(dst.empty());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(srcs.m_stack, srcs.m_buf);
+
+ dst = std::move(srcs);
+ EXPECT_TRUE(srcs.empty());
+ EXPECT_FALSE(dst.empty());
+ EXPECT_EQ(srcs.size(), size_t(0));
+ EXPECT_EQ(srcs.capacity(), N);
+ EXPECT_EQ(dst.size(), szs);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(srcs.m_stack, srcs.m_buf);
+ EXPECT_EQ((ip)srcs.begin(), bs);
+ EXPECT_NE((ip)dst.begin(), (ip)srcs.begin());
+
+ srcs = std::move(dst);
+ }
+
+ for(int i = 0; i < 10; ++i)
+ {
+ EXPECT_EQ(srcl.size(), 3*N);
+ EXPECT_FALSE(srcl.empty());
+ EXPECT_TRUE(dst.empty());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_NE(srcl.m_stack, srcl.m_buf);
+
+ dst = std::move(srcl);
+ EXPECT_TRUE(srcl.empty());
+ EXPECT_FALSE(dst.empty());
+ EXPECT_EQ(srcl.size(), size_t(0));
+ EXPECT_EQ(srcl.capacity(), N);
+ EXPECT_EQ(dst.size(), szl);
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(srcl.m_stack, srcl.m_buf);
+ EXPECT_EQ((ip)srcl.begin(), srcl.m_buf);
+ EXPECT_NE((ip)dst.begin(), (ip)srcl.begin());
+
+ srcl = std::move(dst);
+ }
+}
+
+TEST(stack, move_assign)
+{
+ test_move_assign<4>();
+ test_move_assign<8>();
+ test_move_assign<64>();
+ test_move_assign<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_callbacks_default_ctor()
+{
+ CallbacksTester td;
+ CallbacksTester ts;
+ istack<N> dst;
+ EXPECT_EQ(dst.m_callbacks, get_callbacks());
+}
+
+TEST(stack, callbacks_default_ctor)
+{
+ test_callbacks_default_ctor<4>();
+ test_callbacks_default_ctor<8>();
+ test_callbacks_default_ctor<64>();
+ test_callbacks_default_ctor<128>();
+}
+
+template<size_t N>
+void test_callbacks_ctor()
+{
+ CallbacksTester td;
+ CallbacksTester ts;
+ istack<N> dst(td.callbacks());
+ ASSERT_EQ(dst.m_callbacks, td.callbacks());
+}
+
+TEST(stack, callbacks_ctor)
+{
+ test_callbacks_ctor<4>();
+ test_callbacks_ctor<8>();
+ test_callbacks_ctor<64>();
+ test_callbacks_ctor<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+// copy ctor
+
+template<size_t N>
+void test_callbacks_copy_ctor_small()
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ {
+ istack<N> src(ts.callbacks());
+ EXPECT_EQ(src.size(), 0u);
+ EXPECT_EQ(src.capacity(), N);
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ istack<N> dst(src);
+ EXPECT_EQ(dst.size(), 0u);
+ EXPECT_EQ(dst.capacity(), N);
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, nbefore);
+ EXPECT_EQ(td.num_allocs, 0u);
+ }
+}
+
+template<size_t N>
+void test_callbacks_copy_ctor_large_unfilled()
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ {
+ istack<N> src(ts.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ to_large(&src);
+ ASSERT_GT(src.capacity(), N);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(ts.num_allocs, 0u);
+ istack<N> dst(src);
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ ASSERT_NE(dst.m_callbacks, td.callbacks());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, nbefore);
+ EXPECT_EQ(td.num_allocs, 0u);
+ }
+}
+
+template<size_t N>
+void test_callbacks_copy_ctor_large_filled()
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ {
+ istack<N> src(ts.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ fill_to_large(&src);
+ ASSERT_GT(src.capacity(), N);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(ts.num_allocs, 0u);
+ istack<N> dst(src);
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ ASSERT_NE(dst.m_callbacks, td.callbacks());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_GT(ts.num_allocs, nbefore);
+ EXPECT_EQ(td.num_allocs, 0u);
+ }
+}
+
+TEST(stack, callbacks_copy_ctor_small)
+{
+ test_callbacks_copy_ctor_small<4>();
+ test_callbacks_copy_ctor_small<8>();
+ test_callbacks_copy_ctor_small<64>();
+ test_callbacks_copy_ctor_small<128>();
+}
+
+TEST(stack, callbacks_copy_ctor_large_unfilled)
+{
+ test_callbacks_copy_ctor_large_unfilled<4>();
+ test_callbacks_copy_ctor_large_unfilled<8>();
+ test_callbacks_copy_ctor_large_unfilled<64>();
+ test_callbacks_copy_ctor_large_unfilled<128>();
+}
+
+TEST(stack, callbacks_copy_ctor_large_filled)
+{
+ test_callbacks_copy_ctor_large_filled<4>();
+ test_callbacks_copy_ctor_large_filled<8>();
+ test_callbacks_copy_ctor_large_filled<64>();
+ test_callbacks_copy_ctor_large_filled<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+// move ctor
+
+template<size_t N>
+void test_callbacks_move_ctor_small()
+{
+ CallbacksTester ts;
+ istack<N> src(ts.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ istack<N> dst(std::move(src));
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, nbefore);
+}
+
+template<size_t N>
+void test_callbacks_move_ctor_large_unfilled()
+{
+ CallbacksTester ts;
+ istack<N> src(ts.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ to_large(&src);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(ts.num_allocs, 0u);
+ istack<N> dst(std::move(src));
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, nbefore);
+}
+
+template<size_t N>
+void test_callbacks_move_ctor_large_filled()
+{
+ CallbacksTester ts;
+ istack<N> src(ts.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ fill_to_large(&src);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(ts.num_allocs, 0u);
+ istack<N> dst(std::move(src));
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, nbefore);
+}
+
+TEST(stack, callbacks_move_ctor_small)
+{
+ test_callbacks_move_ctor_small<4>();
+ test_callbacks_move_ctor_small<8>();
+ test_callbacks_move_ctor_small<64>();
+ test_callbacks_move_ctor_small<128>();
+}
+
+TEST(stack, callbacks_move_ctor_large_unfilled)
+{
+ test_callbacks_move_ctor_large_unfilled<4>();
+ test_callbacks_move_ctor_large_unfilled<8>();
+ test_callbacks_move_ctor_large_unfilled<64>();
+ test_callbacks_move_ctor_large_unfilled<128>();
+}
+
+TEST(stack, callbacks_move_ctor_large_filled)
+{
+ test_callbacks_move_ctor_large_filled<4>();
+ test_callbacks_move_ctor_large_filled<8>();
+ test_callbacks_move_ctor_large_filled<64>();
+ test_callbacks_move_ctor_large_filled<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+// copy assign
+
+template<size_t N>
+void test_callbacks_copy_assign_to_empty()
+{
+ CallbacksTester ts("src");
+ CallbacksTester td("dst");
+ istack<N> src(ts.callbacks());
+ istack<N> dst(td.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ fill_to_large(&src);
+ size_t nbefore = ts.num_allocs;
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(ts.num_allocs, 0u);
+ dst = src;
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks());
+ ASSERT_NE(dst.m_callbacks, td.callbacks());
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_GT(ts.num_allocs, nbefore);
+ EXPECT_EQ(td.num_allocs, 0u);
+}
+
+TEST(stack, callbacks_copy_assign_to_empty)
+{
+ test_callbacks_copy_assign_to_empty<4>();
+ test_callbacks_copy_assign_to_empty<8>();
+ test_callbacks_copy_assign_to_empty<64>();
+ test_callbacks_copy_assign_to_empty<128>();
+}
+
+template<size_t N>
+void test_callbacks_copy_assign_to_nonempty()
+{
+ CallbacksTester ts("src");
+ {
+ CallbacksTester td("dst");
+ istack<N> src(ts.callbacks());
+ istack<N> dst(td.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, td.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(td.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ fill_to_large(&src);
+ fill_to_large(&dst);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(td.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ dst = src;
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks()); // changed to ts
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 2u);
+ EXPECT_EQ(td.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 1u);
+ td.check();
+ }
+ ts.check();
+}
+
+TEST(stack, callbacks_copy_assign_to_nonempty)
+{
+ test_callbacks_copy_assign_to_nonempty<4>();
+ test_callbacks_copy_assign_to_nonempty<8>();
+ test_callbacks_copy_assign_to_nonempty<64>();
+ test_callbacks_copy_assign_to_nonempty<128>();
+}
+
+template<size_t N>
+void test_callbacks_move_assign_to_empty()
+{
+ CallbacksTester ts("src");
+ {
+ CallbacksTester td("dst");
+ istack<N> src(ts.callbacks());
+ istack<N> dst(td.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, td.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(td.num_allocs, 0u);
+ fill_to_large(&src);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(td.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ dst = std::move(src);
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks()); // changed to ts
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(td.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ td.check();
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ ts.check();
+}
+
+TEST(stack, callbacks_move_assign_to_empty)
+{
+ test_callbacks_move_assign_to_empty<4>();
+ test_callbacks_move_assign_to_empty<8>();
+ test_callbacks_move_assign_to_empty<64>();
+ test_callbacks_move_assign_to_empty<128>();
+}
+
+template<size_t N>
+void test_callbacks_move_assign_to_nonempty()
+{
+ CallbacksTester ts("src");
+ {
+ CallbacksTester td("dst");
+ istack<N> src(ts.callbacks());
+ istack<N> dst(td.callbacks());
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, td.callbacks());
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(td.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ fill_to_large(&src);
+ fill_to_large(&dst);
+ EXPECT_NE(src.m_stack, src.m_buf);
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(td.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 0u);
+ dst = std::move(src);
+ ASSERT_EQ(src.m_callbacks, ts.callbacks());
+ ASSERT_EQ(dst.m_callbacks, ts.callbacks()); // changed to ts
+ EXPECT_NE(dst.m_stack, dst.m_buf);
+ EXPECT_EQ(src.m_stack, src.m_buf);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(td.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(td.num_deallocs, 1u);
+ td.check();
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ ts.check();
+}
+
+TEST(stack, callbacks_move_assign_to_nonempty)
+{
+ test_callbacks_move_assign_to_nonempty<4>();
+ test_callbacks_move_assign_to_nonempty<8>();
+ test_callbacks_move_assign_to_nonempty<64>();
+ test_callbacks_move_assign_to_nonempty<128>();
+}
+
+
+//-----------------------------------------------------------------------------
+
+template<size_t N>
+void test_reserve()
+{
+ {
+ CallbacksTester ts;
+ {
+ istack<N> s(ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(s.capacity(), N);
+ s.reserve(4*N);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(s.capacity(), 4*N);
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ ts.check();
+ }
+ {
+ CallbacksTester ts;
+ {
+ istack<N> s(ts.callbacks());
+ EXPECT_EQ(ts.num_allocs, 0u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(s.capacity(), N);
+ s.reserve(4*N);
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 0u);
+ EXPECT_EQ(s.capacity(), 4*N);
+ s._free();
+ }
+ EXPECT_EQ(ts.num_allocs, 1u);
+ EXPECT_EQ(ts.num_deallocs, 1u);
+ ts.check();
+ }
+}
+
+TEST(stack, reserve_capacity)
+{
+ test_reserve<10>();
+ test_reserve<20>();
+}
+
+
+template<size_t N, int NumTimes>
+void grow_to_large__push()
+{
+ istack<N> s;
+ int ni = (int)N;
+ for(int i = 0; i < NumTimes * ni; ++i)
+ {
+ s.push(i);
+ if(i < ni)
+ EXPECT_EQ(s.m_stack, s.m_buf) << i;
+ else
+ EXPECT_NE(s.m_stack, s.m_buf) << i;
+ }
+ for(int i = 0; i < NumTimes * ni; ++i)
+ {
+ EXPECT_EQ(s.bottom((size_t)i), i);
+ }
+}
+
+TEST(stack, push_to_large_twice)
+{
+ grow_to_large__push<10, 8>();
+ grow_to_large__push<20, 8>();
+ grow_to_large__push<32, 8>();
+}
+
+template<size_t N, int NumTimes>
+void grow_to_large__push_top()
+{
+ istack<N> s;
+ int ni = (int)N;
+ s.push(0);
+ for(int i = 1; i < NumTimes * ni; ++i)
+ {
+ s.push_top();
+ EXPECT_EQ(s.top(), i-1) << i;
+ s.top() = i;
+ if(i < ni)
+ EXPECT_EQ(s.m_stack, s.m_buf) << i;
+ else
+ EXPECT_NE(s.m_stack, s.m_buf) << i;
+ }
+ for(int i = 0; i < NumTimes * ni; ++i)
+ {
+ EXPECT_EQ(s.bottom((size_t)i), i);
+ }
+}
+
+TEST(stack, push_top_to_large_twice)
+{
+ grow_to_large__push_top<10, 8>();
+ grow_to_large__push_top<20, 8>();
+ grow_to_large__push_top<32, 8>();
+}
+
+} // namespace detail
+} // namespace yml
+} // namespace c4
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// this is needed to use the test case library
+
+#ifndef RYML_SINGLE_HEADER
+#include "c4/substr.hpp"
+#endif
+
+namespace c4 {
+namespace yml {
+struct Case;
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_style.cpp b/thirdparty/ryml/test/test_style.cpp
new file mode 100644
index 000000000..5b46d075b
--- /dev/null
+++ b/thirdparty/ryml/test/test_style.cpp
@@ -0,0 +1,616 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+
+#include "./test_case.hpp"
+
+#include <gtest/gtest.h>
+
+namespace c4 {
+namespace yml {
+
+std::string emit2str(Tree const& t)
+{
+ return emitrs_yaml<std::string>(t);
+}
+
+
+TEST(style, flags)
+{
+ Tree tree = parse_in_arena("foo: bar");
+ EXPECT_TRUE(tree.rootref().type().default_block());
+ EXPECT_FALSE(tree.rootref().type().marked_flow());
+ EXPECT_FALSE(tree.rootref().type().marked_flow_sl());
+ EXPECT_FALSE(tree.rootref().type().marked_flow_ml());
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_FALSE(tree.rootref().type().default_block());
+ EXPECT_TRUE(tree.rootref().type().marked_flow());
+ EXPECT_TRUE(tree.rootref().type().marked_flow_sl());
+ EXPECT_FALSE(tree.rootref().type().marked_flow_ml());
+ tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_ML);
+ EXPECT_FALSE(tree.rootref().type().default_block());
+ EXPECT_TRUE(tree.rootref().type().marked_flow());
+ EXPECT_FALSE(tree.rootref().type().marked_flow_sl());
+ EXPECT_TRUE(tree.rootref().type().marked_flow_ml());
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+csubstr scalar_yaml = R"(
+this is the key: >-
+ this is the multiline
+ "val" with
+
+ 'empty' lines
+)";
+
+void check_same_emit(Tree const& expected)
+{
+ #if 0
+ #define _showtrees(num) \
+ std::cout << "--------\nEMITTED" #num "\n--------\n"; \
+ std::cout << ws ## num; \
+ std::cout << "--------\nACTUAL" #num "\n--------\n"; \
+ print_tree(actual ## num); \
+ std::cout << "--------\nEXPECTED" #num "\n--------\n"; \
+ print_tree(expected)
+ #else
+ #define _showtrees(num)
+ #endif
+
+ std::string ws1, ws2, ws3, ws4;
+ emitrs_yaml(expected, &ws1);
+ {
+ SCOPED_TRACE("actual1");
+ Tree actual1 = parse_in_arena(to_csubstr(ws1));
+ _showtrees(1);
+ test_compare(actual1, expected);
+ emitrs_yaml(actual1, &ws2);
+ }
+ {
+ SCOPED_TRACE("actual2");
+ Tree actual2 = parse_in_arena(to_csubstr(ws2));
+ _showtrees(2);
+ test_compare(actual2, expected);
+ emitrs_yaml(actual2, &ws3);
+ }
+ {
+ SCOPED_TRACE("actual3");
+ Tree actual3 = parse_in_arena(to_csubstr(ws3));
+ _showtrees(3);
+ test_compare(actual3, expected);
+ emitrs_yaml(actual3, &ws4);
+ }
+ {
+ SCOPED_TRACE("actual4");
+ Tree actual4 = parse_in_arena(to_csubstr(ws4));
+ _showtrees(4);
+ test_compare(actual4, expected);
+ }
+}
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
+TEST(style, noflags)
+{
+ Tree expected = parse_in_arena("{}");
+ NodeRef r = expected.rootref();
+ r["normal"] |= MAP;
+ r["normal"]["singleline"] = "foo";
+ r["normal"]["multiline"] |= MAP;
+ r["normal"]["multiline"]["____________"] = "foo";
+ r["normal"]["multiline"]["____mid_____"] = "foo\nbar";
+ r["normal"]["multiline"]["____mid_end1"] = "foo\nbar\n";
+ r["normal"]["multiline"]["____mid_end2"] = "foo\nbar\n\n";
+ r["normal"]["multiline"]["____mid_end3"] = "foo\nbar\n\n\n";
+ r["normal"]["multiline"]["____________"] = "foo";
+ r["normal"]["multiline"]["____________"] = "foo bar";
+ r["normal"]["multiline"]["________end1"] = "foo bar\n";
+ r["normal"]["multiline"]["________end2"] = "foo bar\n\n";
+ r["normal"]["multiline"]["________end3"] = "foo bar\n\n\n";
+ r["normal"]["multiline"]["beg_________"] = "\nfoo";
+ r["normal"]["multiline"]["beg_mid_____"] = "\nfoo\nbar";
+ r["normal"]["multiline"]["beg_mid_end1"] = "\nfoo\nbar\n";
+ r["normal"]["multiline"]["beg_mid_end2"] = "\nfoo\nbar\n\n";
+ r["normal"]["multiline"]["beg_mid_end3"] = "\nfoo\nbar\n\n\n";
+ r["leading_ws"] |= MAP;
+ r["leading_ws"]["singleline"] |= MAP;
+ r["leading_ws"]["singleline"]["space"] = " foo";
+ r["leading_ws"]["singleline"]["tab"] = "\tfoo";
+ r["leading_ws"]["singleline"]["space_and_tab0"] = " \tfoo";
+ r["leading_ws"]["singleline"]["space_and_tab1"] = "\t foo";
+ r["leading_ws"]["multiline"] |= MAP;
+ r["leading_ws"]["multiline"]["beg_________"] = "\n \tfoo";
+ r["leading_ws"]["multiline"]["beg_mid_____"] = "\n \tfoo\nbar";
+ r["leading_ws"]["multiline"]["beg_mid_end1"] = "\n \tfoo\nbar\n";
+ r["leading_ws"]["multiline"]["beg_mid_end2"] = "\n \tfoo\nbar\n\n";
+ r["leading_ws"]["multiline"]["beg_mid_end3"] = "\n \tfoo\nbar\n\n\n";
+ check_same_emit(expected);
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+#ifdef WIP
+TEST(style, scalar_retains_style_after_parse)
+{
+ {
+ Tree t = parse_in_arena("foo");
+ EXPECT_TRUE(t.rootref().type().val_marked_plain());
+ EXPECT_FALSE(t.rootref().type().val_marked_squo());
+ EXPECT_FALSE(t.rootref().type().val_marked_dquo());
+ EXPECT_FALSE(t.rootref().type().val_marked_literal());
+ EXPECT_FALSE(t.rootref().type().val_marked_folded());
+ EXPECT_EQ(emitrs<std::string>(t), std::string("foo\n"));
+ }
+ {
+ Tree t = parse_in_arena("'foo'");
+ EXPECT_FALSE(t.rootref().type().val_marked_plain());
+ EXPECT_TRUE(t.rootref().type().val_marked_squo());
+ EXPECT_FALSE(t.rootref().type().val_marked_dquo());
+ EXPECT_FALSE(t.rootref().type().val_marked_literal());
+ EXPECT_FALSE(t.rootref().type().val_marked_folded());
+ EXPECT_EQ(emitrs<std::string>(t), std::string("'foo'\n"));
+ }
+ {
+ Tree t = parse_in_arena("\"foo\"");
+ EXPECT_FALSE(t.rootref().type().val_marked_plain());
+ EXPECT_FALSE(t.rootref().type().val_marked_squo());
+ EXPECT_TRUE(t.rootref().type().val_marked_dquo());
+ EXPECT_FALSE(t.rootref().type().val_marked_literal());
+ EXPECT_FALSE(t.rootref().type().val_marked_folded());
+ EXPECT_EQ(emitrs<std::string>(t), std::string("\"foo\"\n"));
+ }
+ {
+ Tree t = parse_in_arena("[foo, 'baz', \"bat\"]");
+ EXPECT_TRUE(t.rootref().type().marked_flow());
+ EXPECT_TRUE(t[0].type().val_marked_plain());
+ EXPECT_FALSE(t[0].type().val_marked_squo());
+ EXPECT_FALSE(t[0].type().val_marked_dquo());
+ EXPECT_FALSE(t[0].type().val_marked_literal());
+ EXPECT_FALSE(t[0].type().val_marked_folded());
+ EXPECT_FALSE(t[1].type().val_marked_plain());
+ EXPECT_TRUE(t[1].type().val_marked_squo());
+ EXPECT_FALSE(t[1].type().val_marked_dquo());
+ EXPECT_FALSE(t[1].type().val_marked_literal());
+ EXPECT_FALSE(t[1].type().val_marked_folded());
+ EXPECT_FALSE(t[2].type().val_marked_plain());
+ EXPECT_FALSE(t[2].type().val_marked_squo());
+ EXPECT_TRUE(t[2].type().val_marked_dquo());
+ EXPECT_FALSE(t[2].type().val_marked_literal());
+ EXPECT_FALSE(t[2].type().val_marked_folded());
+ EXPECT_EQ(emitrs<std::string>(t), std::string("foo"));
+ }
+}
+#endif
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(scalar, base)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ EXPECT_EQ(tree[0].key(), csubstr("this is the key"));
+ EXPECT_EQ(tree[0].val(), csubstr("this is the multiline \"val\" with\n'empty' lines"));
+ EXPECT_EQ(emit2str(tree), R"(this is the key: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+}
+
+TEST(scalar, block_literal)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ {
+ SCOPED_TRACE("val only");
+ EXPECT_FALSE(tree[0].type().key_marked_literal());
+ EXPECT_FALSE(tree[0].type().val_marked_literal());
+ tree._add_flags(tree[0].id(), _WIP_VAL_LITERAL);
+ EXPECT_FALSE(tree[0].type().key_marked_literal());
+ EXPECT_TRUE(tree[0].type().val_marked_literal());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key+val");
+ tree._add_flags(tree[0].id(), _WIP_KEY_LITERAL);
+ EXPECT_TRUE(tree[0].type().key_marked_literal());
+ EXPECT_TRUE(tree[0].type().val_marked_literal());
+ EXPECT_EQ(emit2str(tree), R"(? |-
+ this is the key
+: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key only");
+ tree._rem_flags(tree[0].id(), _WIP_VAL_LITERAL);
+ EXPECT_TRUE(tree[0].type().key_marked_literal());
+ EXPECT_FALSE(tree[0].type().val_marked_literal());
+ EXPECT_EQ(emit2str(tree), R"(? |-
+ this is the key
+: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+}
+
+TEST(scalar, block_folded)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ {
+ SCOPED_TRACE("val only");
+ EXPECT_FALSE(tree[0].type().key_marked_folded());
+ EXPECT_FALSE(tree[0].type().val_marked_folded());
+ tree._add_flags(tree[0].id(), _WIP_VAL_FOLDED);
+ EXPECT_FALSE(tree[0].type().key_marked_folded());
+ EXPECT_TRUE(tree[0].type().val_marked_folded());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: >-
+ this is the multiline "val" with
+
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key+val");
+ tree._add_flags(tree[0].id(), _WIP_KEY_FOLDED);
+ EXPECT_TRUE(tree[0].type().key_marked_folded());
+ EXPECT_TRUE(tree[0].type().val_marked_folded());
+ EXPECT_EQ(emit2str(tree), R"(? >-
+ this is the key
+: >-
+ this is the multiline "val" with
+
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key only");
+ tree._rem_flags(tree[0].id(), _WIP_VAL_FOLDED);
+ EXPECT_TRUE(tree[0].type().key_marked_folded());
+ EXPECT_FALSE(tree[0].type().val_marked_folded());
+ EXPECT_EQ(emit2str(tree), R"(? >-
+ this is the key
+: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+}
+
+TEST(scalar, squot)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ EXPECT_FALSE(tree[0].type().key_marked_squo());
+ EXPECT_FALSE(tree[0].type().val_marked_squo());
+ {
+ SCOPED_TRACE("val only");
+ tree._add_flags(tree[0].id(), _WIP_VAL_SQUO);
+ EXPECT_FALSE(tree[0].type().key_marked_squo());
+ EXPECT_TRUE(tree[0].type().val_marked_squo());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: 'this is the multiline "val" with
+
+ ''empty'' lines'
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key+val");
+ tree._add_flags(tree[0].id(), _WIP_KEY_SQUO);
+ EXPECT_TRUE(tree[0].type().key_marked_squo());
+ EXPECT_TRUE(tree[0].type().val_marked_squo());
+ EXPECT_EQ(emit2str(tree), R"('this is the key': 'this is the multiline "val" with
+
+ ''empty'' lines'
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key only");
+ tree._rem_flags(tree[0].id(), _WIP_VAL_SQUO);
+ EXPECT_TRUE(tree[0].type().key_marked_squo());
+ EXPECT_FALSE(tree[0].type().val_marked_squo());
+ EXPECT_EQ(emit2str(tree), R"('this is the key': |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+}
+
+TEST(scalar, dquot)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ EXPECT_FALSE(tree[0].type().key_marked_dquo());
+ EXPECT_FALSE(tree[0].type().val_marked_dquo());
+ {
+ SCOPED_TRACE("val only");
+ tree._add_flags(tree[0].id(), _WIP_VAL_DQUO);
+ EXPECT_FALSE(tree[0].type().key_marked_dquo());
+ EXPECT_TRUE(tree[0].type().val_marked_dquo());
+ // visual studio fails to compile this string when used inside
+ // the EXPECT_EQ() macro below. So we declare it separately
+ // instead:
+ csubstr yaml = R"(this is the key: "this is the multiline \"val\" with
+
+ 'empty' lines"
+)";
+ EXPECT_EQ(emit2str(tree), yaml);
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key+val");
+ tree._add_flags(tree[0].id(), _WIP_KEY_DQUO);
+ EXPECT_TRUE(tree[0].type().key_marked_dquo());
+ EXPECT_TRUE(tree[0].type().val_marked_dquo());
+ // visual studio fails to compile this string when used inside
+ // the EXPECT_EQ() macro below. So we declare it separately
+ // instead:
+ csubstr yaml = R"("this is the key": "this is the multiline \"val\" with
+
+ 'empty' lines"
+)";
+ EXPECT_EQ(emit2str(tree), yaml);
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key only");
+ tree._rem_flags(tree[0].id(), _WIP_VAL_DQUO);
+ EXPECT_TRUE(tree[0].type().key_marked_dquo());
+ EXPECT_FALSE(tree[0].type().val_marked_dquo());
+ EXPECT_EQ(emit2str(tree), R"("this is the key": |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+}
+
+TEST(scalar, plain)
+{
+ Tree tree = parse_in_arena(scalar_yaml);
+ EXPECT_FALSE(tree[0].type().key_marked_plain());
+ EXPECT_FALSE(tree[0].type().val_marked_plain());
+ {
+ SCOPED_TRACE("val only");
+ tree._add_flags(tree[0].id(), _WIP_VAL_PLAIN);
+ EXPECT_FALSE(tree[0].type().key_marked_plain());
+ EXPECT_TRUE(tree[0].type().val_marked_plain());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: this is the multiline "val" with
+
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key+val");
+ tree._add_flags(tree[0].id(), _WIP_KEY_PLAIN);
+ EXPECT_TRUE(tree[0].type().key_marked_plain());
+ EXPECT_TRUE(tree[0].type().val_marked_plain());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: this is the multiline "val" with
+
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+ {
+ SCOPED_TRACE("key only");
+ tree._rem_flags(tree[0].id(), _WIP_VAL_PLAIN);
+ EXPECT_TRUE(tree[0].type().key_marked_plain());
+ EXPECT_FALSE(tree[0].type().val_marked_plain());
+ EXPECT_EQ(emit2str(tree), R"(this is the key: |-
+ this is the multiline "val" with
+ 'empty' lines
+)");
+ check_same_emit(tree);
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(stream, block)
+{
+ Tree tree = parse_in_arena(R"(
+---
+scalar
+%YAML 1.2
+---
+foo
+---
+bar
+)");
+ EXPECT_TRUE(tree.rootref().is_stream());
+ EXPECT_TRUE(tree.docref(0).is_doc());
+ EXPECT_TRUE(tree.docref(0).is_val());
+ EXPECT_EQ(emit2str(tree), "--- scalar %YAML 1.2\n--- foo\n--- bar\n");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), "--- scalar %YAML 1.2\n--- foo\n--- bar\n");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(seq, block)
+{
+ Tree tree = parse_in_arena("[1, 2, 3, 4, 5, 6]");
+ EXPECT_EQ(emit2str(tree), R"(- 1
+- 2
+- 3
+- 4
+- 5
+- 6
+)");
+}
+
+TEST(seq, flow_sl)
+{
+ Tree tree = parse_in_arena("[1, 2, 3, 4, 5, 6]");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"([1,2,3,4,5,6])");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(keyseq, block)
+{
+ Tree tree = parse_in_arena("{foo: [1, 2, 3, 4, 5, 6]}");
+ EXPECT_TRUE(tree.rootref().type().default_block());
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ - 1
+ - 2
+ - 3
+ - 4
+ - 5
+ - 6
+)");
+ tree = parse_in_arena("{foo: [1, [2, 3], 4, [5, 6]]}");
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ - 1
+ - - 2
+ - 3
+ - 4
+ - - 5
+ - 6
+)");
+}
+
+TEST(keyseq, flow_sl)
+{
+ Tree tree = parse_in_arena("{foo: [1, 2, 3, 4, 5, 6]}");
+ EXPECT_TRUE(tree.rootref().type().default_block());
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_FALSE(tree.rootref().type().default_block());
+ EXPECT_EQ(emit2str(tree), R"({foo: [1,2,3,4,5,6]})");
+ //
+ tree = parse_in_arena("{foo: [1, [2, 3], 4, [5, 6]]}");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"({foo: [1,[2,3],4,[5,6]]})");
+ //
+ tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ tree._add_flags(tree["foo"][1].id(), _WIP_STYLE_FLOW_SL);
+ tree._add_flags(tree["foo"][3].id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ - 1
+ - [2,3]
+ - 4
+ - [5,6]
+)");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(map, block)
+{
+ Tree tree = parse_in_arena("{1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}");
+ EXPECT_EQ(emit2str(tree), R"(1: 10
+2: 10
+3: 10
+4: 10
+5: 10
+6: 10
+)");
+}
+
+TEST(map, flow_sl)
+{
+ Tree tree = parse_in_arena("{1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"({1: 10,2: 10,3: 10,4: 10,5: 10,6: 10})");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(keymap, block)
+{
+ Tree tree = parse_in_arena("{foo: {1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}}");
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ 1: 10
+ 2: 10
+ 3: 10
+ 4: 10
+ 5: 10
+ 6: 10
+)");
+}
+
+
+TEST(keymap, flow_sl)
+{
+ Tree tree = parse_in_arena("{foo: {1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}}");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"({foo: {1: 10,2: 10,3: 10,4: 10,5: 10,6: 10}})");
+ //
+ tree = parse_in_arena("{foo: {1: 10, 2: {2: 10, 3: 10}, 4: 10, 5: {5: 10, 6: 10}}}");
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ 1: 10
+ 2:
+ 2: 10
+ 3: 10
+ 4: 10
+ 5:
+ 5: 10
+ 6: 10
+)");
+ tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"({foo: {1: 10,2: {2: 10,3: 10},4: 10,5: {5: 10,6: 10}}})");
+ tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
+ tree._add_flags(tree["foo"][1].id(), _WIP_STYLE_FLOW_SL);
+ tree._add_flags(tree["foo"][3].id(), _WIP_STYLE_FLOW_SL);
+ EXPECT_EQ(emit2str(tree), R"(foo:
+ 1: 10
+ 2: {2: 10,3: 10}
+ 4: 10
+ 5: {5: 10,6: 10}
+)");
+}
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/)
+{
+ return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_suite.cpp b/thirdparty/ryml/test/test_suite.cpp
new file mode 100644
index 000000000..22dabf8c4
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite.cpp
@@ -0,0 +1,612 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/std.hpp>
+#include <c4/yml/tree.hpp>
+#include <c4/yml/parse.hpp>
+#include <c4/yml/emit.hpp>
+#include <c4/yml/detail/print.hpp>
+#include <c4/yml/detail/checks.hpp>
+#endif
+#include "test_case.hpp"
+#include "test_suite/test_suite_common.hpp"
+#include "test_suite/test_suite_parts.hpp"
+#include "test_suite/test_suite_events.hpp"
+#include <c4/fs/fs.hpp>
+#include <c4/log/log.hpp>
+#include <gtest/gtest.h>
+
+
+/* Each case from the test suite contains:
+ *
+ * - (awkward) input yaml (in_yaml)
+ * - (somewhat standard) output equivalent (out_yaml)
+ * - (when meaningful/possible) json equivalent (in_json)
+ * - yaml parsing events (events)
+ *
+ * Running a test consists of parsing the contents above into a data
+ * structure, and then repeatedly parsing and emitting yaml in a sort
+ * of pipe. Ie, (eg for in_yaml) parse in_yaml, emit corresponding
+ * yaml, then parse this emitted yaml, and so on. Each parse/emit pair
+ * is named a processing level in this test. */
+
+
+C4_SUPPRESS_WARNING_MSVC_PUSH
+C4_SUPPRESS_WARNING_MSVC(4702) // unreachable code
+
+#define NLEVELS 4
+
+namespace c4 {
+namespace yml {
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
/** Holds the reference parsing events for a test case: the raw events
 * source, the tree built from it, and scratch trees used when
 * comparing against trees parsed from the yaml input. */
struct Events
{
    csubstr filename = {};                       // name of the events file (used for logging)
    std::string src = {};                        // raw contents of the events file
    std::string emitted_events = {};             // scratch: events re-emitted from an actual tree
    Tree tree = {};                              // reference tree, built by parsing src
    mutable Tree adjusted_tree = {};             // scratch tree (mutable so const comparisons can use it)
    mutable Tree tree_from_emitted_events = {};  // scratch: tree parsed back from emitted_events
    bool was_parsed = false;                     // whether src was already parsed into tree
    bool enabled = false;                        // set once init() is called with events data

    /** store the events source and reset all parse state */
    void init(csubstr filename_, csubstr src_)
    {
        filename = filename_;
        src.assign(src_.begin(), src_.end());
        tree.clear();
        tree.clear_arena();
        tree.reserve(10);
        was_parsed = false;
        enabled = true;
    }

    /** compare a tree parsed from yaml against the reference tree from
     * the events. Skips (via GTEST_SKIP) when the yaml source is empty. */
    void compare_trees(csubstr actual_src, Tree const& actual_tree) const
    {
        if(actual_src.empty())
            GTEST_SKIP();
        _nfo_logf("SRC:\n{}", actual_src);
        _nfo_print_tree("EXPECTED", tree);
        _nfo_print_tree("ACTUAL", actual_tree);
        test_compare(actual_tree, tree);
    }

    /** re-emit events from the actual tree, parse them back into a
     * tree, and compare that tree against the reference tree */
    void compare_emitted_events(csubstr actual_src, Tree const& tree_from_actual_src)
    {
        C4_UNUSED(actual_src);
        emit_events(&emitted_events, tree_from_actual_src);
        _nfo_logf("EXPECTED_EVENTS:\n{}", src);
        _nfo_logf("ACTUAL_EVENTS:\n{}", emitted_events);
        // we cannot directly compare the event strings,
        // so we create a tree from the emitted events,
        // and then compare the trees:
        tree_from_emitted_events.clear();
        tree_from_emitted_events.reserve(16);
        parser.parse(c4::to_csubstr(emitted_events), &tree_from_emitted_events);
        _nfo_logf("SRC:\n{}", actual_src);
        _nfo_print_tree("ACTUAL_FROM_SOURCE", tree_from_actual_src);
        _nfo_print_tree("ACTUAL_FROM_EMITTED_EVENTS", tree_from_emitted_events);
        _nfo_print_tree("EXPECTED_FROM_EVENTS", tree);
        test_compare(tree_from_emitted_events, tree);
    }

    EventsParser parser;  // parses event streams into trees (used by both methods below)
    /** lazily parse the stored events source into the reference tree.
     * Skips (via GTEST_SKIP) when the yaml source is empty. */
    void parse_events(csubstr actual_src)
    {
        if(was_parsed)
            return;
        if(actual_src.empty())
            GTEST_SKIP();
        parser.parse(c4::to_csubstr(src), &tree);
        if(tree.empty())
            tree.reserve(10);
        _nfo_print_tree("EXPECTED", tree);
        was_parsed = true;
    }
};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
/** a processing level: one parse/emit stage in the pipeline. Each
 * level takes as its source the yaml emitted by the previous level,
 * parses it, and emits it again for the next level. */
struct ProcLevel
{
    size_t level;            // index of this level in the pipeline
    ProcLevel *prev;         // previous level; null for the first level
    csubstr filename;
    std::string src;         // the yaml source parsed at this level
    c4::yml::Parser parser;  // used only when reuse==true
    c4::yml::Tree tree;      // result of parsing src
    std::string emitted;     // yaml emitted from tree

    bool immutable = false;  // parse into the arena (copying) instead of in place
    bool reuse = false;      // reuse this->parser instead of one-shot free functions
    bool was_parsed = false;
    bool was_emitted = false;

    void init(size_t level_, ProcLevel *prev_, csubstr filename_, csubstr src_, bool immutable_, bool reuse_)
    {
        level = level_;
        prev = prev_;
        filename = filename_;
        src.assign(src_.begin(), src_.end());
        immutable = immutable_;
        reuse = reuse_;
        was_parsed = false;
        was_emitted = false;
    }

    /** pull this level's source from the previous level's emitted yaml,
     * emitting the previous level first if needed */
    void receive_src(ProcLevel & prev_)
    {
        RYML_ASSERT(&prev_ == prev);
        if(!prev_.was_emitted)
        {
            _nfo_logf("level[{}] not emitted. emit!", prev_.level);
            prev_.emit();
        }
        if(src != prev_.emitted)
        {
            // source changed: invalidate any earlier parse/emit of this level
            was_parsed = false;
            was_emitted = false;
            src = prev_.emitted;
        }
    }

    /** log a value between separator lines; no-op unless RYML_NFO */
    template<class T>
    void log(const char* context, T const& v)
    {
        C4_UNUSED(context);
        C4_UNUSED(v);
        #if RYML_NFO
        constexpr const char sep[] = "+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n";
        c4::log("{}:\n{}{}{}", context, sep, v, sep);
        #endif
    }

    /** lazily parse src into tree, using the parse mode selected by
     * the immutable/reuse flags, then resolve tags */
    void parse()
    {
        if(was_parsed)
            return;
        if(prev)
        {
            receive_src(*prev);
        }
        _nfo_logf("level[{}]: parsing source:\n{}", level, src);
        if(reuse)
        {
            tree.clear();
            if(immutable)
                parser.parse_in_arena(filename, c4::to_csubstr(src), &tree);
            else
                parser.parse_in_place(filename, c4::to_substr(src), &tree);
        }
        else
        {
            if(immutable)
                tree = parse_in_arena(filename, c4::to_csubstr(src));
            else
                tree = parse_in_place(filename, c4::to_substr(src));
        }
        _nfo_print_tree("PARSED", tree);
        tree.resolve_tags();
        _nfo_print_tree("RESOLVED TAGS", tree);
        was_parsed = true;
        //_resolve_if_needed();
    }

    /** resolve anchors/references, but only when the tree has any
     * (currently not called; see the commented call in parse()) */
    void _resolve_if_needed()
    {
        ConstNodeRef root = tree.rootref();
        bool has_anchors_or_refs = root.visit([](ConstNodeRef const& node, size_t /*level*/){
            return (node.is_anchor() || node.is_ref());
        });
        if(has_anchors_or_refs)
        {
            tree.resolve();
            _nfo_print_tree("RESOLVED", tree);
        }
    }

    /** lazily emit tree into emitted, parsing first if needed */
    void emit()
    {
        if(was_emitted)
            return;
        if(!was_parsed)
        {
            _nfo_logf("level[{}] not parsed. parse!", level);
            parse();
        }
        emitrs_yaml(tree, &emitted);
        csubstr ss = to_csubstr(emitted);
        if(ss.ends_with("\n...\n"))
            emitted.resize(emitted.size() - 4); // drop the trailing "...\n" doc-end marker
        was_emitted = true;
        _nfo_logf("EMITTED:\n{}", emitted);
    }

    /** compare this level's tree against the previous level's tree */
    void compare_trees(ProcLevel & prev_)
    {
        RYML_ASSERT(&prev_ == prev);
        if(!prev_.was_parsed)
        {
            _nfo_logf("level[{}] not parsed. parse!", prev_.level);
            prev_.parse();
        }
        if(!was_parsed)
        {
            _nfo_logf("level[{}] not parsed. parse!", level);
            parse();
        }
        _nfo_print_tree("PREV_", prev_.tree);
        _nfo_print_tree("CURR", tree);
        test_compare(prev_.tree, tree);
    }

    /** compare this level's emitted yaml against the previous level's */
    void compare_emitted(ProcLevel & prev_)
    {
        RYML_ASSERT(&prev_ == prev);
        if(!prev_.was_emitted)
        {
            _nfo_logf("level[{}] not emitted. emit!", prev_.level);
            prev_.emit();
        }
        if(!was_emitted)
        {
            _nfo_logf("level[{}] not emitted. emit!", level);
            emit();
        }
        _nfo_logf("level[{}]: EMITTED:\n{}", prev_.level, prev_.emitted);
        _nfo_logf("level[{}]: EMITTED:\n{}", level, emitted);
        if(emitted != prev_.emitted)
        {
            // workaround for lack of idempotency in tag normalization.
            // when the strings differ, reparse both and compare trees instead
            Tree from_prev = parse_in_arena(to_csubstr(prev_.emitted));
            Tree from_this = parse_in_arena(to_csubstr(emitted));
            from_prev.resolve_tags();
            from_this.resolve_tags();
            test_compare(from_prev, from_this);
        }
    }
};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
/** holds data for one particular test suite approach: a chain of
 * NLEVELS parse/emit ProcLevels, plus the skip/expect-error flags
 * that gate the gtest bodies. */
struct Approach
{
    csubstr casename;
    csubstr filename;
    ProcLevel levels[NLEVELS] = {};        // the parse/emit pipeline
    AllowedFailure allowed_failure = {};   // nonzero when this case is a known failure
    bool enabled = false;                  // NOTE(review): not referenced in this file — confirm whether vestigial
    bool expect_error = false;             // the input is expected to fail parsing

    void init(csubstr casename_, csubstr filename_, csubstr src_, bool immutable_, bool reuse_, bool expect_error_)
    {
        casename = casename_;
        filename = filename_;
        allowed_failure = is_failure_expected(casename);
        size_t level_index = 0;
        ProcLevel *prev = nullptr;
        // chain the levels: each one points at the previous
        for(ProcLevel &l : levels)
        {
            l.init(level_index++, prev, filename, src_, immutable_, reuse_);
            prev = &l;
        }
        expect_error = expect_error_;
    }

    csubstr src() const { return c4::to_csubstr(levels[0].src); }
    bool skip() const { return allowed_failure; }

    /** run the first num levels of the pipeline, optionally emitting */
    void parse(size_t num, bool emit)
    {
        if(allowed_failure)
            GTEST_SKIP();
        for(size_t i = 0; i < num; ++i)
        {
            levels[i].parse();
            if(emit)
                levels[i].emit();
            if(i + 1 < num)
                levels[i+1].receive_src(levels[i]);
        }
    }

    /** compare each level's tree against the previous level's */
    void compare_trees(size_t num)
    {
        if(allowed_failure)
            GTEST_SKIP();
        for(size_t i = 1; i < num; ++i)
            levels[i].compare_trees(levels[i-1]);
    }
    /** compare levels pairwise against another approach.
     * NOTE(review): ProcLevel::compare_trees asserts &prev_ == prev,
     * which cannot hold for another approach's level — confirm this
     * overload is actually exercised. */
    void compare_trees(size_t num, Approach & other)
    {
        if(allowed_failure)
            GTEST_SKIP();
        for(size_t i = 0; i < num; ++i)
            levels[i].compare_trees(other.levels[i]);
    }

    /** compare each level's emitted yaml against the previous level's */
    void compare_emitted(size_t num)
    {
        if(allowed_failure)
            GTEST_SKIP();
        for(size_t i = 1; i < num; ++i)
            levels[i].compare_emitted(levels[i-1]);
    }
    /** compare emitted yaml pairwise against another approach (see the
     * NOTE on compare_trees above; the same assertion applies) */
    void compare_emitted(size_t num, Approach & other)
    {
        if(allowed_failure)
            GTEST_SKIP();
        for(size_t i = 0; i < num; ++i)
            levels[i].compare_emitted(other.levels[i]);
    }

    /** compare the level-0 tree against the tree built from the events
     * file. json inputs carry no events, hence the skip. */
    void compare_events(Events *events)
    {
        if(allowed_failure || filename.ends_with(".json"))
            GTEST_SKIP();
        events->parse_events(src());
        parse(1, /*emit*/false);
        events->compare_trees(src(), levels[0].tree);
    }

    /** round-trip the level-0 tree through emitted events and compare */
    void compare_emitted_events(Events *events)
    {
        if(allowed_failure || filename.ends_with(".json"))
            GTEST_SKIP();
        events->parse_events(src());
        parse(1, /*emit*/false);
        events->compare_emitted_events(src(), levels[0].tree);
    }

    /** for error cases: check that parsing the input does fail */
    void check_expected_error()
    {
        if(allowed_failure)
            GTEST_SKIP();
        ExpectError::do_check(&levels[0].tree, [this]{
            levels[0].parse();
        });
    }

};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
/** Each approach can be read from mutable/immutable yaml source and
 * with/without reuse, and with unix (\n) or windows (\r\n) line
 * endings: 2 x 2 x 2 = 8 approaches per test case. */
struct Subject
{
    Approach unix_arena;
    Approach unix_arena_reuse;
    Approach unix_inplace;
    Approach unix_inplace_reuse;

    Approach windows_arena;
    Approach windows_arena_reuse;
    Approach windows_inplace;
    Approach windows_inplace_reuse;

    std::string unix_src;     // input normalized to \n line endings
    std::string windows_src;  // input converted to \r\n line endings

    void init(csubstr casename, csubstr filename, csubstr src, bool expect_error)
    {
        // normalize to unix line endings first; src now points into unix_src
        src = replace_all("\r", "", src, &unix_src);

        unix_arena .init(casename, filename, src, /*immutable*/true , /*reuse*/false, expect_error);
        unix_arena_reuse.init(casename, filename, src, /*immutable*/true , /*reuse*/true , expect_error);
        unix_inplace .init(casename, filename, src, /*immutable*/false, /*reuse*/false, expect_error);
        unix_inplace_reuse.init(casename, filename, src, /*immutable*/false, /*reuse*/true , expect_error);

        // then build the windows variant from the normalized unix source
        src = replace_all("\n", "\r\n", src, &windows_src);

        windows_arena .init(casename, filename, src, /*immutable*/true , /*reuse*/false, expect_error);
        windows_arena_reuse.init(casename, filename, src, /*immutable*/true , /*reuse*/true , expect_error);
        windows_inplace .init(casename, filename, src, /*immutable*/false, /*reuse*/false, expect_error);
        windows_inplace_reuse.init(casename, filename, src, /*immutable*/false, /*reuse*/true , expect_error);
    }
};
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
// holds everything loaded from one test-suite case directory

/** all the ways to process a test case are available through this
 * class. Tests are defined below and use only one of these. */
struct SuiteCase
{
    csubstr case_title;
    csubstr case_dir;
    std::string filename;             // path of the yaml input file
    std::string file_contents;        // raw yaml input
    std::string events_filename;      // path of the test.event file
    std::string events_file_contents; // raw events input

    Subject input;   // the 8 parsing approaches for the yaml input
    Events events;   // the reference events for this case
    bool expect_error;

    /** loads the several types of tests from an input test suite file */
    SuiteCase(const char *case_title_, const char* case_dir_, const char *input_file)
    {
        using namespace c4;
        using c4::to_csubstr;

        // "error" is a sentinel meaning: use in.yaml and expect a parse failure
        if(to_csubstr(input_file) == "error")
            input_file = "in.yaml";

        case_title = to_csubstr(case_title_);

        case_dir = to_csubstr(case_dir_);
        RYML_CHECK(case_dir.find('\\') == yml::npos); // require forward slashes
        // NOTE(review): passing a csubstr to a printf-style "%s" is suspicious
        // (csubstr is not guaranteed NUL-terminated) — confirm C4_CHECK_MSG semantics
        C4_CHECK_MSG(fs::dir_exists(case_dir.str), "dir not found: '%s'", case_dir);

        filename = catrs<std::string>(case_dir, '/', to_csubstr(input_file));
        C4_CHECK_MSG(fs::file_exists(filename.c_str()), "file not found: '%s'", filename.c_str());
        log("testing suite case: {} {} ({})", case_title, filename, case_dir);

        // the presence of an "error" file in the case dir marks an error case
        std::string errfile = catrs<std::string>(to_csubstr(case_dir_), "/error");
        expect_error = fs::file_exists(errfile.c_str());

        fs::file_get_contents(filename.c_str(), &file_contents);
        input.init(case_title, to_csubstr(filename), to_csubstr(file_contents), expect_error);

        events_filename = catrs<std::string>(case_dir, "/test.event");
        C4_CHECK(fs::file_exists(events_filename.c_str()));
        fs::file_get_contents(events_filename.c_str(), &events_file_contents);
        events.init(to_csubstr(events_filename), to_csubstr(events_file_contents));

        dump("~~~ case: " , case_title , "~~~\n",
             "~~~ file: " , filename , "~~~\n",
             "~~~ input:\n" , to_csubstr(input.unix_arena.levels[0].src), "~~~\n",
             "~~~ events:\n" , events.src , "~~~\n");
    }

};
+
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+
// a global holding the test case data; set by main() before RUN_ALL_TESTS()
SuiteCase* g_suite_case = nullptr;
bool g_do_subcases = true; // NOTE(review): not referenced in this file — confirm whether still needed
+
+
+
/** defines the full battery of gtest tests for one Approach member of
 * Subject (eg unix_arena, windows_inplace_reuse, ...). The TEST_P
 * parameter (0..NLEVELS-1) selects how many processing levels to run.
 * The *_events tests compare against the events file, and the *_errors
 * test runs only for cases that are expected to fail parsing. */
#define DEFINE_TESTS(which) \
 \
 \
struct which : public ::testing::TestWithParam<size_t> \
{ \
}; \
 \
 \
TEST_P(which, parse) \
{ \
    RYML_CHECK(GetParam() < NLEVELS); \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.parse(1 + GetParam(), false); \
} \
 \
 \
TEST_P(which, compare_trees) \
{ \
    RYML_CHECK(GetParam() < NLEVELS); \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.compare_trees(1 + GetParam()); \
} \
 \
 \
TEST_P(which, emit) \
{ \
    RYML_CHECK(GetParam() < NLEVELS); \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.parse(1 + GetParam(), true); \
} \
 \
 \
TEST_P(which, compare_emitted) \
{ \
    RYML_CHECK(GetParam() < NLEVELS); \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.compare_emitted(1 + GetParam()); \
} \
 \
/*-----------------------------------------------*/ \
 \
TEST(which##_events, compare) \
{ \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.compare_events(&g_suite_case->events); \
} \
 \
TEST(which##_events, emit_events) \
{ \
    if(g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.compare_emitted_events(&g_suite_case->events); \
} \
 \
/*-----------------------------------------------*/ \
 \
TEST(which##_errors, check_expected_error) \
{ \
    if(!g_suite_case->expect_error) \
        GTEST_SKIP(); \
    g_suite_case->input.which.check_expected_error(); \
} \
 \
 \
INSTANTIATE_TEST_SUITE_P(_, which, testing::Range<size_t>(0, NLEVELS))
+
+
// instantiate the test battery for each of the 8 approaches in Subject
DEFINE_TESTS(unix_arena);
DEFINE_TESTS(unix_inplace);
DEFINE_TESTS(unix_arena_reuse);
DEFINE_TESTS(unix_inplace_reuse);
DEFINE_TESTS(windows_arena);
DEFINE_TESTS(windows_inplace);
DEFINE_TESTS(windows_arena_reuse);
DEFINE_TESTS(windows_inplace_reuse);
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+Case const* get_case(csubstr /*name*/) { return nullptr; }
+
+} // namespace yml
+} // namespace c4
+
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+int main(int argc, char* argv[])
+{
+ c4::dump("$");
+ for(int i = 0; i < argc; ++i)
+ c4::dump(' ', c4::to_csubstr(argv[i]));
+ c4::dump("\n");
+
+ // make gtest parse its args
+ testing::InitGoogleTest(&argc, argv);
+
+ // now we have only our args to consider
+ if(argc != 4)
+ {
+ log("usage:\n{} <test_name> <test-dir> <input-file>", c4::to_csubstr(argv[0]));
+ return 1;
+ }
+
+ // load the test case from the suite file
+ c4::yml::SuiteCase suite_case(argv[1], argv[2], argv[3]);
+ c4::yml::g_suite_case = &suite_case;
+
+ return RUN_ALL_TESTS();
+}
+
+C4_SUPPRESS_WARNING_MSVC_PUSH
diff --git a/thirdparty/ryml/test/test_suite/test_suite_common.hpp b/thirdparty/ryml/test/test_suite/test_suite_common.hpp
new file mode 100644
index 000000000..9cfd89f1f
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_common.hpp
@@ -0,0 +1,44 @@
+#ifndef C4_YML_TEST_SUITE_COMMON_HPP_
+#define C4_YML_TEST_SUITE_COMMON_HPP_
+
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/std.hpp>
+#include <c4/yml/tree.hpp>
+#include <c4/yml/parse.hpp>
+#include <c4/yml/emit.hpp>
+#include <c4/yml/detail/print.hpp>
+#include <c4/yml/detail/checks.hpp>
+#endif
+
+#include <c4/fs/fs.hpp>
+#include <c4/log/log.hpp>
+
+#include "test_case.hpp"
+#include <gtest/gtest.h>
+
// RYML_NFO enables verbose test-suite logging; it piggybacks on RYML_DBG
#define RYML_NFO (0 || RYML_DBG)

#if RYML_NFO
// verbose logging helpers, each prefixed with file:line
#define _nfo_print_tree(title, tree) do { c4::log("{}:{}: " title ":", __FILE__, __LINE__); print_tree(tree); c4::yml::emit(tree, stdout); fflush(stdout); } while(0)
#define _nfo_logf(fmt, ...) do { c4::log("{}:{}: " fmt , __FILE__, __LINE__, __VA_ARGS__); fflush(stdout); } while(0)
#define _nfo_log(fmt) do { c4::log("{}:{}: " fmt , __FILE__, __LINE__); fflush(stdout); } while(0)
#define _nfo_printf(...) printf(__VA_ARGS__)
#else
// no-op versions when verbose logging is disabled
#define _nfo_print_tree(title, tree)
#define _nfo_logf(fmt, ...)
#define _nfo_log(fmt)
#define _nfo_printf(...)
#endif
// line-context variants: require `linenum` and `line` variables in scope at the call site
#define _nfo_llogf(fmt, ...) _nfo_logf("line[{}]: '{}': " fmt, linenum, line, __VA_ARGS__)
#define _nfo_llog(fmt) _nfo_logf("line[{}]: '{}': " fmt, linenum, line)
+
+
+namespace c4 {
+namespace yml {
+
+
+} // namespace yml
+} // namespace c4
+
+
+#endif /* C4_YML_TEST_SUITE_COMMON_HPP_ */
diff --git a/thirdparty/ryml/test/test_suite/test_suite_events.cpp b/thirdparty/ryml/test/test_suite/test_suite_events.cpp
new file mode 100644
index 000000000..b359421b5
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_events.cpp
@@ -0,0 +1,607 @@
+#include "test_suite_events.hpp"
+#include "test_suite_common.hpp"
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/detail/stack.hpp>
+#endif
+
+namespace c4 {
+namespace yml {
+
+namespace /*anon*/ {
+
/** the presentation style of a scalar in the event stream */
struct ScalarType
{
    typedef enum {
        PLAIN = 0,  // event text ":scalar"
        SQUOTED,    // event text "'scalar"
        DQUOTED,    // event text "\"scalar"
        LITERAL,    // event text "|scalar"
        FOLDED      // event text ">scalar"
    } ScalarType_e;

    ScalarType_e val = PLAIN;
    bool operator== (ScalarType_e v) const { return val == v; }
    bool operator!= (ScalarType_e v) const { return val != v; }
    ScalarType& operator= (ScalarType_e v) { val = v; return *this; }

    /** name of the enum value, for logging */
    csubstr to_str() const
    {
        switch(val)
        {
        case ScalarType::PLAIN: return csubstr("PLAIN");
        case ScalarType::SQUOTED: return csubstr("SQUOTED");
        case ScalarType::DQUOTED: return csubstr("DQUOTED");
        case ScalarType::LITERAL: return csubstr("LITERAL");
        case ScalarType::FOLDED: return csubstr("FOLDED");
        }
        C4_ERROR("");
        return csubstr("");
    }

    bool is_quoted() const { return val == ScalarType::SQUOTED || val == ScalarType::DQUOTED; }
};
+
+
/** a csubstr plus a was-set flag; distinguishes "empty scalar" from
 * "no scalar at all" */
struct OptionalScalar
{
    csubstr val = {};
    bool was_set = false;
    inline operator csubstr() const { return get(); }     // asserts if not set
    inline operator bool() const { return was_set; }
    void operator= (csubstr v) { val = v; was_set = true; }
    csubstr get() const { RYML_ASSERT(was_set); return val; }
};
+
#if RYML_NFO
/** make OptionalScalar formattable by c4::log()/_nfo_logf(); returns
 * the required size even when the buffer is too small */
size_t to_chars(c4::substr buf, OptionalScalar const& s)
{
    if(!s)
        return 0u;
    if(s.val.len <= buf.len)  // copy only when it fits
        memcpy(buf.str, s.val.str, s.val.len);
    return s.val.len;
}
#endif
+
/** unescape the \n, \t and \\ sequences used by the events format.
 * Returns str unchanged when no escape is present; otherwise returns
 * an unescaped copy allocated in the tree's arena. */
csubstr filtered_scalar(csubstr str, ScalarType scalar_type, Tree *tree)
{
    (void)scalar_type;
    csubstr tokens[] = {R"(\n)", R"(\t)", R"(\\)"};
    if(!str.first_of_any_iter(std::begin(tokens), std::end(tokens)))
        return str; // nothing to unescape
    substr buf = tree->alloc_arena(str.len); // we are going to always replace with less characters
    size_t strpos = 0; // source read position (start of the pending unprocessed run)
    size_t bufpos = 0; // destination write position
    // copy the pending literal run [strpos, pos) into buf
    auto append_str = [&](size_t pos){
        csubstr rng = str.range(strpos, pos);
        memcpy(buf.str + bufpos, rng.str, rng.len);
        bufpos += rng.len;
        strpos = pos;
    };
    size_t i;
    // write a replacement string and skip skipstr source characters
    auto append_chars = [&](csubstr s, size_t skipstr){
        memcpy(buf.str + bufpos, s.str, s.len);
        bufpos += s.len;
        i += skipstr - 1; // incremented at the loop
        strpos += skipstr;
    };
    for(i = 0; i < str.len; ++i)
    {
        char curr = str[i];
        char next1 = i+1 < str.len ? str[i+1] : '\0';
        if(curr == '\\')
        {
            if(next1 == '\\')
            {
                // doubly-escaped sequences: keep them escaped (single backslash)
                char next2 = i+2 < str.len ? str[i+2] : '\0';
                if(next2 == 'n')
                {
                    append_str(i);
                    append_chars(R"(\n)", 3u); // '\\n' -> '\n'
                }
                else if(next2 == 't')
                {
                    append_str(i);
                    append_chars(R"(\t)", 3u); // '\\t' -> '\t'
                }
                else
                {
                    append_str(i);
                    append_chars(R"(\)", 2u); // '\\' -> '\'
                }
            }
            else if(next1 == 'n')
            {
                append_str(i);
                append_chars("\n", 2u); // '\n' -> newline
            }
            else if(next1 == 't')
            {
                append_str(i);
                append_chars("\t", 2u); // '\t' -> tab
            }
        }
    }
    append_str(str.len); // flush the trailing literal run
    buf = buf.first(bufpos);
    _nfo_logf("filtering: result=~~~{}~~~", buf);
    return buf;
}
+
/** a scalar parsed from an event line, together with its optional
 * anchor, reference, tag and presentation style */
struct Scalar
{
    OptionalScalar scalar = {};
    OptionalScalar anchor = {};
    OptionalScalar ref = {};
    OptionalScalar tag = {};
    ScalarType type = {};
    // an anchor or tag can only exist together with a scalar
    inline operator bool() const { if(anchor || tag) { RYML_ASSERT(scalar); } return scalar.was_set; }
    /** apply ref/anchor/tag/quoted properties to the node's key */
    void add_key_props(Tree *tree, size_t node) const
    {
        if(ref)
        {
            _nfo_logf("node[{}]: set key ref: '{}'", node, ref);
            tree->set_key_ref(node, ref);
        }
        if(anchor)
        {
            _nfo_logf("node[{}]: set key anchor: '{}'", node, anchor);
            tree->set_key_anchor(node, anchor);
        }
        if(tag)
        {
            csubstr ntag = normalize_tag(tag);
            _nfo_logf("node[{}]: set key tag: '{}' -> '{}'", node, tag, ntag);
            tree->set_key_tag(node, ntag);
        }
        if(type.is_quoted())
        {
            _nfo_logf("node[{}]: set key as quoted", node);
            tree->_add_flags(node, KEYQUO);
        }
    }
    /** apply ref/anchor/tag/quoted properties to the node's val */
    void add_val_props(Tree *tree, size_t node) const
    {
        if(ref)
        {
            _nfo_logf("node[{}]: set val ref: '{}'", node, ref);
            tree->set_val_ref(node, ref);
        }
        if(anchor)
        {
            _nfo_logf("node[{}]: set val anchor: '{}'", node, anchor);
            tree->set_val_anchor(node, anchor);
        }
        if(tag)
        {
            csubstr ntag = normalize_tag(tag);
            _nfo_logf("node[{}]: set val tag: '{}' -> '{}'", node, tag, ntag);
            tree->set_val_tag(node, ntag);
        }
        if(type.is_quoted())
        {
            _nfo_logf("node[{}]: set val as quoted", node);
            tree->_add_flags(node, VALQUO);
        }
    }
    /** unescape this scalar, allocating in the tree's arena if needed */
    csubstr filtered_scalar(Tree *tree) const
    {
        return ::c4::yml::filtered_scalar(scalar, type, tree);
    }
};
+
/** consume a leading "&anchor" and/or "<tag>" from an event line,
 * storing them in *anchor / *tag; returns the remaining tokens */
csubstr parse_anchor_and_tag(csubstr tokens, OptionalScalar *anchor, OptionalScalar *tag)
{
    *anchor = OptionalScalar{};
    *tag = OptionalScalar{};
    if(tokens.begins_with('&'))
    {
        size_t pos = tokens.first_of(' ');
        if(pos == (size_t)csubstr::npos)
        {
            // the whole token is the anchor (minus the leading '&')
            *anchor = tokens.sub(1);
            tokens = {};
        }
        else
        {
            *anchor = tokens.first(pos).sub(1);
            tokens = tokens.right_of(pos);
        }
        _nfo_logf("anchor: {}", *anchor);
    }
    if(tokens.begins_with('<'))
    {
        // the tag includes the angle brackets, eg "<tag:yaml.org,2002:str>"
        size_t pos = tokens.find('>');
        RYML_ASSERT(pos != (size_t)csubstr::npos);
        *tag = tokens.first(pos + 1);
        tokens = tokens.right_of(pos).triml(' ');
        _nfo_logf("tag: {}", *tag);
    }
    return tokens;
}
+
+} // namespace /*anon*/
+
/** build a ryml Tree from a yaml-test-suite event stream. Dispatches
 * on the leading event token of each line (=VAL, =ALI, +SEQ, +MAP,
 * -SEQ, -MAP, +DOC, -DOC, +STR, -STR), maintaining a stack of open
 * containers and a pending map key. */
void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
{
    struct ParseLevel { size_t tree_node; };
    detail::stack<ParseLevel> m_stack = {}; // stack of currently open containers
    Tree &C4_RESTRICT tree = *tree_;
    size_t linenum = 0;
    Scalar key = {}; // pending map key, waiting for its value
    _nfo_logf("parsing events! src:\n{}", src);
    for(csubstr line : src.split('\n'))
    {
        line = line.trimr('\r');
        line = line.triml(' ');
        _nfo_printf("\n\n-----------------------\n");
        _nfo_llog("");
        _nfo_logf("line[{}]: top={} type={}", linenum, m_stack.empty() ? tree.root_id() : m_stack.top().tree_node, NodeType::type_str(tree.type(m_stack.empty() ? tree.root_id() : m_stack.top().tree_node)));
        if(line.begins_with("=VAL "))
        {
            // scalar value event: parse style+props, then attach to the top container
            line = line.stripl("=VAL ");
            ASSERT_GE(m_stack.size(), 0u);
            Scalar curr = {};
            line = parse_anchor_and_tag(line, &curr.anchor, &curr.tag);
            if(line.begins_with('"'))
            {
                _nfo_llog("double-quoted scalar!");
                curr.scalar = line.sub(1);
                curr.type = ScalarType::DQUOTED;
            }
            else if(line.begins_with('\''))
            {
                _nfo_llog("single-quoted scalar!");
                curr.scalar = line.sub(1);
                curr.type = ScalarType::SQUOTED;
            }
            else if(line.begins_with('|'))
            {
                _nfo_llog("block literal scalar!");
                curr.scalar = line.sub(1);
                curr.type = ScalarType::LITERAL;
            }
            else if(line.begins_with('>'))
            {
                _nfo_llog("block folded scalar!");
                curr.scalar = line.sub(1);
                curr.type = ScalarType::FOLDED;
            }
            else
            {
                _nfo_llog("plain scalar");
                ASSERT_TRUE(line.begins_with(':'));
                curr.scalar = line.sub(1);
            }
            _nfo_logf("parsed scalar: '{}'", curr.scalar);
            if(m_stack.empty())
            {
                _nfo_log("stack was empty, pushing root as DOC...");
                //tree._p(tree.root_id())->m_type.add(DOC);
                m_stack.push({tree.root_id()});
            }
            ParseLevel &top = m_stack.top();
            if(tree.is_seq(top.tree_node))
            {
                // seq: append the scalar as a new child val
                _nfo_logf("is seq! seq_id={}", top.tree_node);
                ASSERT_FALSE(key);
                ASSERT_TRUE(curr);
                _nfo_logf("seq[{}]: adding child", top.tree_node);
                size_t node = tree.append_child(top.tree_node);
                NodeType_e as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
                _nfo_logf("seq[{}]: child={} val='{}' as_doc=", top.tree_node, node, curr.scalar, NodeType::type_str(as_doc));
                tree.to_val(node, curr.filtered_scalar(&tree), as_doc);
                curr.add_val_props(&tree, node);
            }
            else if(tree.is_map(top.tree_node))
            {
                // map: first scalar becomes the pending key, second the value
                _nfo_logf("is map! map_id={}", top.tree_node);
                if(!key)
                {
                    _nfo_logf("store key='{}' anchor='{}' tag='{}' type={}", curr.scalar, curr.anchor, curr.tag, curr.type.to_str());
                    key = curr;
                }
                else
                {
                    _nfo_logf("map[{}]: adding child", top.tree_node);
                    size_t node = tree.append_child(top.tree_node);
                    NodeType_e as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
                    _nfo_logf("map[{}]: child={} key='{}' val='{}' as_doc={}", top.tree_node, node, key.scalar, curr.scalar, NodeType::type_str(as_doc));
                    tree.to_keyval(node, key.filtered_scalar(&tree), curr.filtered_scalar(&tree), as_doc);
                    key.add_key_props(&tree, node);
                    curr.add_val_props(&tree, node);
                    _nfo_logf("clear key='{}'", key.scalar);
                    key = {};
                }
            }
            else
            {
                // not a container: the top node is a doc-level scalar
                _nfo_logf("setting tree_node={} to DOCVAL...", top.tree_node);
                tree.to_val(top.tree_node, curr.filtered_scalar(&tree), DOC);
                curr.add_val_props(&tree, top.tree_node);
            }
        }
        else if(line.begins_with("=ALI "))
        {
            // alias event: attach a *reference as key or val
            csubstr alias = line.stripl("=ALI ");
            _nfo_logf("REF token: {}", alias);
            ParseLevel top = m_stack.top();
            if(tree.is_seq(top.tree_node))
            {
                _nfo_logf("node[{}] is seq: set {} as val ref", top.tree_node, alias);
                ASSERT_FALSE(key);
                size_t node = tree.append_child(top.tree_node);
                tree.to_val(node, alias);
                tree.set_val_ref(node, alias);
            }
            else if(tree.is_map(top.tree_node))
            {
                if(key)
                {
                    _nfo_logf("node[{}] is map and key '{}' is pending: set {} as val ref", top.tree_node, key.scalar, alias);
                    size_t node = tree.append_child(top.tree_node);
                    tree.to_keyval(node, key.filtered_scalar(&tree), alias);
                    key.add_key_props(&tree, node);
                    tree.set_val_ref(node, alias);
                    _nfo_logf("clear key='{}'", key);
                    key = {};
                }
                else
                {
                    _nfo_logf("node[{}] is map and no key is pending: save {} as key ref", top.tree_node, alias);
                    key.scalar = alias;
                    key.ref = alias;
                }
            }
            else
            {
                C4_ERROR("ALI event requires map or seq");
            }
        }
        else if(line.begins_with("+SEQ"))
        {
            // open a sequence, possibly as doc root, keyed seq, or child seq
            _nfo_log("pushing SEQ");
            OptionalScalar anchor = {};
            OptionalScalar tag = {};
            csubstr more_tokens = line.stripl("+SEQ").triml(' ');
            if(more_tokens.begins_with('['))
            {
                // flow-style marker "[]" carries no extra information here
                ASSERT_TRUE(more_tokens.begins_with("[]"));
                more_tokens = more_tokens.offs(2, 0).triml(' ');
            }
            parse_anchor_and_tag(more_tokens, &anchor, &tag);
            size_t node = tree.root_id();
            if(m_stack.empty())
            {
                _nfo_log("stack was empty, set root to SEQ");
                tree._add_flags(node, SEQ);
                m_stack.push({node});
                ASSERT_FALSE(key); // for the key to exist, the parent must exist and be a map
            }
            else
            {
                size_t parent = m_stack.top().tree_node;
                _nfo_logf("stack was not empty. parent={}", parent);
                ASSERT_NE(parent, (size_t)NONE);
                NodeType more_flags = NOTYPE;
                if(tree.is_doc(parent) && !(tree.is_seq(parent) || tree.is_map(parent)))
                {
                    // an empty DOC on top: turn it into the seq itself
                    _nfo_logf("set node to parent={}, add DOC", parent);
                    node = parent;
                    more_flags.add(DOC);
                }
                else
                {
                    _nfo_logf("add child to parent={}", parent);
                    node = tree.append_child(parent);
                    m_stack.push({node});
                    _nfo_logf("add child to parent={}: child={}", parent, node);
                }
                if(key)
                {
                    _nfo_logf("has key, set to keyseq: parent={} child={} key='{}'", parent, node, key);
                    ASSERT_EQ(tree.is_map(parent) || node == parent, true);
                    tree.to_seq(node, key.filtered_scalar(&tree), more_flags);
                    key.add_key_props(&tree, node);
                    _nfo_logf("clear key='{}'", key.scalar);
                    key = {};
                }
                else
                {
                    if(tree.is_map(parent))
                    {
                        _nfo_logf("has null key, set to keyseq: parent={} child={}", parent, node);
                        ASSERT_EQ(tree.is_map(parent) || node == parent, true);
                        tree.to_seq(node, csubstr{}, more_flags);
                    }
                    else
                    {
                        _nfo_logf("no key, set to seq: parent={} child={}", parent, node);
                        tree.to_seq(node, more_flags);
                    }
                }
            }
            if(tag)
                tree.set_val_tag(node, normalize_tag(tag));
            if(anchor)
                tree.set_val_anchor(node, anchor);
        }
        else if(line.begins_with("+MAP"))
        {
            // open a map; mirrors the +SEQ logic above
            _nfo_log("pushing MAP");
            OptionalScalar anchor = {};
            OptionalScalar tag = {};
            csubstr more_tokens = line.stripl("+MAP").triml(' ');
            if(more_tokens.begins_with('{'))
            {
                // flow-style marker "{}" carries no extra information here
                ASSERT_TRUE(more_tokens.begins_with("{}"));
                more_tokens = more_tokens.offs(2, 0).triml(' ');
            }
            parse_anchor_and_tag(more_tokens, &anchor, &tag);
            size_t node = tree.root_id();
            if(m_stack.empty())
            {
                _nfo_log("stack was empty, set root to MAP");
                tree._add_flags(node, MAP);
                m_stack.push({node});
                ASSERT_FALSE(key); // for the key to exist, the parent must exist and be a map
            }
            else
            {
                size_t parent = m_stack.top().tree_node;
                _nfo_logf("stack was not empty. parent={}", parent);
                ASSERT_NE(parent, (size_t)NONE);
                NodeType more_flags = NOTYPE;
                if(tree.is_doc(parent) && !(tree.is_seq(parent) || tree.is_map(parent)))
                {
                    // an empty DOC on top: turn it into the map itself
                    _nfo_logf("set node to parent={}, add DOC", parent);
                    node = parent;
                    more_flags.add(DOC);
                }
                else
                {
                    _nfo_logf("add child to parent={}", parent);
                    node = tree.append_child(parent);
                    m_stack.push({node});
                    _nfo_logf("add child to parent={}: child={}", parent, node);
                }
                if(key)
                {
                    _nfo_logf("has key, set to keymap: parent={} child={} key='{}'", parent, node, key);
                    ASSERT_EQ(tree.is_map(parent) || node == parent, true);
                    tree.to_map(node, key.filtered_scalar(&tree), more_flags);
                    key.add_key_props(&tree, node);
                    _nfo_logf("clear key='{}'", key.scalar);
                    key = {};
                }
                else
                {
                    if(tree.is_map(parent))
                    {
                        _nfo_logf("has null key, set to keymap: parent={} child={}", parent, node);
                        ASSERT_EQ(tree.is_map(parent) || node == parent, true);
                        tree.to_map(node, csubstr{}, more_flags);
                    }
                    else
                    {
                        _nfo_logf("no key, set to map: parent={} child={}", parent, node);
                        tree.to_map(node, more_flags);
                    }
                }
            }
            if(tag)
                tree.set_val_tag(node, normalize_tag(tag));
            if(anchor)
                tree.set_val_anchor(node, anchor);
        }
        else if(line.begins_with("-SEQ"))
        {
            // close the current sequence
            _nfo_logf("popping SEQ, empty={}", m_stack.empty());
            size_t node;
            if(m_stack.empty())
                node = tree.root_id();
            else
                node = m_stack.pop().tree_node;
            ASSERT_TRUE(tree.is_seq(node)) << "node=" << node;
        }
        else if(line.begins_with("-MAP"))
        {
            // close the current map
            _nfo_logf("popping MAP, empty={}", m_stack.empty());
            size_t node;
            if(m_stack.empty())
                node = tree.root_id();
            else
                node = m_stack.pop().tree_node;
            ASSERT_TRUE(tree.is_map(node)) << "node=" << node;
        }
        else if(line.begins_with("+DOC"))
        {
            // open a document; may require rearranging the root as a STREAM
            csubstr rem = line.stripl("+DOC").triml(' ');
            _nfo_logf("pushing DOC: {}", rem);
            size_t node = tree.root_id();
            auto is_sep = rem.first_of_any("---\n", "--- ", "---\r") || rem.ends_with("---");
            ASSERT_EQ(key, false); // for the key to exist, the parent must exist and be a map
            if(m_stack.empty())
            {
                _nfo_log("stack was empty");
                ASSERT_EQ(node, tree.root_id());
                if(tree.is_stream(node))
                {
                    _nfo_log("there is already a stream, append a DOC");
                    node = tree.append_child(node);
                    tree.to_doc(node);
                    m_stack.push({node});
                }
                else if(is_sep)
                {
                    _nfo_logf("separator was specified: {}", rem);
                    if((!tree.is_container(node)) && (!tree.is_doc(node)))
                    {
                        tree._add_flags(node, STREAM);
                        node = tree.append_child(node);
                        _nfo_logf("create STREAM at {} and add DOC child={}", tree.root_id(), node);
                        tree.to_doc(node);
                        m_stack.push({node});
                    }
                    else
                    {
                        _nfo_log("rearrange root as STREAM");
                        tree.set_root_as_stream();
                        node = tree.append_child(tree.root_id());
                        _nfo_logf("added doc as STREAM child: {}", node);
                        tree.to_doc(node);
                        m_stack.push({node});
                    }
                }
                else
                {
                    if(tree.is_doc(node))
                    {
                        _nfo_log("rearrange root as STREAM");
                        tree.set_root_as_stream();
                        m_stack.push({node});
                    }
                }
            }
            else
            {
                size_t parent = m_stack.top().tree_node;
                _nfo_logf("add DOC to parent={}", parent);
                ASSERT_NE(parent, (size_t)NONE);
                node = tree.append_child(parent);
                _nfo_logf("child DOC={}", node);
                tree.to_doc(node);
                m_stack.push({node});
            }
        }
        else if(line.begins_with("-DOC"))
        {
            // close the current document
            _nfo_log("popping DOC");
            if(!m_stack.empty())
                m_stack.pop();
        }
        else if(line.begins_with("+STR"))
        {
            // stream start: must be the very first event
            ASSERT_EQ(m_stack.size(), 0u);
        }
        else if(line.begins_with("-STR"))
        {
            // stream end: at most the stream node may remain on the stack
            ASSERT_LE(m_stack.size(), 1u);
            if(!m_stack.empty())
                m_stack.pop();
        }
        else if(line.empty())
        {
            // nothing to do
        }
        else
        {
            C4_ERROR("unknown event: '%.*s'", (int)line.len, line.str);
        }
        linenum++;
    }
}
+
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_suite/test_suite_events.hpp b/thirdparty/ryml/test/test_suite/test_suite_events.hpp
new file mode 100644
index 000000000..3b3cdbffb
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_events.hpp
@@ -0,0 +1,45 @@
+#ifndef C4_YML_TEST_SUITE_EVENTS_HPP_
+#define C4_YML_TEST_SUITE_EVENTS_HPP_
+
+#ifdef RYML_SINGLE_HEADER
+#include <ryml_all.hpp>
+#else
+#include <c4/yml/tree.hpp>
+#endif
+
+namespace c4 {
+namespace yml {
+
+// Parses a yaml-test-suite event stream (lines such as +STR/+DOC/+MAP/=VAL)
+// and reconstructs the corresponding tree into @p tree.
+struct EventsParser
+{
+    // @param src  the event-stream text to parse
+    // @param tree destination tree, overwritten with the parsed structure
+    void parse(csubstr src, Tree *C4_RESTRICT tree);
+};
+
+size_t emit_events(substr buf, Tree const& C4_RESTRICT tree);
+
+// Emit the event stream for @p tree into @p container, resizing it as needed.
+// Uses the two-pass protocol of the substr overload: a first call measures the
+// required size; if the container is too small it is grown and the emission is
+// repeated, then the container is trimmed to the exact emitted length.
+template<class CharContainer>
+void emit_events(CharContainer *container, Tree const& C4_RESTRICT tree)
+{
+    size_t ret = emit_events(to_substr(*container), tree);
+    if(ret > container->size())
+    {
+        container->resize(ret);
+        ret = emit_events(to_substr(*container), tree);
+        C4_CHECK(ret == container->size());
+    }
+    container->resize(ret);
+}
+
+// Convenience overload: emit the event stream for @p tree and return it in a
+// freshly constructed container (e.g. emit_events<std::string>(tree)).
+template<class CharContainer>
+CharContainer emit_events(Tree const& C4_RESTRICT tree)
+{
+    CharContainer result;
+    emit_events(&result, tree);
+    return result;
+}
+
+} // namespace yml
+} // namespace c4
+
+
+#endif /* C4_YML_TEST_SUITE_EVENTS_HPP_ */
diff --git a/thirdparty/ryml/test/test_suite/test_suite_events_emitter.cpp b/thirdparty/ryml/test/test_suite/test_suite_events_emitter.cpp
new file mode 100644
index 000000000..6c22b1e6e
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_events_emitter.cpp
@@ -0,0 +1,289 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/string.hpp>
+#endif
+#include "test_suite_events.hpp"
+
+namespace c4 {
+namespace yml {
+
+// Serializes a Tree into the yaml-test-suite event format.
+//
+// All the pr() overloads implement a measure-or-write protocol: they copy into
+// buf only while the write fits, but ALWAYS advance pos. So a run with an
+// undersized (or empty) buffer still yields the total required size in pos,
+// enabling the two-pass emit used by the emit_events() wrappers.
+struct EventsEmitter
+{
+    substr buf;
+    size_t pos;
+    std::string tagbuf;  // scratch buffer for resolving tag directives
+    Tree const* C4_RESTRICT m_tree;
+    EventsEmitter(Tree const& tree, substr buf_) : buf(buf_), pos(), m_tree(&tree) {}
+    void emit_tag(csubstr tag, size_t node);
+    void emit_scalar(csubstr val, bool quoted);
+    void emit_key_anchor_tag(size_t node);
+    void emit_val_anchor_tag(size_t node);
+    void emit_events(size_t node);
+    void emit_doc(size_t node);
+    void emit_events();
+    // print a string literal (N includes the terminating '\0', hence N-1)
+    template<size_t N>
+    C4_ALWAYS_INLINE void pr(const char (&s)[N])
+    {
+        if(pos + N-1 <= buf.len)
+            memcpy(buf.str + pos, s, N-1);
+        pos += N-1;
+    }
+    // print a string view
+    C4_ALWAYS_INLINE void pr(csubstr s)
+    {
+        if(pos + s.len <= buf.len)
+            memcpy(buf.str + pos, s.str, s.len);
+        pos += s.len;
+    }
+    // print a single character
+    C4_ALWAYS_INLINE void pr(char c)
+    {
+        if(pos + 1 <= buf.len)
+            buf[pos] = c;
+        ++pos;
+    }
+    // print val[prev,i), then the escape sequence '\'+c; returns the new prev
+    C4_ALWAYS_INLINE size_t emit_to_esc(csubstr val, size_t prev, size_t i, char c)
+    {
+        pr(val.range(prev, i));
+        pr('\\');
+        pr(c);
+        return i+1;
+    }
+    // print val[prev,i), then a full replacement string; returns the new prev
+    C4_ALWAYS_INLINE size_t emit_to_esc(csubstr val, size_t prev, size_t i, csubstr repl)
+    {
+        pr(val.range(prev, i));
+        pr(repl);
+        return i+1;
+    }
+};
+
+// Emit a scalar in event form: prefixed with ':' (plain) or '\'' (quoted),
+// with control characters and special Unicode code points escaped the way the
+// yaml-test-suite event files expect. Escapes are emitted lazily: plain spans
+// are flushed via emit_to_esc()/the final pr(), tracked by `prev`.
+void EventsEmitter::emit_scalar(csubstr val, bool quoted)
+{
+    constexpr const char openchar[] = {':', '\''};
+    pr(openchar[quoted]); // bool indexes the marker: 0 -> ':', 1 -> '\''
+    size_t prev = 0;
+    // scan raw bytes so multi-byte UTF-8 sequences can be matched below
+    uint8_t const* C4_RESTRICT s = (uint8_t const* C4_RESTRICT) val.str;
+    for(size_t i = 0; i < val.len; ++i)
+    {
+        switch(s[i])
+        {
+        case UINT8_C(0x0a): // \n
+            prev = emit_to_esc(val, prev, i, 'n'); break;
+        case UINT8_C(0x5c): // '\\'
+            prev = emit_to_esc(val, prev, i, '\\'); break;
+        case UINT8_C(0x09): // \t
+            prev = emit_to_esc(val, prev, i, 't'); break;
+        case UINT8_C(0x0d): // \r
+            prev = emit_to_esc(val, prev, i, 'r'); break;
+        case UINT8_C(0x00): // \0
+            prev = emit_to_esc(val, prev, i, '0'); break;
+        case UINT8_C(0x0c): // \f (form feed)
+            prev = emit_to_esc(val, prev, i, 'f'); break;
+        case UINT8_C(0x08): // \b (backspace)
+            prev = emit_to_esc(val, prev, i, 'b'); break;
+        case UINT8_C(0x07): // \a (bell)
+            prev = emit_to_esc(val, prev, i, 'a'); break;
+        case UINT8_C(0x0b): // \v (vertical tab)
+            prev = emit_to_esc(val, prev, i, 'v'); break;
+        case UINT8_C(0x1b): // \e (escape)
+            prev = emit_to_esc(val, prev, i, "\\e"); break;
+        case UINT8_C(0xc2): // lead byte of 2-byte UTF-8: NBSP (\_) or NEL (\N)
+            if(i+1 < val.len)
+            {
+                uint8_t np1 = s[i+1];
+                if(np1 == UINT8_C(0xa0))
+                    prev = 1u + emit_to_esc(val, prev, i++, "\\_");
+                else if(np1 == UINT8_C(0x85))
+                    prev = 1u + emit_to_esc(val, prev, i++, "\\N");
+            }
+            break;
+        case UINT8_C(0xe2): // lead byte of 3-byte UTF-8: LS (\L) or PS (\P)
+            if(i+2 < val.len)
+            {
+                if(s[i+1] == UINT8_C(0x80))
+                {
+                    if(s[i+2] == UINT8_C(0xa8))
+                    {
+                        prev = 2u + emit_to_esc(val, prev, i, "\\L");
+                        i += 2u;
+                    }
+                    else if(s[i+2] == UINT8_C(0xa9))
+                    {
+                        prev = 2u + emit_to_esc(val, prev, i, "\\P");
+                        i += 2u;
+                    }
+                }
+            }
+            break;
+        }
+    }
+    pr(val.sub(prev)); // print remaining portion
+}
+
+// Emit a node tag. If the tree has a matching %TAG directive, the tag is
+// resolved through it (growing the scratch tagbuf when the first attempt
+// reports a larger required size); otherwise the tag is normalized to its
+// long verbatim form and wrapped in <...> if not already so.
+void EventsEmitter::emit_tag(csubstr tag, size_t node)
+{
+    size_t tagsize = m_tree->resolve_tag(to_substr(tagbuf), tag, node);
+    if(tagsize)
+    {
+        if(tagsize > tagbuf.size())
+        {
+            tagbuf.resize(tagsize);
+            tagsize = m_tree->resolve_tag(to_substr(tagbuf), tag, node);
+        }
+        pr(to_substr(tagbuf).first(tagsize));
+    }
+    else
+    {
+        csubstr ntag = normalize_tag_long(tag);
+        if(ntag.begins_with('<'))
+        {
+            pr(ntag);
+        }
+        else
+        {
+            pr('<');
+            pr(ntag);
+            pr('>');
+        }
+    }
+}
+
+// Emit the key's anchor (" &name") and/or tag (" <tag>") for @p node, if any.
+void EventsEmitter::emit_key_anchor_tag(size_t node)
+{
+    if(m_tree->has_key_anchor(node))
+    {
+        pr(" &");
+        pr(m_tree->key_anchor(node));
+    }
+    if(m_tree->has_key_tag(node))
+    {
+        pr(' ');
+        emit_tag(m_tree->key_tag(node), node);
+    }
+}
+
+// Emit the val's anchor (" &name") and/or tag (" <tag>") for @p node, if any.
+void EventsEmitter::emit_val_anchor_tag(size_t node)
+{
+    if(m_tree->has_val_anchor(node))
+    {
+        pr(" &");
+        pr(m_tree->val_anchor(node));
+    }
+    if(m_tree->has_val_tag(node))
+    {
+        pr(' ');
+        emit_tag(m_tree->val_tag(node), node);
+    }
+}
+
+// Recursively emit the events for @p node: its key (alias or scalar), then
+// either its val (alias or scalar) or, for containers, the +MAP/-MAP or
+// +SEQ/-SEQ pair around the children's events.
+void EventsEmitter::emit_events(size_t node)
+{
+    if(m_tree->has_key(node))
+    {
+        if(m_tree->is_key_ref(node))
+        {
+            csubstr k = m_tree->key(node);
+            // the "<<" merge key is emitted as a plain scalar, not an alias
+            if(k != "<<")
+            {
+                pr("=ALI ");
+                pr(k);
+                pr('\n');
+            }
+            else
+            {
+                pr("=VAL :");
+                pr(k);
+                pr('\n');
+            }
+        }
+        else
+        {
+            pr("=VAL");
+            emit_key_anchor_tag(node);
+            pr(' ');
+            emit_scalar(m_tree->key(node), m_tree->is_key_quoted(node));
+            pr('\n');
+        }
+    }
+    if(m_tree->has_val(node))
+    {
+        if(m_tree->is_val_ref(node))
+        {
+            pr("=ALI ");
+            pr(m_tree->val(node));
+            pr('\n');
+        }
+        else
+        {
+            pr("=VAL");
+            emit_val_anchor_tag(node);
+            pr(' ');
+            emit_scalar(m_tree->val(node), m_tree->is_val_quoted(node));
+            pr('\n');
+        }
+    }
+    else if(m_tree->is_map(node))
+    {
+        pr("+MAP");
+        emit_val_anchor_tag(node);
+        pr('\n');
+        for(size_t child = m_tree->first_child(node); child != NONE; child = m_tree->next_sibling(child))
+            emit_events(child);
+        pr("-MAP\n");
+    }
+    else if(m_tree->is_seq(node))
+    {
+        pr("+SEQ");
+        emit_val_anchor_tag(node);
+        pr('\n');
+        for(size_t child = m_tree->first_child(node); child != NONE; child = m_tree->next_sibling(child))
+            emit_events(child);
+        pr("-SEQ\n");
+    }
+}
+
+// Emit a document node wrapped in +DOC/-DOC. Nodes with NOTYPE are skipped
+// entirely; docs that have a parent (i.e. children of a STREAM root) carry
+// the explicit "---" marker.
+void EventsEmitter::emit_doc(size_t node)
+{
+    if(m_tree->type(node) == NOTYPE)
+        return;
+    if(m_tree->has_parent(node))
+        pr("+DOC ---"); // parent must be a stream
+    else
+        pr("+DOC");
+    if(m_tree->is_val(node))
+    {
+        // doc is a bare scalar: emit it directly without container events
+        pr("\n=VAL");
+        emit_val_anchor_tag(node);
+        pr(' ');
+        emit_scalar(m_tree->val(node), m_tree->is_val_quoted(node));
+        pr('\n');
+    }
+    else
+    {
+        pr('\n');
+        emit_events(node);
+    }
+    pr("-DOC\n");
+}
+
+// Emit the full event stream: the +STR/-STR pair around one doc per stream
+// child (for STREAM roots) or a single doc for plain roots.
+void EventsEmitter::emit_events()
+{
+    pr("+STR\n");
+    if(!m_tree->empty())
+    {
+        size_t root = m_tree->root_id();
+        if(m_tree->is_stream(root))
+            for(size_t node = m_tree->first_child(root); node != NONE; node = m_tree->next_sibling(node))
+                emit_doc(node);
+        else
+            emit_doc(root);
+    }
+    pr("-STR\n");
+}
+
+// Emit @p tree's event stream into @p buf.
+// @return the number of characters needed; when larger than buf.len, the
+// output is truncated and the caller should retry with a buffer of that size.
+size_t emit_events(substr buf, Tree const& C4_RESTRICT tree)
+{
+    EventsEmitter e(tree, buf);
+    e.emit_events();
+    return e.pos;
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_suite/test_suite_parts.cpp b/thirdparty/ryml/test/test_suite/test_suite_parts.cpp
new file mode 100644
index 000000000..5caaafeab
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_parts.cpp
@@ -0,0 +1,220 @@
+#include "./test_suite_parts.hpp"
+
+namespace c4 {
+namespace yml {
+
+
+// To see the test case contents, refer to this URL:
+// https://github.com/yaml/yaml-test-suite/tree/master/src
+// To see the test case contents, refer to this URL:
+// https://github.com/yaml/yaml-test-suite/tree/master/src
+// Each entry names a yaml-test-suite case (matched by prefix in
+// is_failure_expected()) together with the reason it is allowed to fail.
+constexpr const AllowedFailure allowed_failures[] = {
+
+    // g++-5 does not like creating a csubstr directly from the literal.
+    // so we use this macro to remove cruft from the code:
+    #define _(testcase, reason) AllowedFailure{csubstr(testcase), csubstr(reason)}
+
+    //-------------------------------------------------------------------------
+    // SECTION 1. Known issues, TODO
+    //
+    // These tests are temporarily skipped, and cover issues that must be fixed.
+
+    // double quoted scalars
+    _("G4RS-in_json"  , "special characters must be emitted in double quoted style"),
+    _("G4RS-in_yaml"  , "special characters must be emitted in double quoted style"),
+    _("G4RS-out_yaml" , "special characters must be emitted in double quoted style"),
+    // other
+    _("UKK6_01-in_yaml" , "fails to parse double :: in UNK state"),
+
+
+    //-------------------------------------------------------------------------
+    // SECTION 2. Expected errors that fail to materialize.
+
+    // maps
+    _("236B-error"  , "should not accept final scalar in a map"),
+    _("7MNF-error"  , "should not accept final scalar in a map"),
+    _("62EZ-error"  , "should not accept invalid block mapping key on same line as previous key"),
+    _("9CWY-error"  , "should not accept final scalar in a map"),
+    _("CXX2-error"  , "should not accept mapping with anchor on document start line"),
+    _("DK95_06-error" , "should not accept tab indentation"),
+    _("GDY7-error"  , "should not accept comment that looks like a mapping key"),
+    _("D49Q-error"  , "should not accept multiline single quoted implicit keys"),
+    _("DK4H-error"  , "should not accept implicit key followed by newline"),
+    _("JY7Z-error"  , "should not accept trailing content that looks like a mapping"),
+    _("SU74-error"  , "should not accept anchor and alias as mapping key"),
+    _("T833-error"  , "should not accept flow mapping missing a separating comma"),
+    _("VJP3_00-error" , "should not accept flow collections over many lines"),
+    _("Y79Y_006-error", "should not accept tab after ?"),
+    _("Y79Y_007-error", "should not accept tab after :"),
+    _("Y79Y_008-error", "should not accept tab after ?"),
+    _("Y79Y_009-error", "should not accept tab after ?"),
+    _("ZCZ6-error"  , "should not accept invalid mapping in plain single line value"),
+    // seqs
+    _("5U3A-error"  , "should not accept opening a sequence on same line as map key"),
+    _("6JTT-error"  , "should not accept flow sequence without terminating ]"),
+    _("9C9N-error"  , "should not accept non-indented flow sequence"),
+    _("9JBA-error"  , "should not accept comment after flow seq terminating ]"),
+    _("9MAG-error"  , "should not accept flow sequence with invalid comma at the beginning"),
+    _("CTN5-error"  , "should not accept flow sequence with missing elements"),
+    _("CVW2-error"  , "should not accept flow sequence with comment after ,"),
+    _("G5U8-error"  , "should not accept [-, -]"),
+    _("KS4U-error"  , "should not accept item after end of flow sequence"),
+    _("P2EQ-error"  , "should not accept sequence item on same line as previous item"),
+    _("YJV2-error"  , "should not accept [-]"),
+    _("Y79Y_003-error", "should not accept leading tabs in seq elmt"),
+    _("Y79Y_004-error", "should not accept tab after -"),
+    _("Y79Y_005-error", "should not accept tab after -"),
+    // block scalars
+    _("2G84_00-error" , "should not accept the block literal spec"),
+    _("2G84_01-error" , "should not accept the block literal spec"),
+    _("5LLU-error"  , "should not accept folded scalar with wrong indented line after spaces only"),
+    _("S4GJ-error"  , "should not accept text after block scalar indicator"),
+    _("S98Z-error"  , "should not accept block scalar with more spaces than first content line"),
+    _("X4QW-error"  , "should not accept comment without whitespace after block scalar indicator"),
+    _("Y79Y_000-error", "should not accept leading tabs in the block scalar"),
+    // quoted scalars
+    _("55WF-error"  , "should not accept invalid escape in double quoted scalar"),
+    _("7LBH-error"  , "should not accept multiline double quoted implicit keys"),
+    _("DK95_01-error", "should not accept leading tabs in double quoted multiline scalar"),
+    _("HRE5-error"  , "should not accept double quoted scalar with escaped single quote"),
+    _("JKF3-error"  , "should not accept multiline unindented double quoted scalar"),
+    _("QB6E-error"  , "should not accept indented multiline quoted scalar"),
+    _("RXY3-error"  , "should not accept document-end marker in single quoted string"),
+    _("SU5Z-error"  , "should not accept comment without whitespace after double quoted scalar"),
+    // plain scalars
+    _("8XDJ-error"  , "should not accept comment in multiline scalar"),
+    _("CML9-error"  , "should not accept comment inside flow scalar"),
+    // documents/streams
+    _("3HFZ-error"  , "should not accept scalar after ..."),
+    _("5TRB-error"  , "should not accept document-end marker in double quoted string"),
+    _("9MMA-error"  , "should not accept empty doc after %YAML directive"),
+    _("9MQT_01-error", "should not accept scalars after ..."),
+    _("B63P-error"  , "should not accept directive without doc"),
+    _("EB22-error"  , "should not accept missing document-end marker before directive"),
+    _("H7TQ-error"  , "should not accept extra words after directive"),
+    _("MUS6_00-error", "should not accept #... at the end of %YAML directive"),
+    _("MUS6_01-error", "should not accept #... at the end of %YAML directive"),
+    _("N782-error"  , "should not accept document markers in flow style"),
+    _("RHX7-error"  , "should not accept directive without document end marker"),
+    _("SF5V-error"  , "should not accept duplicate YAML directive"),
+    // anchors
+    _("4EJS-error"  , "should not accept double anchor for scalar"),
+    _("4JVG-error"  , "should not accept double anchor for scalar"),
+    _("SY6V-error"  , "should not accept anchor before sequence entry on same line"),
+    // tags
+    _("9HCY-error"  , "should not accept tag directive in non-doc scope"),
+    _("BU8L-error"  , "should not accept node properties spread over multiple lines"),
+    _("LHL4-error"  , "should not accept tag"),
+    _("U99R-error"  , "should not accept comma in a tag"),
+    _("QLJ7-error"  , "tag directives should apply only to the next doc (?)"),
+
+
+    //-------------------------------------------------------------------------
+    // SECTION 3. Deliberate ryml limitations.
+    //
+    // These tests are skipped because they cover parts of YAML that
+    // are deliberately not implemented by ryml.
+
+    #ifndef RYML_WITH_TAB_TOKENS // -<tab> or :<tab> are supported only when the above macro is defined
+    _("A2M4-in_yaml-events"    , "tabs tokens"),
+    _("6BCT-in_yaml"           , "tabs tokens"),
+    _("J3BT-in_yaml-events"    , "tabs tokens"),
+    _("Y79Y_010-in_yaml-events", "tabs tokens"),
+    #endif
+    // container keys are not supported
+    _("4FJ6-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("4FJ6-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("6BFJ-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("6BFJ-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("6PBE-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("6PBE-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("6PBE-emit_yaml"         , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("9MMW-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("9MMW-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("KK5P-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("KK5P-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("KZN9-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("KZN9-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("LX3P-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("LX3P-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M2N8_00-in_yaml"        , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M2N8_00-out_yaml"       , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M2N8_01-in_yaml-events" , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M2N8_01-out_yaml-events", "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M5DY-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("M5DY-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("Q9WF-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("Q9WF-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("RZP5-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("RZP5-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("SBG9-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("SBG9-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("V9D5-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("V9D5-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("X38W-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("X38W-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("XW4D-in_yaml"           , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    _("XW4D-out_yaml"          , "only scalar keys allowed (keys cannot be maps or seqs)"),
+    // anchors with : are not supported
+    _("2SXE-in_yaml-events"    , "weird characters in anchors, anchors must not end with :"),
+    // malformed json in the test spec
+    _("35KP-in_json"           , "malformed JSON from multiple documents"),
+    _("5TYM-in_json"           , "malformed JSON from multiple documents"),
+    _("6XDY-in_json"           , "malformed JSON from multiple documents"),
+    _("6WLZ-in_json"           , "malformed JSON from multiple documents"),
+    _("6ZKB-in_json"           , "malformed JSON from multiple documents"),
+    _("7Z25-in_json"           , "malformed JSON from multiple documents"),
+    _("9DXL-in_json"           , "malformed JSON from multiple documents"),
+    _("9KAX-in_json"           , "malformed JSON from multiple documents"),
+    _("9WXW-in_json"           , "malformed JSON from multiple documents"),
+    _("JHB9-in_json"           , "malformed JSON from multiple documents"),
+    _("KSS4-in_json"           , "malformed JSON from multiple documents"),
+    _("L383-in_json"           , "malformed JSON from multiple documents"),
+    _("M7A3-in_json"           , "malformed JSON from multiple documents"),
+    _("RZT7-in_json"           , "malformed JSON from multiple documents"),
+    _("U9NS-in_json"           , "malformed JSON from multiple documents"),
+    _("W4TN-in_json"           , "malformed JSON from multiple documents"),
+    // malformed test spec?
+    _("4ABK-out_yaml-events"   , "out-yaml contains null, while in-yaml and events contain empty scalars"),
+    _("4WA9-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("652Z-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("6CA3-emit_yaml"         , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("6FWR-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("6WPF-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("9TFX-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("B3HG-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_00-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_02-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_03-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_04-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_05-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_06-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_07-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("DK95_08-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("EX5H-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("EXG3-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("L24T_00-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("L24T_01-emit_yaml-events", "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("M6YH-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("Q8AD-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("T26H-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("T4YY-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("T5N4-out_yaml-events"   , "out-yaml test spec is missing a --- document token, which is required in the events"),
+    _("VJP3_01-out_yaml-events" , "out-yaml test spec is missing a --- document token, which is required in the events"),
+
+    #undef _
+};
+
+
+// non-owning view over the table above, consulted by is_failure_expected()
+cspan<AllowedFailure> g_allowed_failures = allowed_failures;
+
+// Look up @p casename in the allowed-failures table. Matches either the exact
+// test name or any entry that is a prefix of casename.
+// @return the matching entry, or a default (empty, falsy) AllowedFailure.
+AllowedFailure is_failure_expected(csubstr casename)
+{
+    RYML_CHECK(casename.not_empty());
+    for(AllowedFailure const& af : g_allowed_failures)
+        if(af.test_name == casename || casename.begins_with(af.test_name))
+            return af;
+    return {};
+}
+
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_suite/test_suite_parts.hpp b/thirdparty/ryml/test/test_suite/test_suite_parts.hpp
new file mode 100644
index 000000000..9092313ba
--- /dev/null
+++ b/thirdparty/ryml/test/test_suite/test_suite_parts.hpp
@@ -0,0 +1,28 @@
+#ifndef C4_YML_TEST_SUITE_PARTS_HPP_
+#define C4_YML_TEST_SUITE_PARTS_HPP_
+
+#ifdef RYML_SINGLE_HEADER
+#include <ryml_all.hpp>
+#else
+#include <c4/yml/common.hpp>
+#include <c4/span.hpp>
+#endif
+#include <c4/log/log.hpp>
+
+namespace c4 {
+namespace yml {
+
+// A yaml-test-suite case (or case-name prefix) that is expected to fail,
+// together with a human-readable justification.
+struct AllowedFailure
+{
+    csubstr test_name;  // test case name or prefix, e.g. "G4RS-in_json"
+    csubstr reason;     // why this case is allowed to fail
+    // falsy when default-constructed, i.e. when no failure is expected
+    operator bool() const { return !test_name.empty(); }
+};
+
+AllowedFailure is_failure_expected(csubstr casename);
+
+} // namespace yml
+} // namespace c4
+
+
+#endif /* C4_YML_TEST_SUITE_PARTS_HPP_ */
diff --git a/thirdparty/ryml/test/test_tag_property.cpp b/thirdparty/ryml/test/test_tag_property.cpp
new file mode 100644
index 000000000..e56fef994
--- /dev/null
+++ b/thirdparty/ryml/test/test_tag_property.cpp
@@ -0,0 +1,1149 @@
+#include "./test_group.hpp"
+#include "test_case.hpp"
+
+namespace c4 {
+namespace yml {
+
+// A %TAG directive applies to the following doc only: the same "!m!" handle
+// resolves differently in each doc, and the directives are emitted back out.
+TEST(tag_directives, basic)
+{
+    Tree t = parse_in_arena(R"(
+%TAG !m! !my-
+--- # Bulb here
+!m!light fluorescent
+...
+%TAG !m! !meta-
+--- # Color here
+!m!light green
+)");
+    EXPECT_EQ(t[0].val_tag(), "!m!light");
+    EXPECT_EQ(t[1].val_tag(), "!m!light");
+    EXPECT_EQ(t.num_tag_directives(), 2u);
+    char buf_[100];
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 1u), csubstr("<!my-light>"));
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 2u), csubstr("<!meta-light>"));
+    EXPECT_EQ(emitrs_yaml<std::string>(t), std::string(R"(%TAG !m! !my-
+--- !m!light fluorescent
+...
+%TAG !m! !meta-
+--- !m!light green
+)"));
+}
+
+// Same as the basic case, but with a trailing comment on the %TAG lines; the
+// comment must be ignored by the directive parser.
+TEST(tag_directives, accepts_comment)
+{
+    Tree t = parse_in_arena(R"(
+%TAG !m! !my- # comment
+--- # Bulb here
+!m!light fluorescent
+...
+%TAG !m! !meta- # comment
+--- # Color here
+!m!light green
+)");
+    EXPECT_EQ(t[0].val_tag(), "!m!light");
+    EXPECT_EQ(t[1].val_tag(), "!m!light");
+    EXPECT_EQ(t.num_tag_directives(), 2u);
+    char buf_[100];
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 1u), csubstr("<!my-light>"));
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 2u), csubstr("<!meta-light>"));
+    EXPECT_EQ(emitrs_yaml<std::string>(t), std::string(R"(%TAG !m! !my-
+--- !m!light fluorescent
+...
+%TAG !m! !meta-
+--- !m!light green
+)"));
+}
+
+// NOTE(review): this test body appears identical to accepts_comment above —
+// the multiple-space separation the name promises may have been lost in a
+// copy/transfer; confirm against upstream ryml and restore if needed.
+TEST(tag_directives, accepts_multiple_spaces)
+{
+    Tree t = parse_in_arena(R"(
+%TAG !m! !my- # comment
+--- # Bulb here
+!m!light fluorescent
+...
+%TAG !m! !meta- # comment
+--- # Color here
+!m!light green
+)");
+    EXPECT_EQ(t[0].val_tag(), "!m!light");
+    EXPECT_EQ(t[1].val_tag(), "!m!light");
+    EXPECT_EQ(t.num_tag_directives(), 2u);
+    char buf_[100];
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 1u), csubstr("<!my-light>"));
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 2u), csubstr("<!meta-light>"));
+    EXPECT_EQ(emitrs_yaml<std::string>(t), std::string(R"(%TAG !m! !my-
+--- !m!light fluorescent
+...
+%TAG !m! !meta-
+--- !m!light green
+)"));
+}
+
+// Malformed %TAG directives (no handle, or handle without a prefix) must be
+// rejected with a parse error.
+TEST(tag_directives, errors)
+{
+    {
+        Tree t;
+        ExpectError::do_check(&t, [&]{
+            t = parse_in_arena(R"(
+%TAG
+--- # Bulb here
+!m!light fluorescent)");
+        });
+    }
+    {
+        Tree t;
+        ExpectError::do_check(&t, [&]{
+            t = parse_in_arena(R"(
+%TAG !m!
+--- # Bulb here
+!m!light fluorescent)");
+        });
+    }
+}
+
+// Tree::resolve_tags() rewrites shorthand tags in place to their long
+// <...> form, honoring the directive that was in force for each doc.
+TEST(tag_directives, resolve_tags)
+{
+    Tree t = parse_in_arena(R"(
+%TAG !m! !my- # comment
+--- # Bulb here
+!m!light fluorescent: !m!light bulb
+...
+%TAG !m! !meta- # comment
+--- # Color here
+!m!light green: !m!light color
+)");
+    EXPECT_EQ(t.docref(0)[0].key_tag(), "!m!light");
+    EXPECT_EQ(t.docref(0)[0].val_tag(), "!m!light");
+    EXPECT_EQ(t.num_tag_directives(), 2u);
+    t.resolve_tags();
+    EXPECT_EQ(t.docref(0)[0].key_tag(), "<!my-light>");
+    EXPECT_EQ(t.docref(0)[0].val_tag(), "<!my-light>");
+    EXPECT_EQ(emitrs_yaml<std::string>(t), std::string(R"(%TAG !m! !my-
+---
+!<!my-light> fluorescent: !<!my-light> bulb
+...
+%TAG !m! !meta-
+---
+!<!meta-light> green: !<!meta-light> color
+)"));
+}
+
+// resolve_tags() on a default-constructed (empty) tree must be a no-op.
+TEST(tag_directives, safe_with_empty_tree)
+{
+    Tree t;
+    t.resolve_tags();
+    EXPECT_TRUE(t.empty());
+}
+
+// Percent-encoded characters in tags (e.g. %61 -> 'a') are decoded during
+// resolve_tags(); malformed percent escapes must raise an error.
+TEST(tag_directives, decode_uri_chars)
+{
+    {
+        // the happy path: %61%62%63%21 decodes to "abc!"
+        Tree t = parse_in_arena(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%61%62%63%21 baz
+)");
+        t.resolve_tags();
+        EXPECT_EQ(t.docref(0)[0].val_tag(), csubstr("<tag:example.com,2000:app/abc!>"));
+    }
+    {
+        Tree t;
+        // each malformed escape variant below must error out in resolve_tags()
+        auto checkerr = [&t](csubstr yaml){
+            ExpectError::do_check(&t, [&]{
+                t.clear();
+                t = parse_in_arena(yaml);
+                t.resolve_tags();
+            });
+        };
+        {
+            SCOPED_TRACE("without numbers at begin");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%%62%63 baz
+)");
+        }
+        {
+            SCOPED_TRACE("without numbers in the middle");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%61%%63 baz
+)");
+        }
+        {
+            SCOPED_TRACE("without numbers in the end");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%61%62% baz
+)");
+        }
+        {
+            SCOPED_TRACE("with wrong characters numbers at begin");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%h%62%63 baz
+)");
+        }
+        {
+            SCOPED_TRACE("with wrong characters in the middle");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%61%hh%63 baz
+)");
+        }
+        {
+            SCOPED_TRACE("with wrong characters in the end");
+            checkerr(R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !e!%61%62%hh baz
+)");
+        }
+    }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// Covers yaml-test-suite case 735Y: a "!!map" token followed by a comment is
+// a tag only when it starts with '!'; commented-out variants must parse as
+// plain maps without a val tag.
+TEST(tags, test_suite_735Y)
+{
+    csubstr yaml_without_seq = R"(
+!!map # Block collection
+foo : bar
+)";
+    test_check_emit_check(yaml_without_seq, [](Tree const &t){
+        EXPECT_TRUE(t.rootref().is_map());
+        EXPECT_TRUE(t.rootref().has_val_tag());
+        EXPECT_EQ(t.rootref()["foo"].val(), csubstr("bar"));
+    });
+
+    csubstr yaml = R"(
+-
+  foo : bar
+- #!!map
+  foo : bar
+- #!!map # Block collection
+  foo : bar
+- !!map
+  foo : bar
+- !!map # Block collection
+  foo : bar
+)";
+    test_check_emit_check(yaml, [](Tree const &t){
+        ASSERT_TRUE(t.rootref().is_seq());
+        ASSERT_EQ(t.rootref().num_children(), 5u);
+        //
+        EXPECT_TRUE(t[0].is_map());
+        EXPECT_TRUE(!t[0].has_val_tag());
+        EXPECT_EQ(t[0]["foo"].val(), csubstr("bar"));
+        //
+        EXPECT_TRUE(t[1].is_map());
+        EXPECT_TRUE(!t[1].has_val_tag());
+        EXPECT_EQ(t[1]["foo"].val(), csubstr("bar"));
+        //
+        EXPECT_TRUE(t[2].is_map());
+        EXPECT_TRUE(!t[2].has_val_tag());
+        EXPECT_EQ(t[2]["foo"].val(), csubstr("bar"));
+        //
+        EXPECT_TRUE(t[3].is_map());
+        ASSERT_TRUE(t[3].has_val_tag());
+        EXPECT_EQ(t[3].val_tag(), csubstr("!!map"));
+        EXPECT_EQ(t[3]["foo"].val(), csubstr("bar"));
+        //
+        EXPECT_TRUE(t[4].is_map());
+        ASSERT_TRUE(t[4].has_val_tag());
+        EXPECT_EQ(t[4].val_tag(), csubstr("!!map"));
+        EXPECT_EQ(t[4]["foo"].val(), csubstr("bar"));
+    });
+}
+
+
+// The various tag spellings (!foo, !<!foo>, !<foo>, !<tag:...>) are preserved
+// on parse; text not starting with '!' (e.g. "<!key>") is a plain scalar, and
+// well-known long tags are emitted back in shorthand (!!) form.
+TEST(tags, parsing)
+{
+    Tree t = parse_in_arena(R"(
+!!seq
+- !!map
+  !key key1: !val val1
+  !<!key> key2: !<!val> val2
+  !<key> key3: !<val> val3
+  <!key> key4: <!val> val4 # there are NOT parsed as tags
+- !<tag:yaml.org,2002:map>
+  !key key1: !val val1
+- !<tag:yaml.org,2002:seq>
+  - !val val
+  - !str val
+  - <!str> val
+  - !<!str> val
+  - !<!!str> val
+  - !<tag:yaml.org,2002:str> val
+)");
+    EXPECT_EQ(t.rootref().val_tag(), csubstr("!!seq"));
+    EXPECT_EQ(t[0].val_tag(), csubstr("!!map"));
+    EXPECT_EQ(t[1].val_tag(), csubstr("!!map"));
+    EXPECT_EQ(t[2].val_tag(), csubstr("!!seq"));
+    EXPECT_EQ(t[0]["key1"].key_tag(), csubstr("!key"));
+    EXPECT_EQ(t[0]["key1"].val_tag(), csubstr("!val"));
+    EXPECT_EQ(t[0]["key2"].key_tag(), csubstr("<!key>"));
+    EXPECT_EQ(t[0]["key2"].val_tag(), csubstr("<!val>"));
+    EXPECT_EQ(t[0]["key3"].key_tag(), csubstr("<key>"));
+    EXPECT_EQ(t[0]["key3"].val_tag(), csubstr("<val>"));
+    EXPECT_EQ(t[0]["<!key> key4"].has_key_tag(), false);
+    EXPECT_EQ(t[0]["<!key> key4"].has_val_tag(), false);
+    EXPECT_EQ(t[0]["<!key> key4"].key(), csubstr("<!key> key4"));
+    EXPECT_EQ(t[0]["<!key> key4"].val(), csubstr("<!val> val4"));
+    EXPECT_EQ(t[2][5].val_tag(), csubstr("!!str"));
+
+    EXPECT_EQ(emitrs_yaml<std::string>(t), R"(!!seq
+- !!map
+  !key key1: !val val1
+  !<!key> key2: !<!val> val2
+  !<key> key3: !<val> val3
+  <!key> key4: <!val> val4
+- !!map
+  !key key1: !val val1
+- !!seq
+  - !val val
+  - !str val
+  - <!str> val
+  - !<!str> val
+  - !<!!str> val
+  - !!str val
+)");
+}
+
+
+// Tags can be set programmatically on existing nodes via set_key_tag() and
+// set_val_tag(), for container and scalar nodes alike.
+TEST(tags, setting)
+{
+    Tree t;
+    size_t rid = t.root_id();
+    t.to_map(rid);
+    t.set_val_tag(rid, "!valtag");
+    EXPECT_EQ(t.val_tag(rid), "!valtag");
+
+    // a keymap
+    // NOTE(review): this section is labeled "keymap" but calls to_seq(),
+    // making it identical to the "keyseq" section below — possibly intended
+    // to be t.to_map(child, ...); confirm against upstream ryml.
+    {
+        size_t child = t.append_child(rid);
+        t.to_seq(child, "key2");
+        t.set_key_tag(child, "!keytag");
+        t.set_val_tag(child, "!valtag2");
+        EXPECT_TRUE(t.has_key(child));
+        EXPECT_FALSE(t.has_val(child));
+        EXPECT_EQ(t.key(child), "key2");
+        EXPECT_EQ(t.key_tag(child), "!keytag");
+        EXPECT_EQ(t.val_tag(child), "!valtag2");
+    }
+
+    // a keyseq
+    {
+        size_t child = t.append_child(rid);
+        t.to_seq(child, "key2");
+        t.set_key_tag(child, "!keytag");
+        t.set_val_tag(child, "!valtag2");
+        EXPECT_TRUE(t.has_key(child));
+        EXPECT_FALSE(t.has_val(child));
+        EXPECT_EQ(t.key(child), "key2");
+        EXPECT_EQ(t.key_tag(child), "!keytag");
+        EXPECT_EQ(t.val_tag(child), "!valtag2");
+    }
+
+    // a keyval
+    {
+        size_t child = t.append_child(rid);
+        t.to_keyval(child, "key", "val");
+        t.set_key_tag(child, "!keytag");
+        t.set_val_tag(child, "!valtag");
+        EXPECT_TRUE(t.has_key(child));
+        EXPECT_TRUE(t.has_val(child));
+        EXPECT_EQ(t.key(child), "key");
+        EXPECT_EQ(t.val(child), "val");
+        EXPECT_EQ(t.key_tag(child), "!keytag");
+        EXPECT_EQ(t.val_tag(child), "!valtag");
+    }
+
+    // a val
+    {
+        size_t seqid = t[1].id();
+        ASSERT_TRUE(t.is_seq(seqid));
+        size_t child = t.append_child(seqid);
+        t.to_val(child, "val");
+        t.set_val_tag(child, "!valtag");
+        EXPECT_FALSE(t.has_key(child));
+        EXPECT_TRUE(t.has_val(child));
+        EXPECT_EQ(t.val(child), "val");
+        EXPECT_EQ(t.val_tag(child), "!valtag");
+    }
+}
+
+TEST(tags, errors)
+{
+ Tree t = parse_in_arena("{key: val, keymap: {}, keyseq: [val]}");
+ size_t keyval = t["keyval"].id();
+ size_t keymap = t["keymap"].id();
+ size_t keyseq = t["keyseq"].id();
+ size_t val = t["keyseq"][0].id();
+ size_t empty_keyval = t.append_child(keymap);
+ size_t empty_val = t.append_child(keyseq);
+
+ ASSERT_NE(keyval, (size_t)npos);
+ ASSERT_NE(keymap, (size_t)npos);
+ ASSERT_NE(keyseq, (size_t)npos);
+ ASSERT_NE(val, (size_t)npos);
+
+ // cannot get key tag in a node that does not have a key tag
+ EXPECT_FALSE(t.has_key_tag(empty_keyval));
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.key_tag(empty_keyval), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.key_tag(keyval), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.key_tag(keymap), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.key_tag(keyseq), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.key_tag(val), "");
+ });
+ // cannot get val tag in a node that does not have a val tag
+ EXPECT_FALSE(t.has_val_tag(empty_val));
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(empty_val), "");
+ });
+ EXPECT_FALSE(t.has_val_tag(empty_keyval));
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(empty_keyval), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(keyval), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(keymap), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(keyseq), "");
+ });
+ ExpectError::check_assertion(&t, [&](){
+ EXPECT_EQ(t.val_tag(val), "");
+ });
+ // cannot set key tag in a node that does not have a key
+ EXPECT_FALSE(t.has_key(empty_keyval));
+ ExpectError::check_assertion(&t, [&](){
+ t.set_key_tag(empty_keyval, "!keytag");
+ });
+ EXPECT_FALSE(t.has_key_tag(val)); // must stay the same
+ ExpectError::check_assertion(&t, [&](){
+ t.set_key_tag(val, "!valtag");
+ });
+ EXPECT_FALSE(t.has_key_tag(val)); // must stay the same
+ // cannot set val tag in a node that does not have a val
+ EXPECT_FALSE(t.has_val(empty_val));
+ ExpectError::check_assertion(&t, [&](){
+ t.set_val_tag(empty_val, "!valtag");
+ });
+ EXPECT_FALSE(t.has_val_tag(empty_val)); // must stay the same
+ EXPECT_FALSE(t.has_val(empty_keyval));
+ ExpectError::check_assertion(&t, [&](){
+ t.set_val_tag(empty_keyval, "!valtag");
+ });
+ EXPECT_FALSE(t.has_val_tag(empty_keyval)); // must stay the same
+}
+
+
+TEST(tags, setting_user_tags_do_not_require_leading_mark)
+{
+ Tree t = parse_in_arena("{key: val, keymap: {}, keyseq: [val]}");
+ size_t keyval = t["keyval"].id();
+ size_t keymap = t["keymap"].id();
+ size_t keyseq = t["keyseq"].id();
+ size_t val = t["keyseq"][0].id();
+ ASSERT_NE(keyval, (size_t)npos);
+ ASSERT_NE(keymap, (size_t)npos);
+ ASSERT_NE(keyseq, (size_t)npos);
+ ASSERT_NE(val, (size_t)npos);
+
+ // without leading mark
+ t.set_key_tag(keyseq, "keytag");
+ t.set_val_tag(keyseq, "valtag");
+ t.set_val_tag(val, "valtag2");
+ EXPECT_EQ(t.key_tag(keyseq), "keytag");
+ EXPECT_EQ(t.val_tag(keyseq), "valtag");
+ EXPECT_EQ(t.val_tag(val), "valtag2");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(key: val
+keymap: {}
+!keytag keyseq: !valtag
+ - !valtag2 val
+)");
+
+ // with leading mark
+ t.set_key_tag(keyseq, "!keytag");
+ t.set_val_tag(keyseq, "!valtag");
+ t.set_val_tag(val, "!valtag2");
+ EXPECT_EQ(t.key_tag(keyseq), "!keytag");
+ EXPECT_EQ(t.val_tag(keyseq), "!valtag");
+ EXPECT_EQ(t.val_tag(val), "!valtag2");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(key: val
+keymap: {}
+!keytag keyseq: !valtag
+ - !valtag2 val
+)");
+}
+
+
+TEST(tags, valid_chars)
+{
+ Tree t = parse_in_arena(R"(
+- !<foo bar> val
+- !<foo> bar> val
+- !<foo> <bar> val
+)");
+ EXPECT_EQ(t[0].val_tag(), "<foo bar>");
+ EXPECT_EQ(t[0].val(), "val");
+ EXPECT_EQ(t[1].val_tag(), "<foo>");
+ EXPECT_EQ(t[1].val(), "bar> val");
+ EXPECT_EQ(t[2].val_tag(), "<foo>");
+ EXPECT_EQ(t[2].val(), "<bar> val");
+}
+
+
+TEST(tags, EHF6)
+{
+ {
+ Tree t = parse_in_arena(R"(!!map {
+ k: !!seq [ a, !!str b],
+ j: !!seq
+ [ a, !!str b]
+})");
+ ASSERT_TRUE(t.rootref().has_val_tag());
+ EXPECT_EQ(t.rootref().val_tag(), "!!map");
+ ASSERT_TRUE(t["k"].has_val_tag());
+ ASSERT_TRUE(t["j"].has_val_tag());
+ EXPECT_EQ(t["k"].val_tag(), "!!seq");
+ EXPECT_EQ(t["j"].val_tag(), "!!seq");
+ }
+ {
+ Tree t = parse_in_arena(R"(!!seq [
+ !!map { !!str k: v},
+ !!map { !!str ? k: v}
+])");
+ ASSERT_TRUE(t.rootref().has_val_tag());
+ EXPECT_EQ(t.rootref().val_tag(), "!!seq");
+ ASSERT_TRUE(t[0].has_val_tag());
+ ASSERT_TRUE(t[1].has_val_tag());
+ EXPECT_EQ(t[0].val_tag(), "!!map");
+ EXPECT_EQ(t[1].val_tag(), "!!map");
+ ASSERT_TRUE(t[0]["k"].has_key_tag());
+ ASSERT_TRUE(t[1]["k"].has_key_tag());
+ EXPECT_EQ(t[0]["k"].key_tag(), "!!str");
+ EXPECT_EQ(t[1]["k"].key_tag(), "!!str");
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+
+TEST(to_tag, user)
+{
+ EXPECT_EQ(to_tag("!"), TAG_NONE);
+ EXPECT_EQ(to_tag("!."), TAG_NONE);
+ EXPECT_EQ(to_tag("!good_type"), TAG_NONE);
+}
+
+TEST(to_tag, double_exc_mark)
+{
+ EXPECT_EQ(to_tag("!!" ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!." ), TAG_NONE);
+
+ EXPECT_EQ(to_tag("!!map" ), TAG_MAP);
+ EXPECT_EQ(to_tag("!!omap" ), TAG_OMAP);
+ EXPECT_EQ(to_tag("!!pairs" ), TAG_PAIRS);
+ EXPECT_EQ(to_tag("!!set" ), TAG_SET);
+ EXPECT_EQ(to_tag("!!seq" ), TAG_SEQ);
+ EXPECT_EQ(to_tag("!!binary" ), TAG_BINARY);
+ EXPECT_EQ(to_tag("!!bool" ), TAG_BOOL);
+ EXPECT_EQ(to_tag("!!float" ), TAG_FLOAT);
+ EXPECT_EQ(to_tag("!!int" ), TAG_INT);
+ EXPECT_EQ(to_tag("!!merge" ), TAG_MERGE);
+ EXPECT_EQ(to_tag("!!null" ), TAG_NULL);
+ EXPECT_EQ(to_tag("!!str" ), TAG_STR);
+ EXPECT_EQ(to_tag("!!timestamp" ), TAG_TIMESTAMP);
+ EXPECT_EQ(to_tag("!!value" ), TAG_VALUE);
+
+ EXPECT_EQ(to_tag("!!map." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!omap." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!pairs." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!set." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!seq." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!binary." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!bool." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!float." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!int." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!merge." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!null." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!str." ), TAG_NONE);
+ EXPECT_EQ(to_tag("!!timestamp."), TAG_NONE);
+ EXPECT_EQ(to_tag("!!value." ), TAG_NONE);
+}
+
+TEST(to_tag, with_namespace)
+{
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:" ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:." ), TAG_NONE);
+
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:map" ), TAG_MAP);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:omap" ), TAG_OMAP);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:pairs" ), TAG_PAIRS);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:set" ), TAG_SET);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:seq" ), TAG_SEQ);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:binary" ), TAG_BINARY);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:bool" ), TAG_BOOL);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:float" ), TAG_FLOAT);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:int" ), TAG_INT);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:merge" ), TAG_MERGE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:null" ), TAG_NULL);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:str" ), TAG_STR);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:timestamp" ), TAG_TIMESTAMP);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:value" ), TAG_VALUE);
+
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:map." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:omap." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:pairs." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:set." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:seq." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:binary." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:bool." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:float." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:int." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:merge." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:null." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:str." ), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:timestamp."), TAG_NONE);
+ EXPECT_EQ(to_tag("tag:yaml.org,2002:value." ), TAG_NONE);
+}
+
+TEST(to_tag, with_namespace_bracket)
+{
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:." ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:foo" ), TAG_NONE);
+
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:map>" ), TAG_MAP);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:omap>" ), TAG_OMAP);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:pairs>" ), TAG_PAIRS);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:set>" ), TAG_SET);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:seq>" ), TAG_SEQ);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:binary>" ), TAG_BINARY);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:bool>" ), TAG_BOOL);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:float>" ), TAG_FLOAT);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:int>" ), TAG_INT);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:merge>" ), TAG_MERGE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:null>" ), TAG_NULL);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:str>" ), TAG_STR);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:timestamp>" ), TAG_TIMESTAMP);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:value>" ), TAG_VALUE);
+
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:map.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:omap.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:pairs.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:set.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:seq.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:binary.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:bool.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:float.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:int.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:merge.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:null.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:str.>" ), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:timestamp.>"), TAG_NONE);
+ EXPECT_EQ(to_tag("<tag:yaml.org,2002:value.>" ), TAG_NONE);
+}
+
+TEST(from_tag, basic)
+{
+ EXPECT_EQ("", from_tag(TAG_NONE));
+
+ EXPECT_EQ("!!map" , from_tag(TAG_MAP));
+ EXPECT_EQ("!!omap" , from_tag(TAG_OMAP));
+ EXPECT_EQ("!!pairs" , from_tag(TAG_PAIRS));
+ EXPECT_EQ("!!set" , from_tag(TAG_SET));
+ EXPECT_EQ("!!seq" , from_tag(TAG_SEQ));
+ EXPECT_EQ("!!binary" , from_tag(TAG_BINARY));
+ EXPECT_EQ("!!bool" , from_tag(TAG_BOOL));
+ EXPECT_EQ("!!float" , from_tag(TAG_FLOAT));
+ EXPECT_EQ("!!int" , from_tag(TAG_INT));
+ EXPECT_EQ("!!merge" , from_tag(TAG_MERGE));
+ EXPECT_EQ("!!null" , from_tag(TAG_NULL));
+ EXPECT_EQ("!!str" , from_tag(TAG_STR));
+ EXPECT_EQ("!!timestamp" , from_tag(TAG_TIMESTAMP));
+ EXPECT_EQ("!!value" , from_tag(TAG_VALUE));
+}
+
+TEST(normalize_tag, basic)
+{
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:" ), "<tag:yaml.org,2002:");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:." ), "<tag:yaml.org,2002:.");
+
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:map>" ), "!!map");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:omap>" ), "!!omap");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:pairs>" ), "!!pairs");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:set>" ), "!!set");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:seq>" ), "!!seq");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:binary>" ), "!!binary");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:bool>" ), "!!bool");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:float>" ), "!!float");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:int>" ), "!!int");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:merge>" ), "!!merge");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:null>" ), "!!null");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:str>" ), "!!str");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:timestamp>" ), "!!timestamp");
+ EXPECT_EQ(normalize_tag("<tag:yaml.org,2002:value>" ), "!!value");
+
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:map>" ), "!!map");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:omap>" ), "!!omap");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:pairs>" ), "!!pairs");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:set>" ), "!!set");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:seq>" ), "!!seq");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:binary>" ), "!!binary");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:bool>" ), "!!bool");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:float>" ), "!!float");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:int>" ), "!!int");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:merge>" ), "!!merge");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:null>" ), "!!null");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:str>" ), "!!str");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:timestamp>"), "!!timestamp");
+ EXPECT_EQ(normalize_tag("!<tag:yaml.org,2002:value>" ), "!!value");
+
+ EXPECT_EQ(normalize_tag("!!map" ), "!!map");
+ EXPECT_EQ(normalize_tag("!!omap" ), "!!omap");
+ EXPECT_EQ(normalize_tag("!!pairs" ), "!!pairs");
+ EXPECT_EQ(normalize_tag("!!set" ), "!!set");
+ EXPECT_EQ(normalize_tag("!!seq" ), "!!seq");
+ EXPECT_EQ(normalize_tag("!!binary" ), "!!binary");
+ EXPECT_EQ(normalize_tag("!!bool" ), "!!bool");
+ EXPECT_EQ(normalize_tag("!!float" ), "!!float");
+ EXPECT_EQ(normalize_tag("!!int" ), "!!int");
+ EXPECT_EQ(normalize_tag("!!merge" ), "!!merge");
+ EXPECT_EQ(normalize_tag("!!null" ), "!!null");
+ EXPECT_EQ(normalize_tag("!!str" ), "!!str");
+ EXPECT_EQ(normalize_tag("!!timestamp"), "!!timestamp");
+ EXPECT_EQ(normalize_tag("!!value" ), "!!value");
+
+ EXPECT_EQ(normalize_tag("!!foo" ), "!!foo");
+
+ EXPECT_EQ(normalize_tag("!my-light"), "!my-light");
+ EXPECT_EQ(normalize_tag("!foo"), "!foo");
+ EXPECT_EQ(normalize_tag("<!foo>"), "<!foo>");
+ EXPECT_EQ(normalize_tag("<foo>"), "<foo>");
+ EXPECT_EQ(normalize_tag("<!>"), "<!>");
+
+ EXPECT_EQ(normalize_tag("!<!foo>"), "<!foo>");
+ EXPECT_EQ(normalize_tag("!<foo>"), "<foo>");
+ EXPECT_EQ(normalize_tag("!<!>"), "<!>");
+}
+
+TEST(normalize_tag_long, basic)
+{
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:" ), "<tag:yaml.org,2002:");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:." ), "<tag:yaml.org,2002:.");
+
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:map>" ), "<tag:yaml.org,2002:map>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:omap>" ), "<tag:yaml.org,2002:omap>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:pairs>" ), "<tag:yaml.org,2002:pairs>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:set>" ), "<tag:yaml.org,2002:set>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:seq>" ), "<tag:yaml.org,2002:seq>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:binary>" ), "<tag:yaml.org,2002:binary>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:bool>" ), "<tag:yaml.org,2002:bool>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:float>" ), "<tag:yaml.org,2002:float>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:int>" ), "<tag:yaml.org,2002:int>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:merge>" ), "<tag:yaml.org,2002:merge>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:null>" ), "<tag:yaml.org,2002:null>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:str>" ), "<tag:yaml.org,2002:str>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:timestamp>" ), "<tag:yaml.org,2002:timestamp>");
+ EXPECT_EQ(normalize_tag_long("<tag:yaml.org,2002:value>" ), "<tag:yaml.org,2002:value>");
+
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:map>" ), "<tag:yaml.org,2002:map>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:omap>" ), "<tag:yaml.org,2002:omap>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:pairs>" ), "<tag:yaml.org,2002:pairs>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:set>" ), "<tag:yaml.org,2002:set>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:seq>" ), "<tag:yaml.org,2002:seq>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:binary>" ), "<tag:yaml.org,2002:binary>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:bool>" ), "<tag:yaml.org,2002:bool>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:float>" ), "<tag:yaml.org,2002:float>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:int>" ), "<tag:yaml.org,2002:int>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:merge>" ), "<tag:yaml.org,2002:merge>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:null>" ), "<tag:yaml.org,2002:null>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:str>" ), "<tag:yaml.org,2002:str>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:timestamp>"), "<tag:yaml.org,2002:timestamp>");
+ EXPECT_EQ(normalize_tag_long("!<tag:yaml.org,2002:value>" ), "<tag:yaml.org,2002:value>");
+
+ EXPECT_EQ(normalize_tag_long("!!map" ), "<tag:yaml.org,2002:map>");
+ EXPECT_EQ(normalize_tag_long("!!omap" ), "<tag:yaml.org,2002:omap>");
+ EXPECT_EQ(normalize_tag_long("!!pairs" ), "<tag:yaml.org,2002:pairs>");
+ EXPECT_EQ(normalize_tag_long("!!set" ), "<tag:yaml.org,2002:set>");
+ EXPECT_EQ(normalize_tag_long("!!seq" ), "<tag:yaml.org,2002:seq>");
+ EXPECT_EQ(normalize_tag_long("!!binary" ), "<tag:yaml.org,2002:binary>");
+ EXPECT_EQ(normalize_tag_long("!!bool" ), "<tag:yaml.org,2002:bool>");
+ EXPECT_EQ(normalize_tag_long("!!float" ), "<tag:yaml.org,2002:float>");
+ EXPECT_EQ(normalize_tag_long("!!int" ), "<tag:yaml.org,2002:int>");
+ EXPECT_EQ(normalize_tag_long("!!merge" ), "<tag:yaml.org,2002:merge>");
+ EXPECT_EQ(normalize_tag_long("!!null" ), "<tag:yaml.org,2002:null>");
+ EXPECT_EQ(normalize_tag_long("!!str" ), "<tag:yaml.org,2002:str>");
+ EXPECT_EQ(normalize_tag_long("!!timestamp"), "<tag:yaml.org,2002:timestamp>");
+ EXPECT_EQ(normalize_tag_long("!!value" ), "<tag:yaml.org,2002:value>");
+
+ EXPECT_EQ(normalize_tag_long("!!foo" ), "!!foo");
+
+ EXPECT_EQ(normalize_tag_long("!my-light"), "!my-light");
+ EXPECT_EQ(normalize_tag_long("!foo"), "!foo");
+ EXPECT_EQ(normalize_tag_long("<!foo>"), "<!foo>");
+ EXPECT_EQ(normalize_tag_long("<foo>"), "<foo>");
+ EXPECT_EQ(normalize_tag_long("<!>"), "<!>");
+
+ EXPECT_EQ(normalize_tag_long("!<!foo>"), "<!foo>");
+ EXPECT_EQ(normalize_tag_long("!<foo>"), "<foo>");
+ EXPECT_EQ(normalize_tag_long("!<!foo>"), "<!foo>");
+ EXPECT_EQ(normalize_tag_long("!<foo>"), "<foo>");
+ EXPECT_EQ(normalize_tag_long("!<!>"), "<!>");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+CASE_GROUP(TAG_PROPERTY)
+{
+
+ADD_CASE_TO_GROUP("user tag, empty, test suite 52DL",
+R"(! a)",
+N(DOCVAL, TS("!", "a"))
+);
+
+ADD_CASE_TO_GROUP("tag property in implicit map, std tags",
+R"(ivar: !!int 0
+svar: !!str 0
+fvar: !!float 0.1
+!!int 2: !!float 3
+!!float 3: !!int 3.4
+!!str key: !!int val
+myObject: !myClass { name: Joe, age: 15 }
+picture: !!binary >-
+ R0lGODdhDQAIAIAAAAAAANn
+ Z2SwAAAAADQAIAAACF4SDGQ
+ ar3xxbJ9p0qa7R0YxwzaFME
+ 1IAADs=
+)",
+ L{
+ N("ivar", TS("!!int", "0")),
+ N("svar", TS("!!str", "0")),
+ N("fvar", TS("!!float", "0.1")),
+ N(TS("!!int", "2"), TS("!!float", "3")),
+ N(TS("!!float", "3"), TS("!!int", "3.4")),
+ N(TS("!!str", "key"), TS("!!int", "val")),
+ N("myObject", TL("!myClass", L{N("name", "Joe"), N("age", "15")})),
+ N(QV, "picture", TS("!!binary", R"(R0lGODdhDQAIAIAAAAAAANn Z2SwAAAAADQAIAAACF4SDGQ ar3xxbJ9p0qa7R0YxwzaFME 1IAADs=)")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in implicit map, usr tags",
+R"(ivar: !int 0
+svar: !str 0
+fvar: !float 0.1
+!int 2: !float 3
+!float 3: !int 3.4
+!str key: !int val
+myObject: !myClass { name: Joe, age: 15 }
+picture: !binary >-
+ R0lGODdhDQAIAIAAAAAAANn
+ Z2SwAAAAADQAIAAACF4SDGQ
+ ar3xxbJ9p0qa7R0YxwzaFME
+ 1IAADs=
+)",
+ L{
+ N("ivar", TS("!int", "0")),
+ N("svar", TS("!str", "0")),
+ N("fvar", TS("!float", "0.1")),
+ N(TS("!int", "2"), TS("!float", "3")),
+ N(TS("!float", "3"), TS("!int", "3.4")),
+ N(TS("!str", "key"), TS("!int", "val")),
+ N("myObject", TL("!myClass", L{N("name", "Joe"), N("age", "15")})),
+ N(QV, "picture", TS("!binary", R"(R0lGODdhDQAIAIAAAAAAANn Z2SwAAAAADQAIAAACF4SDGQ ar3xxbJ9p0qa7R0YxwzaFME 1IAADs=)")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in explicit map, std tags",
+R"({
+ivar: !!int 0,
+svar: !!str 0,
+!!str key: !!int val
+})",
+ L{
+ N("ivar", TS("!!int", "0")),
+ N("svar", TS("!!str", "0")),
+ N(TS("!!str", "key"), TS("!!int", "val"))
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in explicit map, usr tags",
+R"({
+ivar: !int 0,
+svar: !str 0,
+!str key: !int val
+}
+)",
+ L{
+ N("ivar", TS("!int", "0")),
+ N("svar", TS("!str", "0")),
+ N(TS("!str", "key"), TS("!int", "val"))
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in implicit seq, std tags",
+R"(- !!int 0
+- !!str 0
+)",
+ L{
+ N(TS("!!int", "0")),
+ N(TS("!!str", "0")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in implicit seq, usr tags",
+R"(- !int 0
+- !str 0
+)",
+ L{
+ N(TS("!int", "0")),
+ N(TS("!str", "0")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in explicit seq, std tags",
+R"([
+!!int 0,
+!!str 0
+]
+)",
+ L{
+ N(TS("!!int", "0")),
+ N(TS("!!str", "0")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tag property in explicit seq, usr tags",
+R"([
+!int 0,
+!str 0
+]
+)",
+ L{
+ N(TS("!int", "0")),
+ N(TS("!str", "0")),
+ }
+);
+
+ADD_CASE_TO_GROUP("tagged explicit sequence in map, std tags",
+R"(some_seq: !!its_type [
+!!int 0,
+!!str 0
+]
+)",
+ L{N("some_seq", TL("!!its_type", L{
+ N(TS("!!int", "0")),
+ N(TS("!!str", "0")),
+ }))
+ }
+);
+
+ADD_CASE_TO_GROUP("tagged explicit sequence in map, usr tags",
+R"(some_seq: !its_type [
+!int 0,
+!str 0
+]
+)",
+ L{N("some_seq", TL("!its_type", L{
+ N(TS("!int", "0")),
+ N(TS("!str", "0")),
+ }))
+ }
+);
+
+ADD_CASE_TO_GROUP("tagged doc",
+R"(
+--- !!map
+a: 0
+b: 1
+--- !map
+? a
+: b
+--- !!seq
+- a
+- b
+--- !!str
+a
+ b
+...
+--- !!str a b
+...
+--- !!str a b
+--- !!str
+a: b
+--- !!str a: b
+---
+!!str a: b
+---
+!!str a
+ b
+---
+!!set
+? a
+? b
+--- !!set
+? a
+? b
+)",
+N(STREAM, L{
+ N(DOCMAP, TL("!!map", L{N("a", "0"), N("b", "1")})),
+ N(DOCMAP, TL("!map", L{N("a", "b")})),
+ N(DOCSEQ, TL("!!seq", L{N("a"), N("b")})),
+ N(DOCVAL, TS("!!str", "a b")),
+ N(DOCVAL, TS("!!str", "a b")),
+ N(DOCVAL, TS("!!str", "a b")),
+ N(DOCVAL, TS("!!str", "a: b")),
+ N(DOCVAL, TS("!!str", "a: b")),
+ N(DOCMAP, L{N(TS("!!str", "a"), "b")}),
+ N(DOCVAL, TS("!!str", "a b")),
+ N(DOCMAP, TL("!!set", L{N(KEYVAL, "a", /*"~"*/{}), N(KEYVAL, "b", /*"~"*/{})})),
+ N(DOCMAP, TL("!!set", L{N(KEYVAL, "a", /*"~"*/{}), N(KEYVAL, "b", /*"~"*/{})})),
+}));
+
+
+ADD_CASE_TO_GROUP("ambiguous tag in map, std tag",
+R"(!!map
+!!str a0: !!xxx b0
+!!str fooz: !!map
+ k1: !!float 1.0
+ k3: !!float 2.0
+!!str foo: !!map
+ !!int 1: !!float 20.0
+ !!int 3: !!float 40.0
+bar: !!map
+ 10: !!str 2
+ 30: !!str 4
+!!str baz:
+ !!int 10: !!float 20
+ !!int 30: !!float 40
+)",
+TL("!!map", L{
+ N(TS("!!str", "a0"), TS("!!xxx", "b0")),
+ N(TS("!!str", "fooz"), TL("!!map", L{N("k1", TS("!!float", "1.0")), N("k3", TS("!!float", "2.0"))})),
+ N(TS("!!str", "foo"), TL("!!map", L{N(TS("!!int", "1"), TS("!!float", "20.0")), N(TS("!!int", "3"), TS("!!float", "40.0"))})),
+ N("bar", TL("!!map", L{N("10", TS("!!str", "2")), N("30", TS("!!str", "4"))})),
+ N(TS("!!str", "baz"), L{N(TS("!!int", "10"), TS("!!float", "20")), N(TS("!!int", "30"), TS("!!float", "40"))}),
+}));
+
+ADD_CASE_TO_GROUP("ambiguous tag in map, usr tag",
+R"(!map
+!str a0: !xxx b0
+!str fooz: !map
+ k1: !float 1.0
+ k3: !float 2.0
+!str foo: !map
+ !int 1: !float 20.0
+ !int 3: !float 40.0
+bar: !map
+ 10: !str 2
+ 30: !str 4
+!str baz:
+ !int 10: !float 20
+ !int 30: !float 40
+)",
+TL("!map", L{
+ N(TS("!str", "a0"), TS("!xxx", "b0")),
+ N(TS("!str", "fooz"), TL("!map", L{N("k1", TS("!float", "1.0")), N("k3", TS("!float", "2.0"))})),
+ N(TS("!str", "foo"), TL("!map", L{N(TS("!int", "1"), TS("!float", "20.0")), N(TS("!int", "3"), TS("!float", "40.0"))})),
+ N("bar", TL("!map", L{N("10", TS("!str", "2")), N("30", TS("!str", "4"))})),
+ N(TS("!str", "baz"), L{N(TS("!int", "10"), TS("!float", "20")), N(TS("!int", "30"), TS("!float", "40"))}),
+}));
+
+
+ADD_CASE_TO_GROUP("ambiguous tag in seq, std tag",
+R"(!!seq
+- !!str k1: v1
+ !!str k2: v2
+ !!str k3: v3
+- !!map
+ !!str k4: v4
+ !!str k5: v5
+ !!str k6: v6
+- !!map
+ k7: v7
+ k8: v8
+ k9: v9
+- - !!str v10
+ - !!str v20
+ - !!str v30
+- !!seq
+ - !!str v40
+ - !!str v50
+ - !!str v60
+- !!seq
+ - v70
+ - v80
+ - v90
+)",
+TL("!!seq", L{
+ N(L{N(TS("!!str", "k1"), "v1"), N(TS("!!str", "k2"), "v2"), N(TS("!!str", "k3"), "v3"), }),
+ N(TL("!!map", L{N(TS("!!str", "k4"), "v4"), N(TS("!!str", "k5"), "v5"), N(TS("!!str", "k6"), "v6"), })),
+ N(TL("!!map", L{N("k7", "v7"), N("k8", "v8"), N("k9", "v9"), })),
+ N(L{N(TS("!!str", "v10")), N(TS("!!str", "v20")), N(TS("!!str", "v30"))}),
+ N(TL("!!seq", L{N(TS("!!str", "v40")), N(TS("!!str", "v50")), N(TS("!!str", "v60"))})),
+ N(TL("!!seq", L{N("v70"), N("v80"), N("v90")})),
+}));
+
+ADD_CASE_TO_GROUP("ambiguous tag in seq, usr tag",
+R"(!seq
+- !str k1: v1
+ !str k2: v2
+ !str k3: v3
+- !map
+ !str k4: v4
+ !str k5: v5
+ !str k6: v6
+- !map
+ k7: v7
+ k8: v8
+ k9: v9
+- - !str v10
+ - !str v20
+ - !str v30
+- !seq
+ - !str v40
+ - !str v50
+ - !str v60
+- !seq
+ - v70
+ - v80
+ - v90
+)",
+TL("!seq", L{
+ N(L{N(TS("!str", "k1"), "v1"), N(TS("!str", "k2"), "v2"), N(TS("!str", "k3"), "v3"), }),
+ N(TL("!map", L{N(TS("!str", "k4"), "v4"), N(TS("!str", "k5"), "v5"), N(TS("!str", "k6"), "v6"), })),
+ N(TL("!map", L{N("k7", "v7"), N("k8", "v8"), N("k9", "v9"), })),
+ N(L{N(TS("!str", "v10")), N(TS("!str", "v20")), N(TS("!str", "v30"))}),
+ N(TL("!seq", L{N(TS("!str", "v40")), N(TS("!str", "v50")), N(TS("!str", "v60"))})),
+ N(TL("!seq", L{N("v70"), N("v80"), N("v90")})),
+}));
+}
+
+} // namespace yml
+} // namespace c4
diff --git a/thirdparty/ryml/test/test_tree.cpp b/thirdparty/ryml/test/test_tree.cpp
new file mode 100644
index 000000000..b6aad0435
--- /dev/null
+++ b/thirdparty/ryml/test/test_tree.cpp
@@ -0,0 +1,3924 @@
+#ifndef RYML_SINGLE_HEADER
+#include "c4/yml/std/std.hpp"
+#include "c4/yml/parse.hpp"
+#include "c4/yml/emit.hpp"
+#include <c4/format.hpp>
+#include <c4/yml/detail/checks.hpp>
+#include <c4/yml/detail/print.hpp>
+#endif
+#include "./test_case.hpp"
+#include "./callbacks_tester.hpp"
+
+#include <gtest/gtest.h>
+
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4389) // signed/unsigned mismatch
+#elif defined(__clang__)
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+#endif
+
+namespace c4 {
+namespace yml {
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+void node_scalar_test_empty(NodeScalar const& s)
+{
+ EXPECT_TRUE(s.empty());
+ EXPECT_EQ(s.tag, "");
+ EXPECT_EQ(s.tag.len, 0u);
+ EXPECT_TRUE(s.tag.empty());
+ EXPECT_EQ(s.scalar, "");
+ EXPECT_EQ(s.scalar.len, 0u);
+ EXPECT_TRUE(s.scalar.empty());
+}
+
+void node_scalar_test_foo(NodeScalar const& s, bool with_tag=false)
+{
+ EXPECT_FALSE(s.empty());
+ if(with_tag)
+ {
+ EXPECT_EQ(s.tag, "!!str");
+ EXPECT_EQ(s.tag.len, 5u);
+ EXPECT_FALSE(s.tag.empty());
+ }
+ else
+ {
+ EXPECT_EQ(s.tag, "");
+ EXPECT_EQ(s.tag.len, 0u);
+ EXPECT_TRUE(s.tag.empty());
+ }
+ EXPECT_EQ(s.scalar, "foo");
+ EXPECT_EQ(s.scalar.len, 3u);
+ EXPECT_FALSE(s.scalar.empty());
+}
+
+void node_scalar_test_foo3(NodeScalar const& s, bool with_tag=false)
+{
+ EXPECT_FALSE(s.empty());
+ if(with_tag)
+ {
+ EXPECT_EQ(s.tag, "!!str+++");
+ EXPECT_EQ(s.tag.len, 8u);
+ EXPECT_FALSE(s.tag.empty());
+ }
+ else
+ {
+ EXPECT_EQ(s.tag, "");
+ EXPECT_EQ(s.tag.len, 0u);
+ EXPECT_TRUE(s.tag.empty());
+ }
+ EXPECT_EQ(s.scalar, "foo3");
+ EXPECT_EQ(s.scalar.len, 4u);
+ EXPECT_FALSE(s.scalar.empty());
+}
+
+TEST(NodeScalar, ctor_empty)
+{
+ NodeScalar s;
+ node_scalar_test_empty(s);
+}
+
+TEST(NodeScalar, ctor__untagged)
+{
+ {
+ const char sarr[] = "foo";
+ const char *sptr = "foo";
+ csubstr ssp = "foo";
+
+ for(auto s : {NodeScalar(sarr), NodeScalar(to_csubstr(sptr)), NodeScalar(ssp)})
+ {
+ node_scalar_test_foo(s);
+ }
+
+ NodeScalar s;
+ s = {sarr};
+ node_scalar_test_foo(s);
+ s = to_csubstr(sptr);
+ node_scalar_test_foo(s);
+ s = {ssp};
+ node_scalar_test_foo(s);
+ }
+
+ {
+ const char sarr[] = "foo3";
+ const char *sptr = "foo3";
+ csubstr ssp = "foo3";
+
+ for(auto s : {NodeScalar(sarr), NodeScalar(to_csubstr(sptr)), NodeScalar(ssp)})
+ {
+ node_scalar_test_foo3(s);
+ }
+
+ NodeScalar s;
+ {
+ SCOPED_TRACE("here 1");
+ s = {sarr};
+ node_scalar_test_foo3(s);
+ }
+ {
+ SCOPED_TRACE("here 2");
+ s = to_csubstr(sptr);
+ node_scalar_test_foo3(s);
+ }
+ {
+ SCOPED_TRACE("here 3");
+ s = ssp;
+ node_scalar_test_foo3(s);
+ }
+ }
+}
+
+TEST(NodeScalar, ctor__tagged)
+{
+ {
+ const char sarr[] = "foo", tarr[] = "!!str";
+ const char *sptr = "foo";
+ const char *tptr = "!!str";
+ csubstr ssp = "foo", tsp = "!!str";
+
+ for(NodeScalar s : {
+ NodeScalar(tsp, ssp),
+ NodeScalar(tsp, to_csubstr(sptr)),
+ NodeScalar(tsp, sarr),
+ NodeScalar(to_csubstr(tptr), ssp),
+ NodeScalar(to_csubstr(tptr), to_csubstr(sptr)),
+ NodeScalar(to_csubstr(tptr), sarr),
+ NodeScalar(tarr, ssp),
+ NodeScalar(tarr, to_csubstr(sptr)),
+ NodeScalar(tarr, sarr),
+ })
+ {
+ node_scalar_test_foo(s, true);
+ }
+
+ NodeScalar s;
+
+ {
+ SCOPED_TRACE("here 0.0");
+ s = {tsp, ssp};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 0.1");
+ s = {tsp, to_csubstr(sptr)};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 0.2");
+ s = {tsp, sarr};
+ node_scalar_test_foo(s, true);
+ }
+
+ {
+ SCOPED_TRACE("here 1.0");
+ s = {to_csubstr(tptr), ssp};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 1.1");
+ s = {to_csubstr(tptr), to_csubstr(sptr)};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 1.3");
+ s = {to_csubstr(tptr), sarr};
+ node_scalar_test_foo(s, true);
+ }
+
+ {
+ SCOPED_TRACE("here 3.0");
+ s = {tarr, ssp};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 3.1");
+ s = {tarr, to_csubstr(sptr)};
+ node_scalar_test_foo(s, true);
+ }
+ {
+ SCOPED_TRACE("here 3.3");
+ s = {tarr, sarr};
+ node_scalar_test_foo(s, true);
+ }
+
+ }
+
+ {
+ const char sarr[] = "foo3", tarr[] = "!!str+++";
+ const char *sptr = "foo3";
+ const char *tptr = "!!str+++";
+ csubstr ssp = "foo3", tsp = "!!str+++";
+
+ NodeScalar wtf = {tsp, ssp};
+ EXPECT_EQ(wtf.tag, tsp);
+ EXPECT_EQ(wtf.scalar, ssp);
+ for(auto s : {
+ NodeScalar(tsp, ssp),
+ NodeScalar(tsp, to_csubstr(sptr)),
+ NodeScalar(tsp, sarr),
+ NodeScalar(to_csubstr(tptr), ssp),
+ NodeScalar(to_csubstr(tptr), to_csubstr(sptr)),
+ NodeScalar(to_csubstr(tptr), sarr),
+ NodeScalar(tarr, ssp),
+ NodeScalar(tarr, to_csubstr(sptr)),
+ NodeScalar(tarr, sarr),
+ })
+ {
+ node_scalar_test_foo3(s, true);
+ }
+
+ NodeScalar s;
+
+ {
+ SCOPED_TRACE("here 0.0");
+ s = {tsp, ssp};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 0.1");
+ s = {tsp, to_csubstr(sptr)};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 0.3");
+ s = {tsp, sarr};
+ node_scalar_test_foo3(s, true);
+ }
+
+ {
+ SCOPED_TRACE("here 1.0");
+ s = {to_csubstr(tptr), ssp};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 1.1");
+ s = {to_csubstr(tptr), to_csubstr(sptr)};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 1.3");
+ s = {to_csubstr(tptr), sarr};
+ node_scalar_test_foo3(s, true);
+ }
+
+ {
+ SCOPED_TRACE("here 3.0");
+ s = {tarr, ssp};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 3.1");
+ s = {tarr, to_csubstr(sptr)};
+ node_scalar_test_foo3(s, true);
+ }
+ {
+ SCOPED_TRACE("here 3.3");
+ s = {tarr, sarr};
+ node_scalar_test_foo3(s, true);
+ }
+
+ }
+
+}
+
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(NodeInit, ctor__empty)
+{
+ // a default-constructed NodeInit must have no type and empty key/val scalars and tags
+ NodeInit n;
+ EXPECT_EQ((type_bits)n.type, (type_bits)NOTYPE);
+ EXPECT_EQ(n.key.scalar, "");
+ EXPECT_EQ(n.key.tag, "");
+ EXPECT_EQ(n.val.scalar, "");
+ EXPECT_EQ(n.val.tag, "");
+}
+
+TEST(NodeInit, ctor__type_only)
+{
+ // a type-only NodeInit must carry the given type and leave key/val empty
+ for(auto k : {KEY, KEYVAL, MAP, KEYMAP, SEQ, KEYSEQ})
+ {
+ SCOPED_TRACE(NodeType::type_str(k));
+ NodeInit n(k);
+ EXPECT_EQ((type_bits)n.type, (type_bits)k);
+ EXPECT_EQ(n.key.scalar, "");
+ EXPECT_EQ(n.key.tag, "");
+ EXPECT_EQ(n.val.scalar, "");
+ EXPECT_EQ(n.val.tag, "");
+ }
+}
+
+TEST(NodeInit, ctor__val_only)
+{
+ // constructing NodeInit from a single scalar (array, pointer or csubstr)
+ // must set .val and leave .key empty; exercises every ctor overload form
+ {
+ const char sarr[] = "foo";
+ const char *sptr = "foo"; size_t sptrlen = 3;
+ csubstr ssp = "foo";
+
+ {
+ SCOPED_TRACE("here 0");
+ {
+ NodeInit s(sarr);
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ {
+ NodeInit s{to_csubstr(sptr)};
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ {
+ NodeInit s{sarr};
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ }
+
+ {
+ SCOPED_TRACE("here 1");
+ {
+ NodeInit s(sarr);
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ {
+ NodeInit s(to_csubstr(sptr));
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ {
+ NodeInit s(sarr);
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+ }
+
+ {
+ SCOPED_TRACE("here 2");
+ NodeInit s;
+ s = {sarr};
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ s = {to_csubstr(sptr)};
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ // deliberately kept: a (ptr,len) braced pair must NOT be accepted here
+ //s = {sptr, sptrlen}; // fails to compile
+ //node_scalar_test_foo(s.val);
+ //node_scalar_test_empty(s.key);
+ //s.clear();
+ s = {ssp};
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ s.clear();
+ }
+
+ for(auto s : {
+ NodeInit(sarr),
+ NodeInit(to_csubstr(sptr)),
+ NodeInit(csubstr{sptr, sptrlen}),
+ NodeInit(ssp)})
+ {
+ SCOPED_TRACE("here LOOP");
+ node_scalar_test_foo(s.val);
+ node_scalar_test_empty(s.key);
+ }
+ }
+
+ // repeat with a string whose length differs from the array (no trailing-null confusion)
+ {
+ const char sarr[] = "foo3";
+ const char *sptr = "foo3"; size_t sptrlen = 4;
+ csubstr ssp = "foo3";
+
+ {
+ SCOPED_TRACE("here 0");
+ NodeInit s = {sarr};
+ node_scalar_test_foo3(s.val);
+ node_scalar_test_empty(s.key);
+ }
+ { // FAILS
+ SCOPED_TRACE("here 1");
+ // deliberately kept: copy-initialization from the array must NOT compile
+ //NodeInit s = sarr;
+ //node_scalar_test_foo3(s.val);
+ //node_scalar_test_empty(s.key);
+ }
+ {
+ SCOPED_TRACE("here 2");
+ NodeInit s{sarr};
+ node_scalar_test_foo3(s.val);
+ node_scalar_test_empty(s.key);
+ }
+ {
+ SCOPED_TRACE("here 3");
+ NodeInit s(sarr);
+ node_scalar_test_foo3(s.val);
+ node_scalar_test_empty(s.key);
+ }
+
+ for(auto s : {
+ NodeInit(sarr),
+ NodeInit(to_csubstr(sptr)),
+ NodeInit(csubstr{sptr, sptrlen}),
+ NodeInit(ssp)})
+ {
+ SCOPED_TRACE("here LOOP");
+ node_scalar_test_foo3(s.val);
+ node_scalar_test_empty(s.key);
+ }
+ }
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(Tree, empty_ctor)
+{
+ // a default-constructed tree has no nodes, no arena, and the global callbacks
+ Tree tree;
+ EXPECT_EQ(tree.callbacks(), get_callbacks());
+ EXPECT_EQ(tree.empty(), true);
+ EXPECT_EQ(tree.capacity(), 0u);
+ EXPECT_EQ(tree.arena_capacity(), 0u);
+ EXPECT_EQ(tree.arena_slack(), 0u);
+ EXPECT_EQ(tree.size(), 0u);
+ EXPECT_EQ(tree.slack(), 0u);
+ EXPECT_EQ(tree.arena().empty(), true);
+}
+
+TEST(Tree, node_cap_ctor)
+{
+ // reserving node capacity at construction creates the root node;
+ // reserving only arena capacity does not
+ {
+ Tree tree(10u);
+ EXPECT_EQ(tree.callbacks(), get_callbacks());
+ EXPECT_EQ(tree.empty(), false); // we have the root
+ EXPECT_EQ(tree.capacity(), 10u);
+ EXPECT_EQ(tree.arena_capacity(), 0u);
+ EXPECT_EQ(tree.arena_slack(), 0u);
+ EXPECT_EQ(tree.arena().empty(), true);
+ EXPECT_EQ(tree.size(), 1u); // we have the root
+ EXPECT_EQ(tree.slack(), 9u);
+ }
+ {
+ Tree tree(10u, 20u);
+ EXPECT_EQ(tree.callbacks(), get_callbacks());
+ EXPECT_EQ(tree.empty(), false); // we have the root
+ EXPECT_EQ(tree.capacity(), 10u);
+ EXPECT_EQ(tree.arena_capacity(), 20u);
+ EXPECT_EQ(tree.arena().empty(), true);
+ EXPECT_EQ(tree.size(), 1u); // we have the root
+ EXPECT_EQ(tree.slack(), 9u);
+ }
+ {
+ Tree tree(0u, 20u);
+ EXPECT_EQ(tree.callbacks(), get_callbacks());
+ EXPECT_EQ(tree.empty(), true);
+ EXPECT_EQ(tree.capacity(), 0u);
+ EXPECT_EQ(tree.arena_capacity(), 20u);
+ EXPECT_EQ(tree.arena_slack(), 20u);
+ EXPECT_EQ(tree.arena().empty(), true);
+ EXPECT_EQ(tree.size(), 0u);
+ EXPECT_EQ(tree.slack(), 0u);
+ }
+}
+
+/// Build a small fixture tree, optionally with the callbacks of @p cbt
+/// (falls back to the global callbacks when @p cbt is null). The parsed
+/// source plus two serialized values guarantee the arena is non-empty.
+Tree get_test_tree(CallbacksTester *cbt=nullptr)
+{
+    Parser p(cbt != nullptr ? cbt->callbacks() : get_callbacks());
+    Tree tree = p.parse_in_arena("", "{a: b, c: d, e: [0, 1, 2, 3]}");
+    // make sure the tree has strings in its arena
+    NodeRef root = tree.rootref();
+    NodeRef child = root.append_child();
+    child << key("serialized_key");
+    child << 89;
+    return tree;
+}
+
+TEST(Tree, test_tree_has_arena)
+{
+ // sanity-check the fixture above: the arena must be non-empty, with
+ // both the default callbacks and tester-provided callbacks
+ {
+ Tree t = get_test_tree();
+ ASSERT_GT(t.arena().size(), 0u);
+ }
+ {
+ CallbacksTester cbt;
+ Tree t = get_test_tree(&cbt);
+ ASSERT_GT(t.arena().size(), 0u);
+ }
+}
+
+//-------------------------------------------
+TEST(Tree, copy_ctor)
+{
+ // copy construction must deep-copy (separate arenas) and share callbacks
+ CallbacksTester cbt;
+ {
+ Tree src = get_test_tree(&cbt);
+ test_invariants(src);
+ {
+ Tree dst(src);
+ test_invariants(dst);
+ test_compare(dst, src);
+ test_arena_not_shared(dst, src);
+ EXPECT_EQ(dst.callbacks(), src.callbacks());
+ }
+ }
+}
+
+//-------------------------------------------
+TEST(Tree, move_ctor)
+{
+ // move construction must transfer contents to the destination; the
+ // asserts on src after the move deliberately pin that the moved-from
+ // tree is left empty (valid state guaranteed by Tree's move ctor)
+ CallbacksTester cbt;
+ Tree src = get_test_tree(&cbt);
+ test_invariants(src);
+ Tree save(src);
+ test_invariants(save);
+ test_compare(save, src);
+ {
+ Tree dst(std::move(src));
+ EXPECT_EQ(src.empty(), true);
+ EXPECT_EQ(src.size(), 0u);
+ EXPECT_EQ(src.arena().empty(), true);
+ EXPECT_EQ(dst.size(), save.size());
+ EXPECT_EQ(dst.arena(), save.arena());
+ test_invariants(src);
+ test_invariants(dst);
+ test_compare(dst, save);
+ test_arena_not_shared(src, dst);
+ test_arena_not_shared(save, dst);
+ }
+}
+
+//-------------------------------------------
+TEST(Tree, copy_assign_same_callbacks)
+{
+ // copy assignment between trees sharing callbacks: deep copy, callbacks kept
+ CallbacksTester cbt;
+ {
+ Tree src = get_test_tree(&cbt);
+ test_invariants(src);
+ {
+ Tree dst(cbt.callbacks());
+ EXPECT_EQ(dst.callbacks(), src.callbacks());
+ test_invariants(dst);
+ dst = src;
+ test_invariants(dst);
+ test_compare(dst, src);
+ test_arena_not_shared(dst, src);
+ EXPECT_EQ(dst.callbacks(), src.callbacks());
+ }
+ }
+}
+
+TEST(Tree, copy_assign_diff_callbacks)
+{
+ // copy assignment across different callbacks: dst must adopt src's callbacks
+ CallbacksTester cbsrc("src");
+ CallbacksTester cbdst("dst");
+ {
+ Tree src = get_test_tree(&cbsrc);
+ EXPECT_EQ(src.callbacks(), cbsrc.callbacks());
+ test_invariants(src);
+ {
+ Tree dst = get_test_tree(&cbdst);
+ EXPECT_EQ(dst.callbacks(), cbdst.callbacks());
+ test_invariants(dst);
+ dst = src;
+ test_invariants(dst);
+ test_compare(dst, src);
+ test_arena_not_shared(dst, src);
+ EXPECT_EQ(dst.callbacks(), src.callbacks());
+ }
+ }
+}
+
+//-------------------------------------------
+TEST(Tree, move_assign_same_callbacks)
+{
+ // move assignment with shared callbacks; the asserts on src after the
+ // move deliberately pin that the moved-from tree is emptied but keeps
+ // its callbacks
+ CallbacksTester cbt;
+ Tree src = get_test_tree(&cbt);
+ test_invariants(src);
+ Tree save(src);
+ EXPECT_EQ(save.callbacks(), src.callbacks());
+ test_invariants(save);
+ test_compare(save, src);
+ {
+ Tree dst = get_test_tree(&cbt);
+ EXPECT_NE(dst.empty(), true);
+ EXPECT_NE(dst.size(), 0u);
+ EXPECT_NE(dst.arena().empty(), true);
+ dst = std::move(src);
+ EXPECT_EQ(src.empty(), true);
+ EXPECT_EQ(src.size(), 0u);
+ EXPECT_EQ(src.arena().empty(), true);
+ EXPECT_EQ(src.callbacks(), cbt.callbacks());
+ EXPECT_EQ(dst.size(), save.size());
+ EXPECT_EQ(dst.arena(), save.arena());
+ EXPECT_EQ(dst.callbacks(), save.callbacks());
+ test_invariants(src);
+ test_invariants(dst);
+ test_compare(dst, save);
+ test_arena_not_shared(src, dst);
+ test_arena_not_shared(save, dst);
+ }
+}
+
+TEST(Tree, move_assign_diff_callbacks)
+{
+ // move assignment across different callbacks: dst must adopt src's
+ // callbacks (hence the EXPECT_NE against its original ones)
+ CallbacksTester cbsrc("src");
+ CallbacksTester cbdst("dst");
+ Tree src = get_test_tree(&cbsrc);
+ test_invariants(src);
+ Tree save(src);
+ test_invariants(save);
+ test_compare(save, src);
+ {
+ Tree dst = get_test_tree(&cbdst);
+ EXPECT_NE(dst.empty(), true);
+ EXPECT_NE(dst.size(), 0u);
+ EXPECT_NE(dst.arena().empty(), true);
+ EXPECT_EQ(dst.callbacks(), cbdst.callbacks());
+ dst = std::move(src);
+ EXPECT_EQ(src.empty(), true);
+ EXPECT_EQ(src.size(), 0u);
+ EXPECT_EQ(src.arena().empty(), true);
+ EXPECT_EQ(src.callbacks(), cbsrc.callbacks());
+ EXPECT_EQ(dst.size(), save.size());
+ EXPECT_EQ(dst.arena(), save.arena());
+ EXPECT_NE(dst.callbacks(), cbdst.callbacks());
+ EXPECT_EQ(dst.callbacks(), save.callbacks());
+ test_invariants(src);
+ test_invariants(dst);
+ test_compare(dst, save);
+ test_arena_not_shared(src, dst);
+ test_arena_not_shared(save, dst);
+ }
+}
+
+TEST(Tree, std_interop)
+{
+    // trees carrying custom callbacks must be usable inside std containers
+    // (move/emplace during vector growth must not break them)
+    CallbacksTester cbt;
+    std::vector<Tree> forest;
+    const size_t num_trees = 3;
+    while(forest.size() < num_trees)
+    {
+        forest.emplace_back(cbt.callbacks());
+        parse_in_arena("{foo: bar}", &forest.back());
+    }
+}
+
+
+//-------------------------------------------
+//-------------------------------------------
+TEST(Tree, reserve)
+{
+    // reserve()/reserve_arena() must grow capacity without losing contents,
+    // and be a no-op (same buffer) when the capacity is already available.
+    // NOTE: literals use the u suffix for consistency with the other Tree
+    // tests (capacity()/size()/slack() etc return unsigned values).
+    Tree t(16, 64);
+    EXPECT_EQ(t.capacity(), 16u);
+    EXPECT_EQ(t.slack(), 15u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 64u);
+    EXPECT_EQ(t.arena_slack(), 64u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // reserving within the current capacity must not reallocate
+    auto buf = t.m_buf;
+    t.reserve(16);
+    t.reserve_arena(64);
+    EXPECT_EQ(t.m_buf, buf);
+    EXPECT_EQ(t.capacity(), 16u);
+    EXPECT_EQ(t.slack(), 15u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 64u);
+    EXPECT_EQ(t.arena_slack(), 64u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // reserving beyond the current capacity must grow it
+    t.reserve(32);
+    t.reserve_arena(128);
+    EXPECT_EQ(t.capacity(), 32u);
+    EXPECT_EQ(t.slack(), 31u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 128u);
+    EXPECT_EQ(t.arena_slack(), 128u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // parsing within the reserved capacity must not reallocate
+    buf = t.m_buf;
+    parse_in_arena("[a, b, c, d, e, f]", &t);
+    EXPECT_EQ(t.m_buf, buf);
+    EXPECT_EQ(t.capacity(), 32u);
+    EXPECT_EQ(t.slack(), 25u);
+    EXPECT_EQ(t.size(), 7u);
+    EXPECT_EQ(t.arena_capacity(), 128u);
+    EXPECT_EQ(t.arena_slack(), 110u);
+    EXPECT_EQ(t.arena_size(), 18u);
+    test_invariants(t);
+
+    // growing again must preserve the existing nodes and arena contents
+    t.reserve(64);
+    t.reserve_arena(256);
+    EXPECT_EQ(t.capacity(), 64u);
+    EXPECT_EQ(t.slack(), 57u);
+    EXPECT_EQ(t.size(), 7u);
+    EXPECT_EQ(t.arena_capacity(), 256u);
+    EXPECT_EQ(t.arena_slack(), 238u);
+    EXPECT_EQ(t.arena_size(), 18u);
+    test_invariants(t);
+}
+
+// https://github.com/biojppm/rapidyaml/issues/288
+TEST(Tree, reserve_arena_issue288)
+{
+ // regression test: growing the arena while it holds data must keep
+ // the previously copied contents intact
+ Tree t;
+ EXPECT_EQ(t.arena_slack(), 0u);
+ EXPECT_EQ(t.arena_capacity(), 0u);
+ EXPECT_EQ(t.arena_size(), 0u);
+ t.reserve_arena(3u);
+ EXPECT_EQ(t.arena_slack(), 3u);
+ EXPECT_GE(t.arena_capacity(), 3u);
+ EXPECT_EQ(t.arena_size(), 0u);
+ // longer than the slack to cause another call to _grow_arena()
+ std::string stars(2 * t.arena_slack(), '*');
+ t.copy_to_arena(to_csubstr(stars));
+ EXPECT_GE(t.arena_capacity(), stars.size());
+ EXPECT_EQ(t.arena_size(), stars.size());
+ EXPECT_EQ(t.arena(), to_csubstr(stars));
+ // again
+ std::string pluses(2 * t.arena_slack(), '+');
+ t.copy_to_arena(to_csubstr(pluses));
+ EXPECT_GE(t.arena_capacity(), stars.size() + pluses.size());
+ EXPECT_EQ(t.arena_size(), stars.size() + pluses.size());
+ EXPECT_EQ(t.arena().first(stars.size()), to_csubstr(stars));
+ EXPECT_EQ(t.arena().last(pluses.size()), to_csubstr(pluses));
+}
+
+TEST(Tree, clear)
+{
+    // clear()/clear_arena() must discard contents but keep the allocated
+    // capacity, so subsequent reserves within capacity do not reallocate.
+    // NOTE: literals use the u suffix for consistency with the other Tree
+    // tests (capacity()/size()/slack() etc return unsigned values).
+    Tree t(16, 64);
+    EXPECT_EQ(t.capacity(), 16u);
+    EXPECT_EQ(t.slack(), 15u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 64u);
+    EXPECT_EQ(t.arena_slack(), 64u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // clearing an already-empty tree is a no-op on capacity
+    t.clear();
+    t.clear_arena();
+    EXPECT_EQ(t.capacity(), 16u);
+    EXPECT_EQ(t.slack(), 15u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 64u);
+    EXPECT_EQ(t.arena_slack(), 64u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // reserving within the current capacity must not reallocate
+    auto buf = t.m_buf;
+    t.reserve(16);
+    t.reserve_arena(64);
+    EXPECT_EQ(t.m_buf, buf);
+    EXPECT_EQ(t.capacity(), 16u);
+    EXPECT_EQ(t.slack(), 15u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 64u);
+    EXPECT_EQ(t.arena_slack(), 64u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    t.reserve(32);
+    t.reserve_arena(128);
+    EXPECT_EQ(t.capacity(), 32u);
+    EXPECT_EQ(t.slack(), 31u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 128u);
+    EXPECT_EQ(t.arena_slack(), 128u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+
+    // fill the tree, then clear: nodes and arena are discarded, capacity kept
+    buf = t.m_buf;
+    parse_in_arena("[a, b, c, d, e, f]", &t);
+    EXPECT_EQ(t.m_buf, buf);
+    EXPECT_EQ(t.capacity(), 32u);
+    EXPECT_EQ(t.slack(), 25u);
+    EXPECT_EQ(t.size(), 7u);
+    EXPECT_EQ(t.arena_capacity(), 128u);
+    EXPECT_EQ(t.arena_slack(), 110u);
+    EXPECT_EQ(t.arena_size(), 18u);
+    test_invariants(t);
+
+    t.clear();
+    t.clear_arena();
+    EXPECT_EQ(t.capacity(), 32u);
+    EXPECT_EQ(t.slack(), 31u);
+    EXPECT_EQ(t.size(), 1u);
+    EXPECT_EQ(t.arena_capacity(), 128u);
+    EXPECT_EQ(t.arena_slack(), 128u);
+    EXPECT_EQ(t.arena_size(), 0u);
+    test_invariants(t);
+}
+
+
+//-------------------------------------------
+
+TEST(Tree, ref)
+{
+    // ref() must return a NodeRef addressing exactly the requested id;
+    // id 0 is the seq root, ids 1..4 are its value children
+    Tree t = parse_in_arena("[0, 1, 2, 3]");
+    for(size_t i = 0; i < 5; ++i)
+        EXPECT_EQ(t.ref(i).id(), i);
+    EXPECT_TRUE(t.ref(0).is_seq());
+    for(size_t i = 1; i < 5; ++i)
+        EXPECT_TRUE(t.ref(i).is_val());
+}
+
+TEST(Tree, ref_const)
+{
+    // same as Tree.ref, but through the const overload of ref()
+    const Tree t = parse_in_arena("[0, 1, 2, 3]");
+    for(size_t i = 0; i < 5; ++i)
+        EXPECT_EQ(t.ref(i).id(), i);
+    EXPECT_TRUE(t.ref(0).is_seq());
+    for(size_t i = 1; i < 5; ++i)
+        EXPECT_TRUE(t.ref(i).is_val());
+}
+
+
+TEST(Tree, operator_square_brackets)
+{
+ // operator[] (both const and non-const overloads) must address children
+ // by index in seqs and by key in maps; also exercises ==/!= on noderefs
+ {
+ Tree t = parse_in_arena("[0, 1, 2, 3, 4]");
+ Tree &m = t;
+ Tree const& cm = t;
+ EXPECT_EQ(m[0].val(), "0");
+ EXPECT_EQ(m[1].val(), "1");
+ EXPECT_EQ(m[2].val(), "2");
+ EXPECT_EQ(m[3].val(), "3");
+ EXPECT_EQ(m[4].val(), "4");
+ EXPECT_EQ(cm[0].val(), "0");
+ EXPECT_EQ(cm[1].val(), "1");
+ EXPECT_EQ(cm[2].val(), "2");
+ EXPECT_EQ(cm[3].val(), "3");
+ EXPECT_EQ(cm[4].val(), "4");
+ //
+ EXPECT_TRUE(m[0] == "0");
+ EXPECT_TRUE(m[1] == "1");
+ EXPECT_TRUE(m[2] == "2");
+ EXPECT_TRUE(m[3] == "3");
+ EXPECT_TRUE(m[4] == "4");
+ EXPECT_TRUE(cm[0] == "0");
+ EXPECT_TRUE(cm[1] == "1");
+ EXPECT_TRUE(cm[2] == "2");
+ EXPECT_TRUE(cm[3] == "3");
+ EXPECT_TRUE(cm[4] == "4");
+ //
+ EXPECT_FALSE(m[0] != "0");
+ EXPECT_FALSE(m[1] != "1");
+ EXPECT_FALSE(m[2] != "2");
+ EXPECT_FALSE(m[3] != "3");
+ EXPECT_FALSE(m[4] != "4");
+ EXPECT_FALSE(cm[0] != "0");
+ EXPECT_FALSE(cm[1] != "1");
+ EXPECT_FALSE(cm[2] != "2");
+ EXPECT_FALSE(cm[3] != "3");
+ EXPECT_FALSE(cm[4] != "4");
+ }
+ {
+ Tree t = parse_in_arena("{a: 0, b: 1, c: 2, d: 3, e: 4}");
+ Tree &m = t;
+ Tree const& cm = t;
+ EXPECT_EQ(m["a"].val(), "0");
+ EXPECT_EQ(m["b"].val(), "1");
+ EXPECT_EQ(m["c"].val(), "2");
+ EXPECT_EQ(m["d"].val(), "3");
+ EXPECT_EQ(m["e"].val(), "4");
+ EXPECT_EQ(cm["a"].val(), "0");
+ EXPECT_EQ(cm["b"].val(), "1");
+ EXPECT_EQ(cm["c"].val(), "2");
+ EXPECT_EQ(cm["d"].val(), "3");
+ EXPECT_EQ(cm["e"].val(), "4");
+ //
+ EXPECT_TRUE(m["a"] == "0");
+ EXPECT_TRUE(m["b"] == "1");
+ EXPECT_TRUE(m["c"] == "2");
+ EXPECT_TRUE(m["d"] == "3");
+ EXPECT_TRUE(m["e"] == "4");
+ EXPECT_TRUE(cm["a"] == "0");
+ EXPECT_TRUE(cm["b"] == "1");
+ EXPECT_TRUE(cm["c"] == "2");
+ EXPECT_TRUE(cm["d"] == "3");
+ EXPECT_TRUE(cm["e"] == "4");
+ //
+ EXPECT_FALSE(m["a"] != "0");
+ EXPECT_FALSE(m["b"] != "1");
+ EXPECT_FALSE(m["c"] != "2");
+ EXPECT_FALSE(m["d"] != "3");
+ EXPECT_FALSE(m["e"] != "4");
+ EXPECT_FALSE(cm["a"] != "0");
+ EXPECT_FALSE(cm["b"] != "1");
+ EXPECT_FALSE(cm["c"] != "2");
+ EXPECT_FALSE(cm["d"] != "3");
+ EXPECT_FALSE(cm["e"] != "4");
+ }
+}
+
+TEST(Tree, relocate)
+{
+    // create a tree with anchors and refs, and copy it to ensure the
+    // relocation also applies to the anchors and refs. Ensure to put
+    // the source in the arena so that it gets relocated.
+    Tree tree = parse_in_arena(R"(&keyanchor key: val
+key2: &valanchor val2
+keyref: *keyanchor
+*valanchor: was val anchor
+!!int 0: !!str foo
+!!str doe: !!str a deer a female deer
+ray: a drop of golden sun
+me: a name I call myself
+far: a long long way to run
+)");
+    Tree copy = tree;
+    EXPECT_EQ(copy.size(), tree.size());
+    EXPECT_EQ(emitrs_yaml<std::string>(copy), R"(&keyanchor key: val
+key2: &valanchor val2
+keyref: *keyanchor
+*valanchor: was val anchor
+!!int 0: !!str foo
+!!str doe: !!str a deer a female deer
+ray: a drop of golden sun
+me: a name I call myself
+far: a long long way to run
+)");
+    //
+    Tree copy2 = copy;
+    // fixed: this previously re-checked copy.size() (a duplicate of the
+    // check above); the size of the fresh copy2 is what must be verified
+    EXPECT_EQ(copy2.size(), tree.size());
+    copy2.resolve();
+    EXPECT_EQ(emitrs_yaml<std::string>(copy2), R"(key: val
+key2: val2
+keyref: key
+val2: was val anchor
+!!int 0: !!str foo
+!!str doe: !!str a deer a female deer
+ray: a drop of golden sun
+me: a name I call myself
+far: a long long way to run
+)");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(NodeType, type_str)
+{
+ // avoid coverage misses
+ // plain type combinations map to their exact names; any ref/anchor flag
+ // appends "***"; flag-only values with no base type print "(unk)"
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL).type_str()), "KEYVAL");
+ EXPECT_EQ(to_csubstr(NodeType(KEY).type_str()), "KEY");
+ EXPECT_EQ(to_csubstr(NodeType(VAL).type_str()), "VAL");
+ EXPECT_EQ(to_csubstr(NodeType(MAP).type_str()), "MAP");
+ EXPECT_EQ(to_csubstr(NodeType(SEQ).type_str()), "SEQ");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP).type_str()), "KEYMAP");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ).type_str()), "KEYSEQ");
+ EXPECT_EQ(to_csubstr(NodeType(DOCSEQ).type_str()), "DOCSEQ");
+ EXPECT_EQ(to_csubstr(NodeType(DOCMAP).type_str()), "DOCMAP");
+ EXPECT_EQ(to_csubstr(NodeType(DOCVAL).type_str()), "DOCVAL");
+ EXPECT_EQ(to_csubstr(NodeType(DOC).type_str()), "DOC");
+ EXPECT_EQ(to_csubstr(NodeType(STREAM).type_str()), "STREAM");
+ EXPECT_EQ(to_csubstr(NodeType(NOTYPE).type_str()), "NOTYPE");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYREF).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|VALREF).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYANCH).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|VALANCH).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYREF|VALANCH).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYANCH|VALREF).type_str()), "KEYVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYREF).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|VALREF).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYANCH).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|VALANCH).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYREF|VALANCH).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYANCH|VALREF).type_str()), "KEYMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYREF).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|VALREF).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYANCH).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|VALANCH).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYREF|VALANCH).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYANCH|VALREF).type_str()), "KEYSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCSEQ|VALANCH).type_str()), "DOCSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCSEQ|VALREF).type_str()), "DOCSEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCMAP|VALANCH).type_str()), "DOCMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCMAP|VALREF).type_str()), "DOCMAP***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCVAL|VALANCH).type_str()), "DOCVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(DOCVAL|VALREF).type_str()), "DOCVAL***");
+ EXPECT_EQ(to_csubstr(NodeType(KEY|KEYREF).type_str()), "KEY***");
+ EXPECT_EQ(to_csubstr(NodeType(KEY|KEYANCH).type_str()), "KEY***");
+ EXPECT_EQ(to_csubstr(NodeType(VAL|VALREF).type_str()), "VAL***");
+ EXPECT_EQ(to_csubstr(NodeType(VAL|VALANCH).type_str()), "VAL***");
+ EXPECT_EQ(to_csubstr(NodeType(MAP|VALREF).type_str()), "MAP***");
+ EXPECT_EQ(to_csubstr(NodeType(MAP|VALANCH).type_str()), "MAP***");
+ EXPECT_EQ(to_csubstr(NodeType(SEQ|VALREF).type_str()), "SEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(SEQ|VALANCH).type_str()), "SEQ***");
+ EXPECT_EQ(to_csubstr(NodeType(DOC|VALREF).type_str()), "DOC***");
+ EXPECT_EQ(to_csubstr(NodeType(DOC|VALANCH).type_str()), "DOC***");
+ EXPECT_EQ(to_csubstr(NodeType(KEYREF).type_str()), "(unk)");
+ EXPECT_EQ(to_csubstr(NodeType(VALREF).type_str()), "(unk)");
+ EXPECT_EQ(to_csubstr(NodeType(KEYANCH).type_str()), "(unk)");
+ EXPECT_EQ(to_csubstr(NodeType(VALANCH).type_str()), "(unk)");
+}
+
+TEST(NodeType, is_stream)
+{
+ // only the STREAM bit makes is_stream() true
+ EXPECT_FALSE(NodeType(NOTYPE).is_stream());
+ EXPECT_TRUE(NodeType(STREAM).is_stream());
+}
+
+TEST(Tree, is_stream)
+{
+ // only the root of a multi-doc parse is a stream; is_stream() must agree
+ // across Tree(id), ConstNodeRef, and the underlying NodeType
+ Tree t = parse_in_arena(R"(---
+foo: bar
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t keyval_id = t.first_child(doc_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ EXPECT_TRUE(t.is_stream(stream_id));
+ EXPECT_FALSE(t.is_stream(doc_id));
+ EXPECT_FALSE(t.is_stream(keyval_id));
+ EXPECT_TRUE(stream.is_stream());
+ EXPECT_FALSE(doc.is_stream());
+ EXPECT_FALSE(keyval.is_stream());
+ EXPECT_EQ(t.is_stream(stream_id), t._p(stream_id)->m_type.is_stream());
+ EXPECT_EQ(t.is_stream(doc_id), t._p(doc_id)->m_type.is_stream());
+ EXPECT_EQ(t.is_stream(keyval_id), t._p(keyval_id)->m_type.is_stream());
+ EXPECT_EQ(stream.is_stream(), stream.get()->m_type.is_stream());
+ EXPECT_EQ(doc.is_stream(), doc.get()->m_type.is_stream());
+ EXPECT_EQ(keyval.is_stream(), keyval.get()->m_type.is_stream());
+}
+
+TEST(NodeType, is_doc)
+{
+ // only the DOC bit makes is_doc() true
+ EXPECT_FALSE(NodeType(NOTYPE).is_doc());
+ EXPECT_TRUE(NodeType(DOC).is_doc());
+}
+
+TEST(Tree, is_doc)
+{
+ // both map docs and scalar docs must report is_doc(); agreement is
+ // checked across Tree(id), ConstNodeRef, NodeRef, and NodeType
+ Tree t = parse_in_arena(R"(---
+foo: bar
+---
+a scalar
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t keyval_id = t.first_child(doc_id);
+ const size_t docval_id = t.last_child(stream_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef docval = t.ref(docval_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mdocval = t.ref(docval_id);
+ EXPECT_FALSE(t.is_doc(stream_id));
+ EXPECT_TRUE(t.is_doc(doc_id));
+ EXPECT_FALSE(t.is_doc(keyval_id));
+ EXPECT_TRUE(t.is_doc(docval_id));
+ EXPECT_FALSE(stream.is_doc());
+ EXPECT_TRUE(doc.is_doc());
+ EXPECT_FALSE(keyval.is_doc());
+ EXPECT_TRUE(docval.is_doc());
+ EXPECT_FALSE(mstream.is_doc());
+ EXPECT_TRUE(mdoc.is_doc());
+ EXPECT_FALSE(mkeyval.is_doc());
+ EXPECT_TRUE(mdocval.is_doc());
+ EXPECT_EQ(t.is_doc(stream_id), t._p(stream_id)->m_type.is_doc());
+ EXPECT_EQ(t.is_doc(doc_id), t._p(doc_id)->m_type.is_doc());
+ EXPECT_EQ(t.is_doc(keyval_id), t._p(keyval_id)->m_type.is_doc());
+ EXPECT_EQ(t.is_doc(docval_id), t._p(docval_id)->m_type.is_doc());
+ EXPECT_EQ(stream.is_doc(), stream.get()->m_type.is_doc());
+ EXPECT_EQ(doc.is_doc(), doc.get()->m_type.is_doc());
+ EXPECT_EQ(keyval.is_doc(), keyval.get()->m_type.is_doc());
+ EXPECT_EQ(docval.is_doc(), docval.get()->m_type.is_doc());
+ EXPECT_EQ(mstream.is_doc(), mstream.get()->m_type.is_doc());
+ EXPECT_EQ(mdoc.is_doc(), mdoc.get()->m_type.is_doc());
+ EXPECT_EQ(mkeyval.is_doc(), mkeyval.get()->m_type.is_doc());
+ EXPECT_EQ(mdocval.is_doc(), mdocval.get()->m_type.is_doc());
+}
+
+TEST(NodeType, is_container)
+{
+ // only MAP/SEQ (with or without KEY/DOC bits) are containers
+ EXPECT_FALSE(NodeType(NOTYPE).is_container());
+ EXPECT_FALSE(NodeType(VAL).is_container());
+ EXPECT_FALSE(NodeType(KEY).is_container());
+ EXPECT_FALSE(NodeType(KEYVAL).is_container());
+ EXPECT_TRUE(NodeType(MAP).is_container());
+ EXPECT_TRUE(NodeType(SEQ).is_container());
+ EXPECT_TRUE(NodeType(KEYMAP).is_container());
+ EXPECT_TRUE(NodeType(KEYSEQ).is_container());
+ EXPECT_TRUE(NodeType(DOCMAP).is_container());
+ EXPECT_TRUE(NodeType(DOCSEQ).is_container());
+}
+
+TEST(Tree, is_container)
+{
+ // stream, doc-map, map and seq nodes are containers; keyvals, seq vals
+ // and scalar docs are not; agreement checked across all accessor forms
+ Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t docval_id = t.last_child(stream_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef docval = t.ref(docval_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mdocval = t.ref(docval_id);
+ EXPECT_TRUE(t.is_container(stream_id));
+ EXPECT_TRUE(t.is_container(doc_id));
+ EXPECT_TRUE(t.is_container(map_id));
+ EXPECT_FALSE(t.is_container(keyval_id));
+ EXPECT_TRUE(t.is_container(seq_id));
+ EXPECT_FALSE(t.is_container(val_id));
+ EXPECT_FALSE(t.is_container(docval_id));
+ EXPECT_TRUE(stream.is_container());
+ EXPECT_TRUE(doc.is_container());
+ EXPECT_TRUE(map.is_container());
+ EXPECT_FALSE(keyval.is_container());
+ EXPECT_TRUE(seq.is_container());
+ EXPECT_FALSE(val.is_container());
+ EXPECT_FALSE(docval.is_container());
+ EXPECT_TRUE(mstream.is_container());
+ EXPECT_TRUE(mdoc.is_container());
+ EXPECT_TRUE(mmap.is_container());
+ EXPECT_FALSE(mkeyval.is_container());
+ EXPECT_TRUE(mseq.is_container());
+ EXPECT_FALSE(mval.is_container());
+ EXPECT_FALSE(mdocval.is_container());
+ EXPECT_EQ(t.is_container(stream_id), t._p(stream_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(doc_id), t._p(doc_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(map_id), t._p(map_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(keyval_id), t._p(keyval_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(seq_id), t._p(seq_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(val_id), t._p(val_id)->m_type.is_container());
+ EXPECT_EQ(t.is_container(docval_id), t._p(docval_id)->m_type.is_container());
+ EXPECT_EQ(stream.is_container(), stream.get()->m_type.is_container());
+ EXPECT_EQ(doc.is_container(), doc.get()->m_type.is_container());
+ EXPECT_EQ(map.is_container(), map.get()->m_type.is_container());
+ EXPECT_EQ(keyval.is_container(), keyval.get()->m_type.is_container());
+ EXPECT_EQ(seq.is_container(), seq.get()->m_type.is_container());
+ EXPECT_EQ(val.is_container(), val.get()->m_type.is_container());
+ EXPECT_EQ(docval.is_container(), docval.get()->m_type.is_container());
+ EXPECT_EQ(mstream.is_container(), mstream.get()->m_type.is_container());
+ EXPECT_EQ(mdoc.is_container(), mdoc.get()->m_type.is_container());
+ EXPECT_EQ(mmap.is_container(), mmap.get()->m_type.is_container());
+ EXPECT_EQ(mkeyval.is_container(), mkeyval.get()->m_type.is_container());
+ EXPECT_EQ(mseq.is_container(), mseq.get()->m_type.is_container());
+ EXPECT_EQ(mval.is_container(), mval.get()->m_type.is_container());
+ EXPECT_EQ(mdocval.is_container(), mdocval.get()->m_type.is_container());
+}
+
+TEST(NodeType, is_map)
+{
+ // only the MAP bit (with or without KEY) makes is_map() true
+ EXPECT_FALSE(NodeType(NOTYPE).is_map());
+ EXPECT_FALSE(NodeType(VAL).is_map());
+ EXPECT_FALSE(NodeType(KEY).is_map());
+ EXPECT_TRUE(NodeType(MAP).is_map());
+ EXPECT_TRUE(NodeType(KEYMAP).is_map());
+ EXPECT_FALSE(NodeType(SEQ).is_map());
+ EXPECT_FALSE(NodeType(KEYSEQ).is_map());
+}
+
+TEST(Tree, is_map)
+{
+ // the doc (an implicit map) and the nested map report is_map(); the
+ // stream, seq, scalars and scalar doc do not; checked across all forms
+ Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t docval_id = t.last_child(stream_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef docval = t.ref(docval_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mdocval = t.ref(docval_id);
+ EXPECT_FALSE(t.is_map(stream_id));
+ EXPECT_TRUE(t.is_map(doc_id));
+ EXPECT_TRUE(t.is_map(map_id));
+ EXPECT_FALSE(t.is_map(keyval_id));
+ EXPECT_FALSE(t.is_map(seq_id));
+ EXPECT_FALSE(t.is_map(val_id));
+ EXPECT_FALSE(t.is_map(docval_id));
+ EXPECT_FALSE(stream.is_map());
+ EXPECT_TRUE(doc.is_map());
+ EXPECT_TRUE(map.is_map());
+ EXPECT_FALSE(keyval.is_map());
+ EXPECT_FALSE(seq.is_map());
+ EXPECT_FALSE(val.is_map());
+ EXPECT_FALSE(docval.is_map());
+ EXPECT_FALSE(mstream.is_map());
+ EXPECT_TRUE(mdoc.is_map());
+ EXPECT_TRUE(mmap.is_map());
+ EXPECT_FALSE(mkeyval.is_map());
+ EXPECT_FALSE(mseq.is_map());
+ EXPECT_FALSE(mval.is_map());
+ EXPECT_FALSE(mdocval.is_map());
+ EXPECT_EQ(t.is_map(stream_id), t._p(stream_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(doc_id), t._p(doc_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(map_id), t._p(map_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(keyval_id), t._p(keyval_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(seq_id), t._p(seq_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(val_id), t._p(val_id)->m_type.is_map());
+ EXPECT_EQ(t.is_map(docval_id), t._p(docval_id)->m_type.is_map());
+ EXPECT_EQ(stream.is_map(), stream.get()->m_type.is_map());
+ EXPECT_EQ(doc.is_map(), doc.get()->m_type.is_map());
+ EXPECT_EQ(map.is_map(), map.get()->m_type.is_map());
+ EXPECT_EQ(keyval.is_map(), keyval.get()->m_type.is_map());
+ EXPECT_EQ(seq.is_map(), seq.get()->m_type.is_map());
+ EXPECT_EQ(val.is_map(), val.get()->m_type.is_map());
+ EXPECT_EQ(docval.is_map(), docval.get()->m_type.is_map());
+ EXPECT_EQ(mstream.is_map(), mstream.get()->m_type.is_map());
+ EXPECT_EQ(mdoc.is_map(), mdoc.get()->m_type.is_map());
+ EXPECT_EQ(mmap.is_map(), mmap.get()->m_type.is_map());
+ EXPECT_EQ(mkeyval.is_map(), mkeyval.get()->m_type.is_map());
+ EXPECT_EQ(mseq.is_map(), mseq.get()->m_type.is_map());
+ EXPECT_EQ(mval.is_map(), mval.get()->m_type.is_map());
+ EXPECT_EQ(mdocval.is_map(), mdocval.get()->m_type.is_map());
+}
+
+TEST(NodeType, is_seq)
+{
+ // only the SEQ bit (with or without KEY) makes is_seq() true
+ EXPECT_FALSE(NodeType(NOTYPE).is_seq());
+ EXPECT_FALSE(NodeType(VAL).is_seq());
+ EXPECT_FALSE(NodeType(KEY).is_seq());
+ EXPECT_FALSE(NodeType(MAP).is_seq());
+ EXPECT_FALSE(NodeType(KEYMAP).is_seq());
+ EXPECT_TRUE(NodeType(SEQ).is_seq());
+ EXPECT_TRUE(NodeType(KEYSEQ).is_seq());
+}
+
+// Checks that is_seq() agrees across the three access paths -- Tree::is_seq(id),
+// ConstNodeRef::is_seq() and NodeRef::is_seq() -- and matches the raw NodeType,
+// over a two-document stream holding a map, a seq and scalars.
+TEST(Tree, is_seq)
+{
+    Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_TRUE(t.is_seq(stream_id));  // the stream root itself reports as a seq (of docs)
+    EXPECT_FALSE(t.is_seq(doc_id));
+    EXPECT_FALSE(t.is_seq(map_id));
+    EXPECT_FALSE(t.is_seq(keyval_id));
+    EXPECT_TRUE(t.is_seq(seq_id));
+    EXPECT_FALSE(t.is_seq(val_id));
+    EXPECT_FALSE(t.is_seq(docval_id));
+    EXPECT_TRUE(stream.is_seq());
+    EXPECT_FALSE(doc.is_seq());
+    EXPECT_FALSE(map.is_seq());
+    EXPECT_FALSE(keyval.is_seq());
+    EXPECT_TRUE(seq.is_seq());
+    EXPECT_FALSE(val.is_seq());
+    EXPECT_FALSE(docval.is_seq());
+    EXPECT_TRUE(mstream.is_seq());
+    EXPECT_FALSE(mdoc.is_seq());
+    EXPECT_FALSE(mmap.is_seq());
+    EXPECT_FALSE(mkeyval.is_seq());
+    EXPECT_TRUE(mseq.is_seq());
+    EXPECT_FALSE(mval.is_seq());
+    EXPECT_FALSE(mdocval.is_seq());
+    EXPECT_EQ(t.is_seq(stream_id), t._p(stream_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(doc_id), t._p(doc_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(map_id), t._p(map_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(keyval_id), t._p(keyval_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(seq_id), t._p(seq_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(val_id), t._p(val_id)->m_type.is_seq());
+    EXPECT_EQ(t.is_seq(docval_id), t._p(docval_id)->m_type.is_seq());
+    EXPECT_EQ(stream.is_seq(), stream.get()->m_type.is_seq());
+    EXPECT_EQ(doc.is_seq(), doc.get()->m_type.is_seq());
+    EXPECT_EQ(map.is_seq(), map.get()->m_type.is_seq());
+    EXPECT_EQ(keyval.is_seq(), keyval.get()->m_type.is_seq());
+    EXPECT_EQ(seq.is_seq(), seq.get()->m_type.is_seq());
+    EXPECT_EQ(val.is_seq(), val.get()->m_type.is_seq());
+    EXPECT_EQ(docval.is_seq(), docval.get()->m_type.is_seq());
+    EXPECT_EQ(mstream.is_seq(), mstream.get()->m_type.is_seq());
+    EXPECT_EQ(mdoc.is_seq(), mdoc.get()->m_type.is_seq());
+    EXPECT_EQ(mmap.is_seq(), mmap.get()->m_type.is_seq());
+    EXPECT_EQ(mkeyval.is_seq(), mkeyval.get()->m_type.is_seq());
+    EXPECT_EQ(mseq.is_seq(), mseq.get()->m_type.is_seq());
+    EXPECT_EQ(mval.is_seq(), mval.get()->m_type.is_seq());
+    EXPECT_EQ(mdocval.is_seq(), mdocval.get()->m_type.is_seq());
+}
+
+// has_val() must be true exactly for types carrying the VAL bit
+// (VAL, DOCVAL, KEYVAL), and false for keys and containers.
+TEST(NodeType, has_val)
+{
+    EXPECT_FALSE(NodeType(NOTYPE).has_val());
+    EXPECT_FALSE(NodeType(KEY).has_val());
+    EXPECT_TRUE(NodeType(VAL).has_val());
+    EXPECT_TRUE(NodeType(DOCVAL).has_val());
+    EXPECT_TRUE(NodeType(KEYVAL).has_val());
+    EXPECT_FALSE(NodeType(KEYMAP).has_val());
+    EXPECT_FALSE(NodeType(KEYSEQ).has_val());
+}
+
+// Checks that has_val() agrees across Tree::has_val(id), ConstNodeRef and
+// NodeRef, and matches the raw NodeType: true only for scalar-valued nodes
+// (keyval, seq member, doc scalar), false for containers and the stream.
+TEST(Tree, has_val)
+{
+    Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.has_val(stream_id));
+    EXPECT_FALSE(t.has_val(doc_id));
+    EXPECT_FALSE(t.has_val(map_id));
+    EXPECT_TRUE(t.has_val(keyval_id));
+    EXPECT_FALSE(t.has_val(seq_id));
+    EXPECT_TRUE(t.has_val(val_id));
+    EXPECT_TRUE(t.has_val(docval_id));
+    EXPECT_FALSE(stream.has_val());
+    EXPECT_FALSE(doc.has_val());
+    EXPECT_FALSE(map.has_val());
+    EXPECT_TRUE(keyval.has_val());
+    EXPECT_FALSE(seq.has_val());
+    EXPECT_TRUE(val.has_val());
+    EXPECT_TRUE(docval.has_val());
+    EXPECT_FALSE(mstream.has_val());
+    EXPECT_FALSE(mdoc.has_val());
+    EXPECT_FALSE(mmap.has_val());
+    EXPECT_TRUE(mkeyval.has_val());
+    EXPECT_FALSE(mseq.has_val());
+    EXPECT_TRUE(mval.has_val());
+    EXPECT_TRUE(mdocval.has_val());
+    EXPECT_EQ(t.has_val(stream_id), t._p(stream_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(doc_id), t._p(doc_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(map_id), t._p(map_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(keyval_id), t._p(keyval_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(seq_id), t._p(seq_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(val_id), t._p(val_id)->m_type.has_val());
+    EXPECT_EQ(t.has_val(docval_id), t._p(docval_id)->m_type.has_val());
+    EXPECT_EQ(stream.has_val(), stream.get()->m_type.has_val());
+    EXPECT_EQ(doc.has_val(), doc.get()->m_type.has_val());
+    EXPECT_EQ(map.has_val(), map.get()->m_type.has_val());
+    EXPECT_EQ(keyval.has_val(), keyval.get()->m_type.has_val());
+    EXPECT_EQ(seq.has_val(), seq.get()->m_type.has_val());
+    EXPECT_EQ(val.has_val(), val.get()->m_type.has_val());
+    EXPECT_EQ(docval.has_val(), docval.get()->m_type.has_val());
+    EXPECT_EQ(mstream.has_val(), mstream.get()->m_type.has_val());
+    EXPECT_EQ(mdoc.has_val(), mdoc.get()->m_type.has_val());
+    EXPECT_EQ(mmap.has_val(), mmap.get()->m_type.has_val());
+    EXPECT_EQ(mkeyval.has_val(), mkeyval.get()->m_type.has_val());
+    EXPECT_EQ(mseq.has_val(), mseq.get()->m_type.has_val());
+    EXPECT_EQ(mval.has_val(), mval.get()->m_type.has_val());
+    EXPECT_EQ(mdocval.has_val(), mdocval.get()->m_type.has_val());
+}
+
+// is_val() is stricter than has_val(): true for a pure scalar (VAL, DOCVAL)
+// but false for KEYVAL, which carries a key as well.
+TEST(NodeType, is_val)
+{
+    EXPECT_FALSE(NodeType(NOTYPE).is_val());
+    EXPECT_FALSE(NodeType(KEY).is_val());
+    EXPECT_TRUE(NodeType(VAL).is_val());
+    EXPECT_TRUE(NodeType(DOCVAL).is_val());
+    EXPECT_FALSE(NodeType(KEYVAL).is_val());
+    EXPECT_FALSE(NodeType(KEYMAP).is_val());
+    EXPECT_FALSE(NodeType(KEYSEQ).is_val());
+}
+
+// Checks that is_val() agrees across Tree::is_val(id), ConstNodeRef and
+// NodeRef, and matches the raw NodeType: true only for keyless scalars
+// (seq member, doc scalar); note the keyval node is NOT is_val.
+TEST(Tree, is_val)
+{
+    Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.is_val(stream_id));
+    EXPECT_FALSE(t.is_val(doc_id));
+    EXPECT_FALSE(t.is_val(map_id));
+    EXPECT_FALSE(t.is_val(keyval_id));
+    EXPECT_FALSE(t.is_val(seq_id));
+    EXPECT_TRUE(t.is_val(val_id));
+    EXPECT_TRUE(t.is_val(docval_id));
+    EXPECT_FALSE(stream.is_val());
+    EXPECT_FALSE(doc.is_val());
+    EXPECT_FALSE(map.is_val());
+    EXPECT_FALSE(keyval.is_val());
+    EXPECT_FALSE(seq.is_val());
+    EXPECT_TRUE(val.is_val());
+    EXPECT_TRUE(docval.is_val());
+    EXPECT_FALSE(mstream.is_val());
+    EXPECT_FALSE(mdoc.is_val());
+    EXPECT_FALSE(mmap.is_val());
+    EXPECT_FALSE(mkeyval.is_val());
+    EXPECT_FALSE(mseq.is_val());
+    EXPECT_TRUE(mval.is_val());
+    EXPECT_TRUE(mdocval.is_val());
+    EXPECT_EQ(t.is_val(stream_id), t._p(stream_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(doc_id), t._p(doc_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(map_id), t._p(map_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(keyval_id), t._p(keyval_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(seq_id), t._p(seq_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(val_id), t._p(val_id)->m_type.is_val());
+    EXPECT_EQ(t.is_val(docval_id), t._p(docval_id)->m_type.is_val());
+    EXPECT_EQ(stream.is_val(), stream.get()->m_type.is_val());
+    EXPECT_EQ(doc.is_val(), doc.get()->m_type.is_val());
+    EXPECT_EQ(map.is_val(), map.get()->m_type.is_val());
+    EXPECT_EQ(keyval.is_val(), keyval.get()->m_type.is_val());
+    EXPECT_EQ(seq.is_val(), seq.get()->m_type.is_val());
+    EXPECT_EQ(val.is_val(), val.get()->m_type.is_val());
+    EXPECT_EQ(docval.is_val(), docval.get()->m_type.is_val());
+    EXPECT_EQ(mstream.is_val(), mstream.get()->m_type.is_val());
+    EXPECT_EQ(mdoc.is_val(), mdoc.get()->m_type.is_val());
+    EXPECT_EQ(mmap.is_val(), mmap.get()->m_type.is_val());
+    EXPECT_EQ(mkeyval.is_val(), mkeyval.get()->m_type.is_val());
+    EXPECT_EQ(mseq.is_val(), mseq.get()->m_type.is_val());
+    EXPECT_EQ(mval.is_val(), mval.get()->m_type.is_val());
+    EXPECT_EQ(mdocval.is_val(), mdocval.get()->m_type.is_val());
+}
+
+// has_key() must be true exactly for types carrying the KEY bit
+// (KEY, KEYVAL, KEYMAP, KEYSEQ).
+TEST(NodeType, has_key)
+{
+    EXPECT_FALSE(NodeType(NOTYPE).has_key());
+    EXPECT_TRUE(NodeType(KEY).has_key());
+    EXPECT_FALSE(NodeType(VAL).has_key());
+    EXPECT_TRUE(NodeType(KEYVAL).has_key());
+    EXPECT_TRUE(NodeType(KEYMAP).has_key());
+    EXPECT_TRUE(NodeType(KEYSEQ).has_key());
+}
+
+// Checks that has_key() agrees across Tree::has_key(id), ConstNodeRef and
+// NodeRef, and matches the raw NodeType: true only for map children
+// (the keyed map, seq and keyval nodes), false for the stream/doc/scalars.
+TEST(Tree, has_key)
+{
+    Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.has_key(stream_id));
+    EXPECT_FALSE(t.has_key(doc_id));
+    EXPECT_TRUE(t.has_key(map_id));
+    EXPECT_TRUE(t.has_key(keyval_id));
+    EXPECT_TRUE(t.has_key(seq_id));
+    EXPECT_FALSE(t.has_key(val_id));
+    EXPECT_FALSE(t.has_key(docval_id));
+    EXPECT_FALSE(stream.has_key());
+    EXPECT_FALSE(doc.has_key());
+    EXPECT_TRUE(map.has_key());
+    EXPECT_TRUE(keyval.has_key());
+    EXPECT_TRUE(seq.has_key());
+    EXPECT_FALSE(val.has_key());
+    EXPECT_FALSE(docval.has_key());
+    EXPECT_FALSE(mstream.has_key());
+    EXPECT_FALSE(mdoc.has_key());
+    EXPECT_TRUE(mmap.has_key());
+    EXPECT_TRUE(mkeyval.has_key());
+    EXPECT_TRUE(mseq.has_key());
+    EXPECT_FALSE(mval.has_key());
+    EXPECT_FALSE(mdocval.has_key());
+    EXPECT_EQ(t.has_key(stream_id), t._p(stream_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(doc_id), t._p(doc_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(map_id), t._p(map_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(keyval_id), t._p(keyval_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(seq_id), t._p(seq_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(val_id), t._p(val_id)->m_type.has_key());
+    EXPECT_EQ(t.has_key(docval_id), t._p(docval_id)->m_type.has_key());
+    EXPECT_EQ(stream.has_key(), stream.get()->m_type.has_key());
+    EXPECT_EQ(doc.has_key(), doc.get()->m_type.has_key());
+    EXPECT_EQ(map.has_key(), map.get()->m_type.has_key());
+    EXPECT_EQ(keyval.has_key(), keyval.get()->m_type.has_key());
+    EXPECT_EQ(seq.has_key(), seq.get()->m_type.has_key());
+    EXPECT_EQ(val.has_key(), val.get()->m_type.has_key());
+    EXPECT_EQ(docval.has_key(), docval.get()->m_type.has_key());
+    EXPECT_EQ(mstream.has_key(), mstream.get()->m_type.has_key());
+    EXPECT_EQ(mdoc.has_key(), mdoc.get()->m_type.has_key());
+    EXPECT_EQ(mmap.has_key(), mmap.get()->m_type.has_key());
+    EXPECT_EQ(mkeyval.has_key(), mkeyval.get()->m_type.has_key());
+    EXPECT_EQ(mseq.has_key(), mseq.get()->m_type.has_key());
+    EXPECT_EQ(mval.has_key(), mval.get()->m_type.has_key());
+    EXPECT_EQ(mdocval.has_key(), mdocval.get()->m_type.has_key());
+}
+
+// is_keyval() must be true only when BOTH the KEY and VAL bits are set,
+// i.e. KEYVAL; neither bit alone (KEY, VAL, DOCVAL) qualifies.
+TEST(NodeType, is_keyval)
+{
+    EXPECT_FALSE(NodeType(NOTYPE).is_keyval());
+    EXPECT_FALSE(NodeType(KEY).is_keyval());
+    EXPECT_FALSE(NodeType(VAL).is_keyval());
+    EXPECT_TRUE(NodeType(KEYVAL).is_keyval());
+    EXPECT_FALSE(NodeType(DOCVAL).is_keyval());
+    EXPECT_FALSE(NodeType(KEYMAP).is_keyval());
+    EXPECT_FALSE(NodeType(KEYSEQ).is_keyval());
+}
+
+// Checks that is_keyval() agrees across Tree::is_keyval(id), ConstNodeRef and
+// NodeRef, and matches the raw NodeType: true only for the map's scalar entry.
+TEST(Tree, is_keyval)
+{
+    Tree t = parse_in_arena(R"(---
+map: {foo: bar}
+seq: [foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.is_keyval(stream_id));
+    EXPECT_FALSE(t.is_keyval(doc_id));
+    EXPECT_FALSE(t.is_keyval(map_id));
+    EXPECT_TRUE(t.is_keyval(keyval_id));
+    EXPECT_FALSE(t.is_keyval(seq_id));
+    EXPECT_FALSE(t.is_keyval(val_id));
+    EXPECT_FALSE(t.is_keyval(docval_id));
+    EXPECT_FALSE(stream.is_keyval());
+    EXPECT_FALSE(doc.is_keyval());
+    EXPECT_FALSE(map.is_keyval());
+    EXPECT_TRUE(keyval.is_keyval());
+    EXPECT_FALSE(seq.is_keyval());
+    EXPECT_FALSE(val.is_keyval());
+    EXPECT_FALSE(docval.is_keyval());
+    EXPECT_FALSE(mstream.is_keyval());
+    EXPECT_FALSE(mdoc.is_keyval());
+    EXPECT_FALSE(mmap.is_keyval());
+    EXPECT_TRUE(mkeyval.is_keyval());
+    EXPECT_FALSE(mseq.is_keyval());
+    EXPECT_FALSE(mval.is_keyval());
+    EXPECT_FALSE(mdocval.is_keyval());
+    EXPECT_EQ(t.is_keyval(stream_id), t._p(stream_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(doc_id), t._p(doc_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(map_id), t._p(map_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(keyval_id), t._p(keyval_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(seq_id), t._p(seq_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(val_id), t._p(val_id)->m_type.is_keyval());
+    EXPECT_EQ(t.is_keyval(docval_id), t._p(docval_id)->m_type.is_keyval());
+    EXPECT_EQ(stream.is_keyval(), stream.get()->m_type.is_keyval());
+    EXPECT_EQ(doc.is_keyval(), doc.get()->m_type.is_keyval());
+    EXPECT_EQ(map.is_keyval(), map.get()->m_type.is_keyval());
+    EXPECT_EQ(keyval.is_keyval(), keyval.get()->m_type.is_keyval());
+    EXPECT_EQ(seq.is_keyval(), seq.get()->m_type.is_keyval());
+    EXPECT_EQ(val.is_keyval(), val.get()->m_type.is_keyval());
+    EXPECT_EQ(docval.is_keyval(), docval.get()->m_type.is_keyval());
+    EXPECT_EQ(mstream.is_keyval(), mstream.get()->m_type.is_keyval());
+    EXPECT_EQ(mdoc.is_keyval(), mdoc.get()->m_type.is_keyval());
+    EXPECT_EQ(mmap.is_keyval(), mmap.get()->m_type.is_keyval());
+    EXPECT_EQ(mkeyval.is_keyval(), mkeyval.get()->m_type.is_keyval());
+    EXPECT_EQ(mseq.is_keyval(), mseq.get()->m_type.is_keyval());
+    EXPECT_EQ(mval.is_keyval(), mval.get()->m_type.is_keyval());
+    EXPECT_EQ(mdocval.is_keyval(), mdocval.get()->m_type.is_keyval());
+}
+
+// has_key_tag() requires the KEY bit as well as KEYTAG: KEYTAG alone is false.
+TEST(NodeType, has_key_tag)
+{
+    EXPECT_FALSE(NodeType().has_key_tag());
+    EXPECT_FALSE(NodeType(KEYTAG).has_key_tag());
+    EXPECT_TRUE(NodeType(KEY|KEYTAG).has_key_tag());
+}
+
+// Checks that has_key_tag() agrees across Tree::has_key_tag(id), ConstNodeRef
+// and NodeRef, and matches the raw NodeType, for keys with and without a tag.
+TEST(Tree, has_key_tag)
+{
+    Tree t = parse_in_arena(R"(--- !docmaptag
+!maptag map: {!footag foo: bar, notag: none}
+!seqtag seq: [!footag foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t keyvalnotag_id = t.last_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t valnotag_id = t.last_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef keyvalnotag = t.ref(keyvalnotag_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef valnotag = t.ref(valnotag_id); // was t.ref(val_id): copy-paste bug aliasing val
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mkeyvalnotag = t.ref(keyvalnotag_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mvalnotag = t.ref(valnotag_id); // was t.ref(val_id): copy-paste bug aliasing mval
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.has_key_tag(stream_id));
+    EXPECT_FALSE(t.has_key_tag(doc_id));
+    EXPECT_TRUE(t.has_key_tag(map_id));
+    EXPECT_TRUE(t.has_key_tag(keyval_id));
+    EXPECT_FALSE(t.has_key_tag(keyvalnotag_id));
+    EXPECT_TRUE(t.has_key_tag(seq_id));
+    EXPECT_FALSE(t.has_key_tag(val_id));
+    EXPECT_FALSE(t.has_key_tag(valnotag_id));
+    EXPECT_FALSE(t.has_key_tag(docval_id));
+    EXPECT_FALSE(stream.has_key_tag());
+    EXPECT_FALSE(doc.has_key_tag());
+    EXPECT_TRUE(map.has_key_tag());
+    EXPECT_TRUE(keyval.has_key_tag());
+    EXPECT_FALSE(keyvalnotag.has_key_tag());
+    EXPECT_TRUE(seq.has_key_tag());
+    EXPECT_FALSE(val.has_key_tag());
+    EXPECT_FALSE(valnotag.has_key_tag());
+    EXPECT_FALSE(docval.has_key_tag());
+    EXPECT_FALSE(mstream.has_key_tag());
+    EXPECT_FALSE(mdoc.has_key_tag());
+    EXPECT_TRUE(mmap.has_key_tag());
+    EXPECT_TRUE(mkeyval.has_key_tag());
+    EXPECT_FALSE(mkeyvalnotag.has_key_tag());
+    EXPECT_TRUE(mseq.has_key_tag());
+    EXPECT_FALSE(mval.has_key_tag());
+    EXPECT_FALSE(mvalnotag.has_key_tag());
+    EXPECT_FALSE(mdocval.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(stream_id), t._p(stream_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(doc_id), t._p(doc_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(map_id), t._p(map_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(keyval_id), t._p(keyval_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(keyvalnotag_id), t._p(keyvalnotag_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(seq_id), t._p(seq_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(val_id), t._p(val_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(valnotag_id), t._p(valnotag_id)->m_type.has_key_tag());
+    EXPECT_EQ(t.has_key_tag(docval_id), t._p(docval_id)->m_type.has_key_tag());
+    EXPECT_EQ(stream.has_key_tag(), stream.get()->m_type.has_key_tag());
+    EXPECT_EQ(doc.has_key_tag(), doc.get()->m_type.has_key_tag());
+    EXPECT_EQ(map.has_key_tag(), map.get()->m_type.has_key_tag());
+    EXPECT_EQ(keyval.has_key_tag(), keyval.get()->m_type.has_key_tag());
+    EXPECT_EQ(keyvalnotag.has_key_tag(), keyvalnotag.get()->m_type.has_key_tag());
+    EXPECT_EQ(seq.has_key_tag(), seq.get()->m_type.has_key_tag());
+    EXPECT_EQ(val.has_key_tag(), val.get()->m_type.has_key_tag());
+    EXPECT_EQ(valnotag.has_key_tag(), valnotag.get()->m_type.has_key_tag());
+    EXPECT_EQ(docval.has_key_tag(), docval.get()->m_type.has_key_tag());
+    EXPECT_EQ(mstream.has_key_tag(), mstream.get()->m_type.has_key_tag());
+    EXPECT_EQ(mdoc.has_key_tag(), mdoc.get()->m_type.has_key_tag());
+    EXPECT_EQ(mmap.has_key_tag(), mmap.get()->m_type.has_key_tag());
+    EXPECT_EQ(mkeyval.has_key_tag(), mkeyval.get()->m_type.has_key_tag());
+    EXPECT_EQ(mkeyvalnotag.has_key_tag(), mkeyvalnotag.get()->m_type.has_key_tag());
+    EXPECT_EQ(mseq.has_key_tag(), mseq.get()->m_type.has_key_tag());
+    EXPECT_EQ(mval.has_key_tag(), mval.get()->m_type.has_key_tag());
+    EXPECT_EQ(mvalnotag.has_key_tag(), mvalnotag.get()->m_type.has_key_tag());
+    EXPECT_EQ(mdocval.has_key_tag(), mdocval.get()->m_type.has_key_tag());
+}
+
+// has_val_tag() requires the VAL bit as well as VALTAG: VALTAG alone is false.
+TEST(NodeType, has_val_tag)
+{
+    EXPECT_FALSE(NodeType().has_val_tag());
+    EXPECT_FALSE(NodeType(VALTAG).has_val_tag());
+    EXPECT_TRUE(NodeType(VAL|VALTAG).has_val_tag());
+}
+
+// Checks that has_val_tag() agrees across Tree::has_val_tag(id), ConstNodeRef
+// and NodeRef, and matches the raw NodeType, for values with and without a tag
+// (the doc itself carries !docmaptag, hence doc has a val tag too).
+TEST(Tree, has_val_tag)
+{
+    Tree t = parse_in_arena(R"(--- !docmaptag
+map: !maptag {foo: !bartag bar, notag: none}
+seq: !seqtag [!footag foo, bar]
+---
+a scalar
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t keyvalnotag_id = t.last_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t valnotag_id = t.last_child(seq_id);
+    const size_t docval_id = t.last_child(stream_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef keyvalnotag = t.ref(keyvalnotag_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef valnotag = t.ref(valnotag_id);
+    ConstNodeRef docval = t.ref(docval_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mkeyvalnotag = t.ref(keyvalnotag_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mvalnotag = t.ref(valnotag_id);
+    NodeRef mdocval = t.ref(docval_id);
+    EXPECT_FALSE(t.has_val_tag(stream_id));
+    EXPECT_TRUE(t.has_val_tag(doc_id));
+    EXPECT_TRUE(t.has_val_tag(map_id));
+    EXPECT_TRUE(t.has_val_tag(keyval_id));
+    EXPECT_FALSE(t.has_val_tag(keyvalnotag_id));
+    EXPECT_TRUE(t.has_val_tag(seq_id));
+    EXPECT_TRUE(t.has_val_tag(val_id));
+    EXPECT_FALSE(t.has_val_tag(valnotag_id));
+    EXPECT_FALSE(t.has_val_tag(docval_id));
+    EXPECT_FALSE(stream.has_val_tag());
+    EXPECT_TRUE(doc.has_val_tag());
+    EXPECT_TRUE(map.has_val_tag());
+    EXPECT_TRUE(keyval.has_val_tag());
+    EXPECT_FALSE(keyvalnotag.has_val_tag());
+    EXPECT_TRUE(seq.has_val_tag());
+    EXPECT_TRUE(val.has_val_tag());
+    EXPECT_FALSE(valnotag.has_val_tag());
+    EXPECT_FALSE(docval.has_val_tag());
+    EXPECT_FALSE(mstream.has_val_tag());
+    EXPECT_TRUE(mdoc.has_val_tag());
+    EXPECT_TRUE(mmap.has_val_tag());
+    EXPECT_TRUE(mkeyval.has_val_tag());
+    EXPECT_FALSE(mkeyvalnotag.has_val_tag());
+    EXPECT_TRUE(mseq.has_val_tag());
+    EXPECT_TRUE(mval.has_val_tag());
+    EXPECT_FALSE(mvalnotag.has_val_tag());
+    EXPECT_FALSE(mdocval.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(stream_id), t._p(stream_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(doc_id), t._p(doc_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(map_id), t._p(map_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(keyval_id), t._p(keyval_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(keyvalnotag_id), t._p(keyvalnotag_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(seq_id), t._p(seq_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(val_id), t._p(val_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(valnotag_id), t._p(valnotag_id)->m_type.has_val_tag());
+    EXPECT_EQ(t.has_val_tag(docval_id), t._p(docval_id)->m_type.has_val_tag());
+    EXPECT_EQ(stream.has_val_tag(), stream.get()->m_type.has_val_tag());
+    EXPECT_EQ(doc.has_val_tag(), doc.get()->m_type.has_val_tag());
+    EXPECT_EQ(map.has_val_tag(), map.get()->m_type.has_val_tag());
+    EXPECT_EQ(keyval.has_val_tag(), keyval.get()->m_type.has_val_tag());
+    EXPECT_EQ(keyvalnotag.has_val_tag(), keyvalnotag.get()->m_type.has_val_tag());
+    EXPECT_EQ(seq.has_val_tag(), seq.get()->m_type.has_val_tag());
+    EXPECT_EQ(val.has_val_tag(), val.get()->m_type.has_val_tag());
+    EXPECT_EQ(valnotag.has_val_tag(), valnotag.get()->m_type.has_val_tag());
+    EXPECT_EQ(docval.has_val_tag(), docval.get()->m_type.has_val_tag());
+    EXPECT_EQ(mstream.has_val_tag(), mstream.get()->m_type.has_val_tag());
+    EXPECT_EQ(mdoc.has_val_tag(), mdoc.get()->m_type.has_val_tag());
+    EXPECT_EQ(mmap.has_val_tag(), mmap.get()->m_type.has_val_tag());
+    EXPECT_EQ(mkeyval.has_val_tag(), mkeyval.get()->m_type.has_val_tag());
+    EXPECT_EQ(mkeyvalnotag.has_val_tag(), mkeyvalnotag.get()->m_type.has_val_tag());
+    EXPECT_EQ(mseq.has_val_tag(), mseq.get()->m_type.has_val_tag());
+    EXPECT_EQ(mval.has_val_tag(), mval.get()->m_type.has_val_tag());
+    EXPECT_EQ(mvalnotag.has_val_tag(), mvalnotag.get()->m_type.has_val_tag());
+    EXPECT_EQ(mdocval.has_val_tag(), mdocval.get()->m_type.has_val_tag());
+}
+
+// has_key_anchor() requires the KEY bit as well as KEYANCH: KEYANCH alone is false.
+TEST(NodeType, has_key_anchor)
+{
+    EXPECT_FALSE(NodeType().has_key_anchor());
+    EXPECT_FALSE(NodeType(KEYANCH).has_key_anchor());
+    EXPECT_TRUE(NodeType(KEY|KEYANCH).has_key_anchor());
+}
+
+// Checks that has_key_anchor() agrees across Tree::has_key_anchor(id),
+// ConstNodeRef and NodeRef, and matches the raw NodeType, for keys with
+// and without an anchor.
+TEST(Tree, has_key_anchor)
+{
+    Tree t = parse_in_arena(R"(--- &docanchor
+&mapanchor map: {&keyvalanchor foo: bar, anchor: none}
+&seqanchor seq: [&valanchor foo, bar]
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t keyvalnoanchor_id = t.last_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t valnoanchor_id = t.last_child(seq_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+    EXPECT_FALSE(t.has_key_anchor(stream_id));
+    EXPECT_FALSE(t.has_key_anchor(doc_id));
+    EXPECT_TRUE(t.has_key_anchor(map_id));
+    EXPECT_TRUE(t.has_key_anchor(keyval_id));
+    EXPECT_FALSE(t.has_key_anchor(keyvalnoanchor_id));
+    EXPECT_TRUE(t.has_key_anchor(seq_id));
+    EXPECT_FALSE(t.has_key_anchor(val_id));
+    EXPECT_FALSE(t.has_key_anchor(valnoanchor_id));
+    EXPECT_FALSE(stream.has_key_anchor());
+    EXPECT_FALSE(doc.has_key_anchor());
+    EXPECT_TRUE(map.has_key_anchor());
+    EXPECT_TRUE(keyval.has_key_anchor());
+    EXPECT_FALSE(keyvalnoanchor.has_key_anchor());
+    EXPECT_TRUE(seq.has_key_anchor());
+    EXPECT_FALSE(val.has_key_anchor());
+    EXPECT_FALSE(valnoanchor.has_key_anchor());
+    EXPECT_FALSE(mstream.has_key_anchor());
+    EXPECT_FALSE(mdoc.has_key_anchor());
+    EXPECT_TRUE(mmap.has_key_anchor());
+    EXPECT_TRUE(mkeyval.has_key_anchor());
+    EXPECT_FALSE(mkeyvalnoanchor.has_key_anchor());
+    EXPECT_TRUE(mseq.has_key_anchor());
+    EXPECT_FALSE(mval.has_key_anchor());
+    EXPECT_FALSE(mvalnoanchor.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(stream_id), t._p(stream_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(doc_id), t._p(doc_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(map_id), t._p(map_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(keyval_id), t._p(keyval_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(seq_id), t._p(seq_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(val_id), t._p(val_id)->m_type.has_key_anchor());
+    EXPECT_EQ(t.has_key_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.has_key_anchor());
+    EXPECT_EQ(stream.has_key_anchor(), stream.get()->m_type.has_key_anchor());
+    EXPECT_EQ(doc.has_key_anchor(), doc.get()->m_type.has_key_anchor());
+    EXPECT_EQ(map.has_key_anchor(), map.get()->m_type.has_key_anchor());
+    EXPECT_EQ(keyval.has_key_anchor(), keyval.get()->m_type.has_key_anchor());
+    EXPECT_EQ(keyvalnoanchor.has_key_anchor(), keyvalnoanchor.get()->m_type.has_key_anchor());
+    EXPECT_EQ(seq.has_key_anchor(), seq.get()->m_type.has_key_anchor());
+    EXPECT_EQ(val.has_key_anchor(), val.get()->m_type.has_key_anchor());
+    EXPECT_EQ(valnoanchor.has_key_anchor(), valnoanchor.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mstream.has_key_anchor(), mstream.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mdoc.has_key_anchor(), mdoc.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mmap.has_key_anchor(), mmap.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mkeyval.has_key_anchor(), mkeyval.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mkeyvalnoanchor.has_key_anchor(), mkeyvalnoanchor.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mseq.has_key_anchor(), mseq.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mval.has_key_anchor(), mval.get()->m_type.has_key_anchor());
+    EXPECT_EQ(mvalnoanchor.has_key_anchor(), mvalnoanchor.get()->m_type.has_key_anchor());
+}
+
+// is_key_anchor() mirrors has_key_anchor(): KEYANCH alone (without KEY) is false.
+TEST(NodeType, is_key_anchor)
+{
+    EXPECT_FALSE(NodeType().is_key_anchor());
+    EXPECT_FALSE(NodeType(KEYANCH).is_key_anchor());
+    EXPECT_TRUE(NodeType(KEY|KEYANCH).is_key_anchor());
+}
+
+// Checks that is_key_anchor() agrees across Tree::is_key_anchor(id),
+// ConstNodeRef and NodeRef, and matches the raw NodeType; same document
+// and expectations as the has_key_anchor test above.
+TEST(Tree, is_key_anchor)
+{
+    Tree t = parse_in_arena(R"(--- &docanchor
+&mapanchor map: {&keyvalanchor foo: bar, anchor: none}
+&seqanchor seq: [&valanchor foo, bar]
+...)");
+    const size_t stream_id = t.root_id();
+    const size_t doc_id = t.first_child(stream_id);
+    const size_t map_id = t.first_child(doc_id);
+    const size_t keyval_id = t.first_child(map_id);
+    const size_t keyvalnoanchor_id = t.last_child(map_id);
+    const size_t seq_id = t.last_child(doc_id);
+    const size_t val_id = t.first_child(seq_id);
+    const size_t valnoanchor_id = t.last_child(seq_id);
+    ConstNodeRef stream = t.ref(stream_id);
+    ConstNodeRef doc = t.ref(doc_id);
+    ConstNodeRef map = t.ref(map_id);
+    ConstNodeRef keyval = t.ref(keyval_id);
+    ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+    ConstNodeRef seq = t.ref(seq_id);
+    ConstNodeRef val = t.ref(val_id);
+    ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+    NodeRef mstream = t.ref(stream_id);
+    NodeRef mdoc = t.ref(doc_id);
+    NodeRef mmap = t.ref(map_id);
+    NodeRef mkeyval = t.ref(keyval_id);
+    NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+    NodeRef mseq = t.ref(seq_id);
+    NodeRef mval = t.ref(val_id);
+    NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+    EXPECT_FALSE(t.is_key_anchor(stream_id));
+    EXPECT_FALSE(t.is_key_anchor(doc_id));
+    EXPECT_TRUE(t.is_key_anchor(map_id));
+    EXPECT_TRUE(t.is_key_anchor(keyval_id));
+    EXPECT_FALSE(t.is_key_anchor(keyvalnoanchor_id));
+    EXPECT_TRUE(t.is_key_anchor(seq_id));
+    EXPECT_FALSE(t.is_key_anchor(val_id));
+    EXPECT_FALSE(t.is_key_anchor(valnoanchor_id));
+    EXPECT_FALSE(stream.is_key_anchor());
+    EXPECT_FALSE(doc.is_key_anchor());
+    EXPECT_TRUE(map.is_key_anchor());
+    EXPECT_TRUE(keyval.is_key_anchor());
+    EXPECT_FALSE(keyvalnoanchor.is_key_anchor());
+    EXPECT_TRUE(seq.is_key_anchor());
+    EXPECT_FALSE(val.is_key_anchor());
+    EXPECT_FALSE(valnoanchor.is_key_anchor());
+    EXPECT_FALSE(mstream.is_key_anchor());
+    EXPECT_FALSE(mdoc.is_key_anchor());
+    EXPECT_TRUE(mmap.is_key_anchor());
+    EXPECT_TRUE(mkeyval.is_key_anchor());
+    EXPECT_FALSE(mkeyvalnoanchor.is_key_anchor());
+    EXPECT_TRUE(mseq.is_key_anchor());
+    EXPECT_FALSE(mval.is_key_anchor());
+    EXPECT_FALSE(mvalnoanchor.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(stream_id), t._p(stream_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(doc_id), t._p(doc_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(map_id), t._p(map_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(keyval_id), t._p(keyval_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(seq_id), t._p(seq_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(val_id), t._p(val_id)->m_type.is_key_anchor());
+    EXPECT_EQ(t.is_key_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.is_key_anchor());
+    EXPECT_EQ(stream.is_key_anchor(), stream.get()->m_type.is_key_anchor());
+    EXPECT_EQ(doc.is_key_anchor(), doc.get()->m_type.is_key_anchor());
+    EXPECT_EQ(map.is_key_anchor(), map.get()->m_type.is_key_anchor());
+    EXPECT_EQ(keyval.is_key_anchor(), keyval.get()->m_type.is_key_anchor());
+    EXPECT_EQ(keyvalnoanchor.is_key_anchor(), keyvalnoanchor.get()->m_type.is_key_anchor());
+    EXPECT_EQ(seq.is_key_anchor(), seq.get()->m_type.is_key_anchor());
+    EXPECT_EQ(val.is_key_anchor(), val.get()->m_type.is_key_anchor());
+    EXPECT_EQ(valnoanchor.is_key_anchor(), valnoanchor.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mstream.is_key_anchor(), mstream.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mdoc.is_key_anchor(), mdoc.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mmap.is_key_anchor(), mmap.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mkeyval.is_key_anchor(), mkeyval.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mkeyvalnoanchor.is_key_anchor(), mkeyvalnoanchor.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mseq.is_key_anchor(), mseq.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mval.is_key_anchor(), mval.get()->m_type.is_key_anchor());
+    EXPECT_EQ(mvalnoanchor.is_key_anchor(), mvalnoanchor.get()->m_type.is_key_anchor());
+}
+
+// has_val_anchor() on a bare NodeType: per these expectations, the VALANCH bit
+// alone is not enough — the node must also be a VAL-bearing node.
+TEST(NodeType, has_val_anchor)
+{
+ EXPECT_FALSE(NodeType().has_val_anchor());
+ EXPECT_FALSE(NodeType(VALANCH).has_val_anchor());
+ EXPECT_TRUE(NodeType(VAL|VALANCH).has_val_anchor());
+}
+
+// has_val_anchor() on a parsed tree: the map, the seq, and one entry of each
+// carry a val anchor; a sibling entry in each container does not. Checks the
+// Tree id-based query, ConstNodeRef, and NodeRef, and that each agrees with
+// the underlying NodeType flags.
+TEST(Tree, has_val_anchor)
+{
+ Tree t = parse_in_arena(R"(--- &docanchor
+map: &mapanchor {foo: &keyvalanchor bar, anchor: none}
+seq: &seqanchor [&valanchor foo, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t keyvalnoanchor_id = t.last_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t valnoanchor_id = t.last_child(seq_id);
+ // const refs...
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+ // ...and mutable refs to the same nodes
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.has_val_anchor(stream_id));
+ EXPECT_FALSE(t.has_val_anchor(doc_id));
+ EXPECT_TRUE(t.has_val_anchor(map_id));
+ EXPECT_TRUE(t.has_val_anchor(keyval_id));
+ EXPECT_FALSE(t.has_val_anchor(keyvalnoanchor_id));
+ EXPECT_TRUE(t.has_val_anchor(seq_id));
+ EXPECT_TRUE(t.has_val_anchor(val_id));
+ EXPECT_FALSE(t.has_val_anchor(valnoanchor_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.has_val_anchor());
+ EXPECT_FALSE(doc.has_val_anchor());
+ EXPECT_TRUE(map.has_val_anchor());
+ EXPECT_TRUE(keyval.has_val_anchor());
+ EXPECT_FALSE(keyvalnoanchor.has_val_anchor());
+ EXPECT_TRUE(seq.has_val_anchor());
+ EXPECT_TRUE(val.has_val_anchor());
+ EXPECT_FALSE(valnoanchor.has_val_anchor());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.has_val_anchor());
+ EXPECT_FALSE(mdoc.has_val_anchor());
+ EXPECT_TRUE(mmap.has_val_anchor());
+ EXPECT_TRUE(mkeyval.has_val_anchor());
+ EXPECT_FALSE(mkeyvalnoanchor.has_val_anchor());
+ EXPECT_TRUE(mseq.has_val_anchor());
+ EXPECT_TRUE(mval.has_val_anchor());
+ EXPECT_FALSE(mvalnoanchor.has_val_anchor());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.has_val_anchor(stream_id), t._p(stream_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(doc_id), t._p(doc_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(map_id), t._p(map_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(keyval_id), t._p(keyval_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(seq_id), t._p(seq_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(val_id), t._p(val_id)->m_type.has_val_anchor());
+ EXPECT_EQ(t.has_val_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.has_val_anchor());
+ EXPECT_EQ(stream.has_val_anchor(), stream.get()->m_type.has_val_anchor());
+ EXPECT_EQ(doc.has_val_anchor(), doc.get()->m_type.has_val_anchor());
+ EXPECT_EQ(map.has_val_anchor(), map.get()->m_type.has_val_anchor());
+ EXPECT_EQ(keyval.has_val_anchor(), keyval.get()->m_type.has_val_anchor());
+ EXPECT_EQ(keyvalnoanchor.has_val_anchor(), keyvalnoanchor.get()->m_type.has_val_anchor());
+ EXPECT_EQ(seq.has_val_anchor(), seq.get()->m_type.has_val_anchor());
+ EXPECT_EQ(val.has_val_anchor(), val.get()->m_type.has_val_anchor());
+ EXPECT_EQ(valnoanchor.has_val_anchor(), valnoanchor.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mstream.has_val_anchor(), mstream.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mdoc.has_val_anchor(), mdoc.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mmap.has_val_anchor(), mmap.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mkeyval.has_val_anchor(), mkeyval.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mkeyvalnoanchor.has_val_anchor(), mkeyvalnoanchor.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mseq.has_val_anchor(), mseq.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mval.has_val_anchor(), mval.get()->m_type.has_val_anchor());
+ EXPECT_EQ(mvalnoanchor.has_val_anchor(), mvalnoanchor.get()->m_type.has_val_anchor());
+}
+
+// is_val_anchor() on a bare NodeType: same expectations as has_val_anchor()
+// here — VALANCH alone is false, VAL|VALANCH is true.
+TEST(NodeType, is_val_anchor)
+{
+ EXPECT_FALSE(NodeType().is_val_anchor());
+ EXPECT_FALSE(NodeType(VALANCH).is_val_anchor());
+ EXPECT_TRUE(NodeType(VAL|VALANCH).is_val_anchor());
+}
+
+// is_val_anchor() on a parsed tree: same YAML fixture as the has_val_anchor
+// test, checked through Tree ids, ConstNodeRef, and NodeRef, and verified to
+// agree with the underlying NodeType flags.
+TEST(Tree, is_val_anchor)
+{
+ Tree t = parse_in_arena(R"(--- &docanchor
+map: &mapanchor {foo: &keyvalanchor bar, anchor: none}
+seq: &seqanchor [&valanchor foo, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t keyvalnoanchor_id = t.last_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t valnoanchor_id = t.last_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_val_anchor(stream_id));
+ EXPECT_FALSE(t.is_val_anchor(doc_id));
+ EXPECT_TRUE(t.is_val_anchor(map_id));
+ EXPECT_TRUE(t.is_val_anchor(keyval_id));
+ EXPECT_FALSE(t.is_val_anchor(keyvalnoanchor_id));
+ EXPECT_TRUE(t.is_val_anchor(seq_id));
+ EXPECT_TRUE(t.is_val_anchor(val_id));
+ EXPECT_FALSE(t.is_val_anchor(valnoanchor_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_val_anchor());
+ EXPECT_FALSE(doc.is_val_anchor());
+ EXPECT_TRUE(map.is_val_anchor());
+ EXPECT_TRUE(keyval.is_val_anchor());
+ EXPECT_FALSE(keyvalnoanchor.is_val_anchor());
+ EXPECT_TRUE(seq.is_val_anchor());
+ EXPECT_TRUE(val.is_val_anchor());
+ EXPECT_FALSE(valnoanchor.is_val_anchor());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_val_anchor());
+ EXPECT_FALSE(mdoc.is_val_anchor());
+ EXPECT_TRUE(mmap.is_val_anchor());
+ EXPECT_TRUE(mkeyval.is_val_anchor());
+ EXPECT_FALSE(mkeyvalnoanchor.is_val_anchor());
+ EXPECT_TRUE(mseq.is_val_anchor());
+ EXPECT_TRUE(mval.is_val_anchor());
+ EXPECT_FALSE(mvalnoanchor.is_val_anchor());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_val_anchor(stream_id), t._p(stream_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(doc_id), t._p(doc_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(map_id), t._p(map_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(keyval_id), t._p(keyval_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(seq_id), t._p(seq_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(val_id), t._p(val_id)->m_type.is_val_anchor());
+ EXPECT_EQ(t.is_val_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.is_val_anchor());
+ EXPECT_EQ(stream.is_val_anchor(), stream.get()->m_type.is_val_anchor());
+ EXPECT_EQ(doc.is_val_anchor(), doc.get()->m_type.is_val_anchor());
+ EXPECT_EQ(map.is_val_anchor(), map.get()->m_type.is_val_anchor());
+ EXPECT_EQ(keyval.is_val_anchor(), keyval.get()->m_type.is_val_anchor());
+ EXPECT_EQ(keyvalnoanchor.is_val_anchor(), keyvalnoanchor.get()->m_type.is_val_anchor());
+ EXPECT_EQ(seq.is_val_anchor(), seq.get()->m_type.is_val_anchor());
+ EXPECT_EQ(val.is_val_anchor(), val.get()->m_type.is_val_anchor());
+ EXPECT_EQ(valnoanchor.is_val_anchor(), valnoanchor.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mstream.is_val_anchor(), mstream.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mdoc.is_val_anchor(), mdoc.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mmap.is_val_anchor(), mmap.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mkeyval.is_val_anchor(), mkeyval.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mkeyvalnoanchor.is_val_anchor(), mkeyvalnoanchor.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mseq.is_val_anchor(), mseq.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mval.is_val_anchor(), mval.get()->m_type.is_val_anchor());
+ EXPECT_EQ(mvalnoanchor.is_val_anchor(), mvalnoanchor.get()->m_type.is_val_anchor());
+}
+
+// has_anchor() on a bare NodeType: true if either the KEYANCH or the VALANCH
+// bit is set, regardless of KEY/VAL bits.
+TEST(NodeType, has_anchor)
+{
+ EXPECT_FALSE(NodeType().has_anchor());
+ EXPECT_TRUE(NodeType(VALANCH).has_anchor());
+ EXPECT_TRUE(NodeType(KEYANCH).has_anchor());
+ EXPECT_TRUE(NodeType(KEYANCH|VALANCH).has_anchor());
+ EXPECT_TRUE(NodeType(KEY|VALANCH).has_anchor());
+ EXPECT_TRUE(NodeType(VAL|KEYANCH).has_anchor());
+ EXPECT_TRUE(NodeType(KEY|KEYANCH).has_anchor());
+ EXPECT_TRUE(NodeType(VAL|VALANCH).has_anchor());
+}
+
+// has_anchor() on a parsed tree. Note the fixture differs from the val-anchor
+// tests: here "seq" carries a *key* anchor (&seqanchor before the key), so the
+// seq node is anchored via its key rather than its val.
+TEST(Tree, has_anchor)
+{
+ Tree t = parse_in_arena(R"(--- &docanchor
+map: &mapanchor {foo: &keyvalanchor bar, anchor: none}
+&seqanchor seq: [&valanchor foo, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t keyvalnoanchor_id = t.last_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t valnoanchor_id = t.last_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.has_anchor(stream_id));
+ EXPECT_FALSE(t.has_anchor(doc_id));
+ EXPECT_TRUE(t.has_anchor(map_id));
+ EXPECT_TRUE(t.has_anchor(keyval_id));
+ EXPECT_FALSE(t.has_anchor(keyvalnoanchor_id));
+ EXPECT_TRUE(t.has_anchor(seq_id));
+ EXPECT_TRUE(t.has_anchor(val_id));
+ EXPECT_FALSE(t.has_anchor(valnoanchor_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.has_anchor());
+ EXPECT_FALSE(doc.has_anchor());
+ EXPECT_TRUE(map.has_anchor());
+ EXPECT_TRUE(keyval.has_anchor());
+ EXPECT_FALSE(keyvalnoanchor.has_anchor());
+ EXPECT_TRUE(seq.has_anchor());
+ EXPECT_TRUE(val.has_anchor());
+ EXPECT_FALSE(valnoanchor.has_anchor());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.has_anchor());
+ EXPECT_FALSE(mdoc.has_anchor());
+ EXPECT_TRUE(mmap.has_anchor());
+ EXPECT_TRUE(mkeyval.has_anchor());
+ EXPECT_FALSE(mkeyvalnoanchor.has_anchor());
+ EXPECT_TRUE(mseq.has_anchor());
+ EXPECT_TRUE(mval.has_anchor());
+ EXPECT_FALSE(mvalnoanchor.has_anchor());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.has_anchor(stream_id), t._p(stream_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(doc_id), t._p(doc_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(map_id), t._p(map_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(keyval_id), t._p(keyval_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(seq_id), t._p(seq_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(val_id), t._p(val_id)->m_type.has_anchor());
+ EXPECT_EQ(t.has_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.has_anchor());
+ EXPECT_EQ(stream.has_anchor(), stream.get()->m_type.has_anchor());
+ EXPECT_EQ(doc.has_anchor(), doc.get()->m_type.has_anchor());
+ EXPECT_EQ(map.has_anchor(), map.get()->m_type.has_anchor());
+ EXPECT_EQ(keyval.has_anchor(), keyval.get()->m_type.has_anchor());
+ EXPECT_EQ(keyvalnoanchor.has_anchor(), keyvalnoanchor.get()->m_type.has_anchor());
+ EXPECT_EQ(seq.has_anchor(), seq.get()->m_type.has_anchor());
+ EXPECT_EQ(val.has_anchor(), val.get()->m_type.has_anchor());
+ EXPECT_EQ(valnoanchor.has_anchor(), valnoanchor.get()->m_type.has_anchor());
+ EXPECT_EQ(mstream.has_anchor(), mstream.get()->m_type.has_anchor());
+ EXPECT_EQ(mdoc.has_anchor(), mdoc.get()->m_type.has_anchor());
+ EXPECT_EQ(mmap.has_anchor(), mmap.get()->m_type.has_anchor());
+ EXPECT_EQ(mkeyval.has_anchor(), mkeyval.get()->m_type.has_anchor());
+ EXPECT_EQ(mkeyvalnoanchor.has_anchor(), mkeyvalnoanchor.get()->m_type.has_anchor());
+ EXPECT_EQ(mseq.has_anchor(), mseq.get()->m_type.has_anchor());
+ EXPECT_EQ(mval.has_anchor(), mval.get()->m_type.has_anchor());
+ EXPECT_EQ(mvalnoanchor.has_anchor(), mvalnoanchor.get()->m_type.has_anchor());
+}
+
+// is_anchor() on a bare NodeType: mirrors has_anchor() — any of KEYANCH /
+// VALANCH (alone or combined with KEY/VAL) makes it true.
+TEST(NodeType, is_anchor)
+{
+ EXPECT_FALSE(NodeType().is_anchor());
+ EXPECT_TRUE(NodeType(VALANCH).is_anchor());
+ EXPECT_TRUE(NodeType(KEYANCH).is_anchor());
+ EXPECT_TRUE(NodeType(KEYANCH|VALANCH).is_anchor());
+ EXPECT_TRUE(NodeType(KEY|VALANCH).is_anchor());
+ EXPECT_TRUE(NodeType(VAL|KEYANCH).is_anchor());
+ EXPECT_TRUE(NodeType(KEY|KEYANCH).is_anchor());
+ EXPECT_TRUE(NodeType(VAL|VALANCH).is_anchor());
+}
+
+// is_anchor() on a parsed tree: same fixture as the Tree.has_anchor test
+// (seq anchored through its key), checked through all three access paths and
+// against the raw NodeType flags.
+TEST(Tree, is_anchor)
+{
+ Tree t = parse_in_arena(R"(--- &docanchor
+map: &mapanchor {foo: &keyvalanchor bar, anchor: none}
+&seqanchor seq: [&valanchor foo, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t keyvalnoanchor_id = t.last_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ const size_t valnoanchor_id = t.last_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef keyvalnoanchor = t.ref(keyvalnoanchor_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ ConstNodeRef valnoanchor = t.ref(valnoanchor_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mkeyvalnoanchor = t.ref(keyvalnoanchor_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ NodeRef mvalnoanchor = t.ref(valnoanchor_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_anchor(stream_id));
+ EXPECT_FALSE(t.is_anchor(doc_id));
+ EXPECT_TRUE(t.is_anchor(map_id));
+ EXPECT_TRUE(t.is_anchor(keyval_id));
+ EXPECT_FALSE(t.is_anchor(keyvalnoanchor_id));
+ EXPECT_TRUE(t.is_anchor(seq_id));
+ EXPECT_TRUE(t.is_anchor(val_id));
+ EXPECT_FALSE(t.is_anchor(valnoanchor_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_anchor());
+ EXPECT_FALSE(doc.is_anchor());
+ EXPECT_TRUE(map.is_anchor());
+ EXPECT_TRUE(keyval.is_anchor());
+ EXPECT_FALSE(keyvalnoanchor.is_anchor());
+ EXPECT_TRUE(seq.is_anchor());
+ EXPECT_TRUE(val.is_anchor());
+ EXPECT_FALSE(valnoanchor.is_anchor());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_anchor());
+ EXPECT_FALSE(mdoc.is_anchor());
+ EXPECT_TRUE(mmap.is_anchor());
+ EXPECT_TRUE(mkeyval.is_anchor());
+ EXPECT_FALSE(mkeyvalnoanchor.is_anchor());
+ EXPECT_TRUE(mseq.is_anchor());
+ EXPECT_TRUE(mval.is_anchor());
+ EXPECT_FALSE(mvalnoanchor.is_anchor());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_anchor(stream_id), t._p(stream_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(doc_id), t._p(doc_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(map_id), t._p(map_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(keyval_id), t._p(keyval_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(keyvalnoanchor_id), t._p(keyvalnoanchor_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(seq_id), t._p(seq_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(val_id), t._p(val_id)->m_type.is_anchor());
+ EXPECT_EQ(t.is_anchor(valnoanchor_id), t._p(valnoanchor_id)->m_type.is_anchor());
+ EXPECT_EQ(stream.is_anchor(), stream.get()->m_type.is_anchor());
+ EXPECT_EQ(doc.is_anchor(), doc.get()->m_type.is_anchor());
+ EXPECT_EQ(map.is_anchor(), map.get()->m_type.is_anchor());
+ EXPECT_EQ(keyval.is_anchor(), keyval.get()->m_type.is_anchor());
+ EXPECT_EQ(keyvalnoanchor.is_anchor(), keyvalnoanchor.get()->m_type.is_anchor());
+ EXPECT_EQ(seq.is_anchor(), seq.get()->m_type.is_anchor());
+ EXPECT_EQ(val.is_anchor(), val.get()->m_type.is_anchor());
+ EXPECT_EQ(valnoanchor.is_anchor(), valnoanchor.get()->m_type.is_anchor());
+ EXPECT_EQ(mstream.is_anchor(), mstream.get()->m_type.is_anchor());
+ EXPECT_EQ(mdoc.is_anchor(), mdoc.get()->m_type.is_anchor());
+ EXPECT_EQ(mmap.is_anchor(), mmap.get()->m_type.is_anchor());
+ EXPECT_EQ(mkeyval.is_anchor(), mkeyval.get()->m_type.is_anchor());
+ EXPECT_EQ(mkeyvalnoanchor.is_anchor(), mkeyvalnoanchor.get()->m_type.is_anchor());
+ EXPECT_EQ(mseq.is_anchor(), mseq.get()->m_type.is_anchor());
+ EXPECT_EQ(mval.is_anchor(), mval.get()->m_type.is_anchor());
+ EXPECT_EQ(mvalnoanchor.is_anchor(), mvalnoanchor.get()->m_type.is_anchor());
+}
+
+// is_key_ref() on a bare NodeType: per these expectations the KEYREF bit is
+// sufficient, with or without KEY.
+TEST(NodeType, is_key_ref)
+{
+ EXPECT_FALSE(NodeType().is_key_ref());
+ EXPECT_TRUE(NodeType(KEYREF).is_key_ref());
+ EXPECT_TRUE(NodeType(KEY|KEYREF).is_key_ref());
+}
+
+// is_key_ref() on a parsed tree: the map and seq entries use alias refs as
+// *keys* (*mapref, *seqref); children and the doc/stream must report false.
+TEST(Tree, is_key_ref)
+{
+ Tree t = parse_in_arena(R"(---
+*mapref: {foo: bar, notag: none}
+*seqref: [foo, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_key_ref(stream_id));
+ EXPECT_FALSE(t.is_key_ref(doc_id));
+ EXPECT_TRUE(t.is_key_ref(map_id));
+ EXPECT_FALSE(t.is_key_ref(keyval_id));
+ EXPECT_TRUE(t.is_key_ref(seq_id));
+ EXPECT_FALSE(t.is_key_ref(val_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_key_ref());
+ EXPECT_FALSE(doc.is_key_ref());
+ EXPECT_TRUE(map.is_key_ref());
+ EXPECT_FALSE(keyval.is_key_ref());
+ EXPECT_TRUE(seq.is_key_ref());
+ EXPECT_FALSE(val.is_key_ref());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_key_ref());
+ EXPECT_FALSE(mdoc.is_key_ref());
+ EXPECT_TRUE(mmap.is_key_ref());
+ EXPECT_FALSE(mkeyval.is_key_ref());
+ EXPECT_TRUE(mseq.is_key_ref());
+ EXPECT_FALSE(mval.is_key_ref());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_key_ref(stream_id), t._p(stream_id)->m_type.is_key_ref());
+ EXPECT_EQ(t.is_key_ref(doc_id), t._p(doc_id)->m_type.is_key_ref());
+ EXPECT_EQ(t.is_key_ref(map_id), t._p(map_id)->m_type.is_key_ref());
+ EXPECT_EQ(t.is_key_ref(keyval_id), t._p(keyval_id)->m_type.is_key_ref());
+ EXPECT_EQ(t.is_key_ref(seq_id), t._p(seq_id)->m_type.is_key_ref());
+ EXPECT_EQ(t.is_key_ref(val_id), t._p(val_id)->m_type.is_key_ref());
+ EXPECT_EQ(stream.is_key_ref(), stream.get()->m_type.is_key_ref());
+ EXPECT_EQ(doc.is_key_ref(), doc.get()->m_type.is_key_ref());
+ EXPECT_EQ(map.is_key_ref(), map.get()->m_type.is_key_ref());
+ EXPECT_EQ(keyval.is_key_ref(), keyval.get()->m_type.is_key_ref());
+ EXPECT_EQ(seq.is_key_ref(), seq.get()->m_type.is_key_ref());
+ EXPECT_EQ(val.is_key_ref(), val.get()->m_type.is_key_ref());
+ EXPECT_EQ(mstream.is_key_ref(), mstream.get()->m_type.is_key_ref());
+ EXPECT_EQ(mdoc.is_key_ref(), mdoc.get()->m_type.is_key_ref());
+ EXPECT_EQ(mmap.is_key_ref(), mmap.get()->m_type.is_key_ref());
+ EXPECT_EQ(mkeyval.is_key_ref(), mkeyval.get()->m_type.is_key_ref());
+ EXPECT_EQ(mseq.is_key_ref(), mseq.get()->m_type.is_key_ref());
+ EXPECT_EQ(mval.is_key_ref(), mval.get()->m_type.is_key_ref());
+}
+
+// is_val_ref() on a bare NodeType: the VALREF bit is sufficient, with or
+// without VAL.
+TEST(NodeType, is_val_ref)
+{
+ EXPECT_FALSE(NodeType().is_val_ref());
+ EXPECT_TRUE(NodeType(VALREF).is_val_ref());
+ EXPECT_TRUE(NodeType(VAL|VALREF).is_val_ref());
+}
+
+// is_val_ref() on a parsed tree: alias refs appear as *values* (one map entry
+// and one seq element); containers and the doc/stream must report false.
+TEST(Tree, is_val_ref)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: [*valref, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_val_ref(stream_id));
+ EXPECT_FALSE(t.is_val_ref(doc_id));
+ EXPECT_FALSE(t.is_val_ref(map_id));
+ EXPECT_TRUE(t.is_val_ref(keyval_id));
+ EXPECT_FALSE(t.is_val_ref(seq_id));
+ EXPECT_TRUE(t.is_val_ref(val_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_val_ref());
+ EXPECT_FALSE(doc.is_val_ref());
+ EXPECT_FALSE(map.is_val_ref());
+ EXPECT_TRUE(keyval.is_val_ref());
+ EXPECT_FALSE(seq.is_val_ref());
+ EXPECT_TRUE(val.is_val_ref());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_val_ref());
+ EXPECT_FALSE(mdoc.is_val_ref());
+ EXPECT_FALSE(mmap.is_val_ref());
+ EXPECT_TRUE(mkeyval.is_val_ref());
+ EXPECT_FALSE(mseq.is_val_ref());
+ EXPECT_TRUE(mval.is_val_ref());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_val_ref(stream_id), t._p(stream_id)->m_type.is_val_ref());
+ EXPECT_EQ(t.is_val_ref(doc_id), t._p(doc_id)->m_type.is_val_ref());
+ EXPECT_EQ(t.is_val_ref(map_id), t._p(map_id)->m_type.is_val_ref());
+ EXPECT_EQ(t.is_val_ref(keyval_id), t._p(keyval_id)->m_type.is_val_ref());
+ EXPECT_EQ(t.is_val_ref(seq_id), t._p(seq_id)->m_type.is_val_ref());
+ EXPECT_EQ(t.is_val_ref(val_id), t._p(val_id)->m_type.is_val_ref());
+ EXPECT_EQ(stream.is_val_ref(), stream.get()->m_type.is_val_ref());
+ EXPECT_EQ(doc.is_val_ref(), doc.get()->m_type.is_val_ref());
+ EXPECT_EQ(map.is_val_ref(), map.get()->m_type.is_val_ref());
+ EXPECT_EQ(keyval.is_val_ref(), keyval.get()->m_type.is_val_ref());
+ EXPECT_EQ(seq.is_val_ref(), seq.get()->m_type.is_val_ref());
+ EXPECT_EQ(val.is_val_ref(), val.get()->m_type.is_val_ref());
+ EXPECT_EQ(mstream.is_val_ref(), mstream.get()->m_type.is_val_ref());
+ EXPECT_EQ(mdoc.is_val_ref(), mdoc.get()->m_type.is_val_ref());
+ EXPECT_EQ(mmap.is_val_ref(), mmap.get()->m_type.is_val_ref());
+ EXPECT_EQ(mkeyval.is_val_ref(), mkeyval.get()->m_type.is_val_ref());
+ EXPECT_EQ(mseq.is_val_ref(), mseq.get()->m_type.is_val_ref());
+ EXPECT_EQ(mval.is_val_ref(), mval.get()->m_type.is_val_ref());
+}
+
+// is_ref() on a bare NodeType: true if either the KEYREF or the VALREF bit is
+// set, in any combination with KEY/VAL; plain KEYVAL is false.
+TEST(NodeType, is_ref)
+{
+ EXPECT_FALSE(NodeType().is_ref());
+ EXPECT_FALSE(NodeType(KEYVAL).is_ref());
+ EXPECT_TRUE(NodeType(KEYREF).is_ref());
+ EXPECT_TRUE(NodeType(VALREF).is_ref());
+ EXPECT_TRUE(NodeType(KEY|VALREF).is_ref());
+ EXPECT_TRUE(NodeType(VAL|KEYREF).is_ref());
+ EXPECT_TRUE(NodeType(KEYREF|VALREF).is_ref());
+}
+
+// is_ref() on a parsed tree: same fixture as Tree.is_val_ref — refs appear
+// only as values, so exactly the keyval and the seq element report true.
+TEST(Tree, is_ref)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: [*valref, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_ref(stream_id));
+ EXPECT_FALSE(t.is_ref(doc_id));
+ EXPECT_FALSE(t.is_ref(map_id));
+ EXPECT_TRUE(t.is_ref(keyval_id));
+ EXPECT_FALSE(t.is_ref(seq_id));
+ EXPECT_TRUE(t.is_ref(val_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_ref());
+ EXPECT_FALSE(doc.is_ref());
+ EXPECT_FALSE(map.is_ref());
+ EXPECT_TRUE(keyval.is_ref());
+ EXPECT_FALSE(seq.is_ref());
+ EXPECT_TRUE(val.is_ref());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_ref());
+ EXPECT_FALSE(mdoc.is_ref());
+ EXPECT_FALSE(mmap.is_ref());
+ EXPECT_TRUE(mkeyval.is_ref());
+ EXPECT_FALSE(mseq.is_ref());
+ EXPECT_TRUE(mval.is_ref());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_ref(stream_id), t._p(stream_id)->m_type.is_ref());
+ EXPECT_EQ(t.is_ref(doc_id), t._p(doc_id)->m_type.is_ref());
+ EXPECT_EQ(t.is_ref(map_id), t._p(map_id)->m_type.is_ref());
+ EXPECT_EQ(t.is_ref(keyval_id), t._p(keyval_id)->m_type.is_ref());
+ EXPECT_EQ(t.is_ref(seq_id), t._p(seq_id)->m_type.is_ref());
+ EXPECT_EQ(t.is_ref(val_id), t._p(val_id)->m_type.is_ref());
+ EXPECT_EQ(stream.is_ref(), stream.get()->m_type.is_ref());
+ EXPECT_EQ(doc.is_ref(), doc.get()->m_type.is_ref());
+ EXPECT_EQ(map.is_ref(), map.get()->m_type.is_ref());
+ EXPECT_EQ(keyval.is_ref(), keyval.get()->m_type.is_ref());
+ EXPECT_EQ(seq.is_ref(), seq.get()->m_type.is_ref());
+ EXPECT_EQ(val.is_ref(), val.get()->m_type.is_ref());
+ EXPECT_EQ(mstream.is_ref(), mstream.get()->m_type.is_ref());
+ EXPECT_EQ(mdoc.is_ref(), mdoc.get()->m_type.is_ref());
+ EXPECT_EQ(mmap.is_ref(), mmap.get()->m_type.is_ref());
+ EXPECT_EQ(mkeyval.is_ref(), mkeyval.get()->m_type.is_ref());
+ EXPECT_EQ(mseq.is_ref(), mseq.get()->m_type.is_ref());
+ EXPECT_EQ(mval.is_ref(), mval.get()->m_type.is_ref());
+}
+
+// is_anchor_or_ref() on a bare NodeType: any of the four anchor/ref bits, in
+// any combination with KEY/VAL, makes it true.
+TEST(NodeType, is_anchor_or_ref)
+{
+ EXPECT_FALSE(NodeType().is_anchor_or_ref());
+ EXPECT_FALSE(NodeType(KEYVAL).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEYREF).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEYANCH).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(VALREF).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(VALANCH).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEY|VALREF).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEY|VALANCH).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(VAL|KEYREF).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(VAL|VALANCH).is_anchor_or_ref());
+ // NOTE(review): this KEY|VALANCH case duplicates the one four lines above;
+ // presumably another combination (e.g. VAL|KEYANCH) was intended — confirm
+ // against upstream ryml before changing vendored code.
+ EXPECT_TRUE(NodeType(KEY|VALANCH).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEYREF|VALREF).is_anchor_or_ref());
+ EXPECT_TRUE(NodeType(KEYANCH|VALANCH).is_anchor_or_ref());
+}
+
+// is_anchor_or_ref() on a parsed tree mixing both: the map has a key anchor,
+// the seq a val anchor, and the keyval/seq-element carry alias refs.
+TEST(Tree, is_anchor_or_ref)
+{
+ Tree t = parse_in_arena(R"(---
+&map map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ // ids for every node of interest
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ // const and mutable refs to the same nodes
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ // id-based queries on the tree
+ EXPECT_FALSE(t.is_anchor_or_ref(stream_id));
+ EXPECT_FALSE(t.is_anchor_or_ref(doc_id));
+ EXPECT_TRUE(t.is_anchor_or_ref(map_id));
+ EXPECT_TRUE(t.is_anchor_or_ref(keyval_id));
+ EXPECT_TRUE(t.is_anchor_or_ref(seq_id));
+ EXPECT_TRUE(t.is_anchor_or_ref(val_id));
+ // same queries through ConstNodeRef
+ EXPECT_FALSE(stream.is_anchor_or_ref());
+ EXPECT_FALSE(doc.is_anchor_or_ref());
+ EXPECT_TRUE(map.is_anchor_or_ref());
+ EXPECT_TRUE(keyval.is_anchor_or_ref());
+ EXPECT_TRUE(seq.is_anchor_or_ref());
+ EXPECT_TRUE(val.is_anchor_or_ref());
+ // same queries through NodeRef
+ EXPECT_FALSE(mstream.is_anchor_or_ref());
+ EXPECT_FALSE(mdoc.is_anchor_or_ref());
+ EXPECT_TRUE(mmap.is_anchor_or_ref());
+ EXPECT_TRUE(mkeyval.is_anchor_or_ref());
+ EXPECT_TRUE(mseq.is_anchor_or_ref());
+ EXPECT_TRUE(mval.is_anchor_or_ref());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_anchor_or_ref(stream_id), t._p(stream_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(t.is_anchor_or_ref(doc_id), t._p(doc_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(t.is_anchor_or_ref(map_id), t._p(map_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(t.is_anchor_or_ref(keyval_id), t._p(keyval_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(t.is_anchor_or_ref(seq_id), t._p(seq_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(t.is_anchor_or_ref(val_id), t._p(val_id)->m_type.is_anchor_or_ref());
+ EXPECT_EQ(stream.is_anchor_or_ref(), stream.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(doc.is_anchor_or_ref(), doc.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(map.is_anchor_or_ref(), map.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(keyval.is_anchor_or_ref(), keyval.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(seq.is_anchor_or_ref(), seq.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(val.is_anchor_or_ref(), val.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mstream.is_anchor_or_ref(), mstream.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mdoc.is_anchor_or_ref(), mdoc.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mmap.is_anchor_or_ref(), mmap.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mkeyval.is_anchor_or_ref(), mkeyval.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mseq.is_anchor_or_ref(), mseq.get()->m_type.is_anchor_or_ref());
+ EXPECT_EQ(mval.is_anchor_or_ref(), mval.get()->m_type.is_anchor_or_ref());
+}
+
+// is_key_quoted() on a bare NodeType: requires both KEY and KEYQUO — the
+// quote bit alone is not enough.
+TEST(NodeType, is_key_quoted)
+{
+ EXPECT_FALSE(NodeType().is_key_quoted());
+ EXPECT_FALSE(NodeType(KEYQUO).is_key_quoted());
+ EXPECT_TRUE(NodeType(KEY|KEYQUO).is_key_quoted());
+}
+
+// is_key_quoted() on a parsed tree: one map key is double-quoted, one is a
+// plain scalar; the containing map has no key of its own here (doc child).
+TEST(Tree, is_key_quoted)
+{
+ Tree t = parse_in_arena(R"(---
+"quoted": foo
+notquoted: bar
+...)");
+ // ids, const refs, and mutable refs for the three nodes of interest
+ const size_t map_id = t.first_child(t.root_id());
+ const size_t quoted_id = t.first_child(map_id);
+ const size_t notquoted_id = t.last_child(map_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef quoted = t.ref(quoted_id);
+ ConstNodeRef notquoted = t.ref(notquoted_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mquoted = t.ref(quoted_id);
+ NodeRef mnotquoted = t.ref(notquoted_id);
+ // queries through tree ids, ConstNodeRef, and NodeRef
+ EXPECT_FALSE(t.is_key_quoted(map_id));
+ EXPECT_TRUE(t.is_key_quoted(quoted_id));
+ EXPECT_FALSE(t.is_key_quoted(notquoted_id));
+ EXPECT_FALSE(map.is_key_quoted());
+ EXPECT_TRUE(quoted.is_key_quoted());
+ EXPECT_FALSE(notquoted.is_key_quoted());
+ EXPECT_FALSE(mmap.is_key_quoted());
+ EXPECT_TRUE(mquoted.is_key_quoted());
+ EXPECT_FALSE(mnotquoted.is_key_quoted());
+ // every access path must agree with the raw NodeType flags
+ EXPECT_EQ(t.is_key_quoted(map_id), t._p(map_id)->m_type.is_key_quoted());
+ EXPECT_EQ(t.is_key_quoted(quoted_id), t._p(quoted_id)->m_type.is_key_quoted());
+ EXPECT_EQ(t.is_key_quoted(notquoted_id), t._p(notquoted_id)->m_type.is_key_quoted());
+ EXPECT_EQ(map.is_key_quoted(), map.get()->m_type.is_key_quoted());
+ EXPECT_EQ(quoted.is_key_quoted(), quoted.get()->m_type.is_key_quoted());
+ EXPECT_EQ(notquoted.is_key_quoted(), notquoted.get()->m_type.is_key_quoted());
+ EXPECT_EQ(mmap.is_key_quoted(), mmap.get()->m_type.is_key_quoted());
+ EXPECT_EQ(mquoted.is_key_quoted(), mquoted.get()->m_type.is_key_quoted());
+ EXPECT_EQ(mnotquoted.is_key_quoted(), mnotquoted.get()->m_type.is_key_quoted());
+}
+
+TEST(NodeType, is_val_quoted)
+{
+ EXPECT_FALSE(NodeType().is_val_quoted());
+ EXPECT_FALSE(NodeType(VALQUO).is_val_quoted());
+ EXPECT_TRUE(NodeType(VAL|VALQUO).is_val_quoted());
+}
+
+TEST(Tree, is_val_quoted)
+{
+ Tree t = parse_in_arena(R"(---
+"quoted": "foo"
+notquoted: bar
+...)");
+ const size_t map_id = t.first_child(t.root_id());
+ const size_t quoted_id = t.first_child(map_id);
+ const size_t notquoted_id = t.last_child(map_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef quoted = t.ref(quoted_id);
+ ConstNodeRef notquoted = t.ref(notquoted_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mquoted = t.ref(quoted_id);
+ NodeRef mnotquoted = t.ref(notquoted_id);
+ EXPECT_FALSE(t.is_val_quoted(map_id));
+ EXPECT_TRUE(t.is_val_quoted(quoted_id));
+ EXPECT_FALSE(t.is_val_quoted(notquoted_id));
+ EXPECT_FALSE(map.is_val_quoted());
+ EXPECT_TRUE(quoted.is_val_quoted());
+ EXPECT_FALSE(notquoted.is_val_quoted());
+ EXPECT_FALSE(mmap.is_val_quoted());
+ EXPECT_TRUE(mquoted.is_val_quoted());
+ EXPECT_FALSE(mnotquoted.is_val_quoted());
+ EXPECT_EQ(t.is_val_quoted(map_id), t._p(map_id)->m_type.is_val_quoted());
+ EXPECT_EQ(t.is_val_quoted(quoted_id), t._p(quoted_id)->m_type.is_val_quoted());
+ EXPECT_EQ(t.is_val_quoted(notquoted_id), t._p(notquoted_id)->m_type.is_val_quoted());
+ EXPECT_EQ(map.is_val_quoted(), map.get()->m_type.is_val_quoted());
+ EXPECT_EQ(quoted.is_val_quoted(), quoted.get()->m_type.is_val_quoted());
+ EXPECT_EQ(notquoted.is_val_quoted(), notquoted.get()->m_type.is_val_quoted());
+ EXPECT_EQ(mmap.is_val_quoted(), mmap.get()->m_type.is_val_quoted());
+ EXPECT_EQ(mquoted.is_val_quoted(), mquoted.get()->m_type.is_val_quoted());
+ EXPECT_EQ(mnotquoted.is_val_quoted(), mnotquoted.get()->m_type.is_val_quoted());
+}
+
+TEST(NodeType, is_quoted)
+{
+ EXPECT_FALSE(NodeType().is_quoted());
+ EXPECT_FALSE(NodeType(KEYQUO).is_quoted());
+ EXPECT_FALSE(NodeType(VALQUO).is_quoted());
+ EXPECT_FALSE(NodeType(KEYQUO|VALQUO).is_quoted());
+ EXPECT_TRUE(NodeType(KEY|KEYQUO).is_quoted());
+ EXPECT_TRUE(NodeType(VAL|VALQUO).is_quoted());
+ EXPECT_FALSE(NodeType(KEY|VALQUO).is_quoted());
+ EXPECT_FALSE(NodeType(VAL|KEYQUO).is_quoted());
+}
+
+TEST(Tree, is_quoted)
+{
+ Tree t = parse_in_arena(R"(---
+"quoted1": foo
+quoted2: "foo"
+"quoted3": "foo"
+'quoted4': foo
+quoted5: 'foo'
+'quoted6': 'foo'
+notquoted: bar
+...)");
+ const size_t map_id = t.first_child(t.root_id());
+ const size_t quoted1_id = t.find_child(map_id, "quoted1");
+ const size_t quoted2_id = t.find_child(map_id, "quoted2");
+ const size_t quoted3_id = t.find_child(map_id, "quoted3");
+ const size_t quoted4_id = t.find_child(map_id, "quoted4");
+ const size_t quoted5_id = t.find_child(map_id, "quoted5");
+ const size_t quoted6_id = t.find_child(map_id, "quoted6");
+ const size_t notquoted_id = t.last_child(map_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef quoted1 = t.ref(quoted1_id);
+ ConstNodeRef quoted2 = t.ref(quoted2_id);
+ ConstNodeRef quoted3 = t.ref(quoted3_id);
+ ConstNodeRef quoted4 = t.ref(quoted4_id);
+ ConstNodeRef quoted5 = t.ref(quoted5_id);
+ ConstNodeRef quoted6 = t.ref(quoted6_id);
+ ConstNodeRef notquoted = t.ref(notquoted_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mquoted1 = t.ref(quoted1_id);
+ NodeRef mquoted2 = t.ref(quoted2_id);
+ NodeRef mquoted3 = t.ref(quoted3_id);
+ NodeRef mquoted4 = t.ref(quoted4_id);
+ NodeRef mquoted5 = t.ref(quoted5_id);
+ NodeRef mquoted6 = t.ref(quoted6_id);
+ NodeRef mnotquoted = t.ref(notquoted_id);
+ EXPECT_FALSE(t.is_quoted(map_id));
+ EXPECT_TRUE(t.is_quoted(quoted1_id));
+ EXPECT_TRUE(t.is_quoted(quoted2_id));
+ EXPECT_TRUE(t.is_quoted(quoted3_id));
+ EXPECT_TRUE(t.is_quoted(quoted4_id));
+ EXPECT_TRUE(t.is_quoted(quoted5_id));
+ EXPECT_TRUE(t.is_quoted(quoted6_id));
+ EXPECT_FALSE(t.is_quoted(notquoted_id));
+ EXPECT_FALSE(map.is_quoted());
+ EXPECT_TRUE(quoted1.is_quoted());
+ EXPECT_TRUE(quoted2.is_quoted());
+ EXPECT_TRUE(quoted3.is_quoted());
+ EXPECT_TRUE(quoted4.is_quoted());
+ EXPECT_TRUE(quoted5.is_quoted());
+ EXPECT_TRUE(quoted6.is_quoted());
+ EXPECT_FALSE(notquoted.is_quoted());
+ EXPECT_FALSE(mmap.is_quoted());
+ EXPECT_TRUE(mquoted1.is_quoted());
+ EXPECT_TRUE(mquoted2.is_quoted());
+ EXPECT_TRUE(mquoted3.is_quoted());
+ EXPECT_TRUE(mquoted4.is_quoted());
+ EXPECT_TRUE(mquoted5.is_quoted());
+ EXPECT_TRUE(mquoted6.is_quoted());
+ EXPECT_FALSE(mnotquoted.is_quoted());
+ EXPECT_EQ(t.is_quoted(map_id), t._p(map_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted1_id), t._p(quoted1_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted2_id), t._p(quoted2_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted3_id), t._p(quoted3_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted4_id), t._p(quoted4_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted5_id), t._p(quoted5_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(quoted6_id), t._p(quoted6_id)->m_type.is_quoted());
+ EXPECT_EQ(t.is_quoted(notquoted_id), t._p(notquoted_id)->m_type.is_quoted());
+ EXPECT_EQ(map.is_quoted(), map.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted1.is_quoted(), quoted1.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted2.is_quoted(), quoted2.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted3.is_quoted(), quoted3.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted4.is_quoted(), quoted4.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted5.is_quoted(), quoted5.get()->m_type.is_quoted());
+ EXPECT_EQ(quoted6.is_quoted(), quoted6.get()->m_type.is_quoted());
+ EXPECT_EQ(notquoted.is_quoted(), notquoted.get()->m_type.is_quoted());
+ EXPECT_EQ(mmap.is_quoted(), mmap.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted1.is_quoted(), mquoted1.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted2.is_quoted(), mquoted2.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted3.is_quoted(), mquoted3.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted4.is_quoted(), mquoted4.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted5.is_quoted(), mquoted5.get()->m_type.is_quoted());
+ EXPECT_EQ(mquoted6.is_quoted(), mquoted6.get()->m_type.is_quoted());
+ EXPECT_EQ(mnotquoted.is_quoted(), mnotquoted.get()->m_type.is_quoted());
+}
+
+
+TEST(Tree, parent_is_seq)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ //EXPECT_FALSE(t.parent_is_seq(stream_id));
+ EXPECT_TRUE(t.parent_is_seq(doc_id));
+ EXPECT_FALSE(t.parent_is_seq(map_id));
+ EXPECT_FALSE(t.parent_is_seq(keyval_id));
+ EXPECT_FALSE(t.parent_is_seq(seq_id));
+ EXPECT_TRUE(t.parent_is_seq(val_id));
+ //EXPECT_FALSE(stream.parent_is_seq());
+ EXPECT_TRUE(doc.parent_is_seq());
+ EXPECT_FALSE(map.parent_is_seq());
+ EXPECT_FALSE(keyval.parent_is_seq());
+ EXPECT_FALSE(seq.parent_is_seq());
+ EXPECT_TRUE(val.parent_is_seq());
+ //EXPECT_FALSE(mstream.parent_is_seq());
+ EXPECT_TRUE(mdoc.parent_is_seq());
+ EXPECT_FALSE(mmap.parent_is_seq());
+ EXPECT_FALSE(mkeyval.parent_is_seq());
+ EXPECT_FALSE(mseq.parent_is_seq());
+ EXPECT_TRUE(mval.parent_is_seq());
+ //EXPECT_EQ(t.parent_is_seq(stream_id), stream.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(doc_id), doc.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(map_id), map.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(keyval_id), keyval.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(seq_id), seq.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(val_id), val.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(doc_id), mdoc.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(map_id), mmap.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(keyval_id), mkeyval.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(seq_id), mseq.parent_is_seq());
+ EXPECT_EQ(t.parent_is_seq(val_id), mval.parent_is_seq());
+}
+
+TEST(Tree, parent_is_map)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ //EXPECT_FALSE(t.parent_is_map(stream_id));
+ EXPECT_FALSE(t.parent_is_map(doc_id));
+ EXPECT_TRUE(t.parent_is_map(map_id));
+ EXPECT_TRUE(t.parent_is_map(keyval_id));
+ EXPECT_TRUE(t.parent_is_map(seq_id));
+ EXPECT_FALSE(t.parent_is_map(val_id));
+ //EXPECT_FALSE(stream.parent_is_map());
+ EXPECT_FALSE(doc.parent_is_map());
+ EXPECT_TRUE(map.parent_is_map());
+ EXPECT_TRUE(keyval.parent_is_map());
+ EXPECT_TRUE(seq.parent_is_map());
+ EXPECT_FALSE(val.parent_is_map());
+ //EXPECT_FALSE(mstream.parent_is_map());
+ EXPECT_FALSE(mdoc.parent_is_map());
+ EXPECT_TRUE(mmap.parent_is_map());
+ EXPECT_TRUE(mkeyval.parent_is_map());
+ EXPECT_TRUE(mseq.parent_is_map());
+ EXPECT_FALSE(mval.parent_is_map());
+ //EXPECT_EQ(t.parent_is_map(stream_id), stream.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(doc_id), doc.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(map_id), map.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(keyval_id), keyval.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(seq_id), seq.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(val_id), val.parent_is_map());
+ //EXPECT_EQ(t.parent_is_map(stream_id), mstream.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(doc_id), mdoc.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(map_id), mmap.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(keyval_id), mkeyval.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(seq_id), mseq.parent_is_map());
+ EXPECT_EQ(t.parent_is_map(val_id), mval.parent_is_map());
+}
+
+TEST(Tree, has_parent)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ EXPECT_FALSE(t.has_parent(stream_id));
+ EXPECT_TRUE(t.has_parent(doc_id));
+ EXPECT_TRUE(t.has_parent(map_id));
+ EXPECT_TRUE(t.has_parent(keyval_id));
+ EXPECT_TRUE(t.has_parent(seq_id));
+ EXPECT_TRUE(t.has_parent(val_id));
+ EXPECT_FALSE(stream.has_parent());
+ EXPECT_TRUE(doc.has_parent());
+ EXPECT_TRUE(map.has_parent());
+ EXPECT_TRUE(keyval.has_parent());
+ EXPECT_TRUE(seq.has_parent());
+ EXPECT_TRUE(val.has_parent());
+ EXPECT_FALSE(mstream.has_parent());
+ EXPECT_TRUE(mdoc.has_parent());
+ EXPECT_TRUE(mmap.has_parent());
+ EXPECT_TRUE(mkeyval.has_parent());
+ EXPECT_TRUE(mseq.has_parent());
+ EXPECT_TRUE(mval.has_parent());
+ EXPECT_EQ(t.has_parent(stream_id), stream.has_parent());
+ EXPECT_EQ(t.has_parent(doc_id), doc.has_parent());
+ EXPECT_EQ(t.has_parent(map_id), map.has_parent());
+ EXPECT_EQ(t.has_parent(keyval_id), keyval.has_parent());
+ EXPECT_EQ(t.has_parent(seq_id), seq.has_parent());
+ EXPECT_EQ(t.has_parent(val_id), val.has_parent());
+ EXPECT_EQ(t.has_parent(stream_id), mstream.has_parent());
+ EXPECT_EQ(t.has_parent(doc_id), mdoc.has_parent());
+ EXPECT_EQ(t.has_parent(map_id), mmap.has_parent());
+ EXPECT_EQ(t.has_parent(keyval_id), mkeyval.has_parent());
+ EXPECT_EQ(t.has_parent(seq_id), mseq.has_parent());
+ EXPECT_EQ(t.has_parent(val_id), mval.has_parent());
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+TEST(Tree, num_children)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ EXPECT_EQ(t.num_children(stream_id), 1u);
+ EXPECT_EQ(t.num_children(doc_id), 2u);
+ EXPECT_EQ(t.num_children(map_id), 2u);
+ EXPECT_EQ(t.num_children(keyval_id), 0u);
+ EXPECT_EQ(t.num_children(seq_id), 2u);
+ EXPECT_EQ(t.num_children(val_id), 0u);
+ EXPECT_EQ(stream.num_children(), t.num_children(stream_id));
+ EXPECT_EQ(doc.num_children(), t.num_children(doc_id));
+ EXPECT_EQ(map.num_children(), t.num_children(map_id));
+ EXPECT_EQ(keyval.num_children(), t.num_children(keyval_id));
+ EXPECT_EQ(seq.num_children(), t.num_children(seq_id));
+ EXPECT_EQ(val.num_children(), t.num_children(val_id));
+ EXPECT_EQ(mstream.num_children(), t.num_children(stream_id));
+ EXPECT_EQ(mdoc.num_children(), t.num_children(doc_id));
+ EXPECT_EQ(mmap.num_children(), t.num_children(map_id));
+ EXPECT_EQ(mkeyval.num_children(), t.num_children(keyval_id));
+ EXPECT_EQ(mseq.num_children(), t.num_children(seq_id));
+ EXPECT_EQ(mval.num_children(), t.num_children(val_id));
+}
+
+TEST(Tree, child)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ const size_t val_id = t.first_child(seq_id);
+ ConstNodeRef stream = t.ref(stream_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ ConstNodeRef keyval = t.ref(keyval_id);
+ ConstNodeRef seq = t.ref(seq_id);
+ ConstNodeRef val = t.ref(val_id);
+ NodeRef mstream = t.ref(stream_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ NodeRef mkeyval = t.ref(keyval_id);
+ NodeRef mseq = t.ref(seq_id);
+ NodeRef mval = t.ref(val_id);
+ EXPECT_EQ(t.child(stream_id, 0), doc_id);
+ EXPECT_EQ(t.child(doc_id, 0), map_id);
+ EXPECT_EQ(t.child(map_id, 0), keyval_id);
+ EXPECT_EQ(t.child(keyval_id, 0), (size_t)NONE);
+ EXPECT_EQ(t.child(seq_id, 0), val_id);
+ EXPECT_EQ(t.child(val_id, 0), (size_t)NONE);
+ EXPECT_EQ(stream.child(0).id(), t.child(stream_id, 0));
+ EXPECT_EQ(doc.child(0).id(), t.child(doc_id, 0));
+ EXPECT_EQ(map.child(0).id(), t.child(map_id, 0));
+ EXPECT_EQ(keyval.child(0).id(), t.child(keyval_id, 0));
+ EXPECT_EQ(seq.child(0).id(), t.child(seq_id, 0));
+ EXPECT_EQ(val.child(0).id(), t.child(val_id, 0));
+ EXPECT_EQ(mstream.child(0).id(), t.child(stream_id, 0));
+ EXPECT_EQ(mdoc.child(0).id(), t.child(doc_id, 0));
+ EXPECT_EQ(mmap.child(0).id(), t.child(map_id, 0));
+ EXPECT_EQ(mkeyval.child(0).id(), t.child(keyval_id, 0));
+ EXPECT_EQ(mseq.child(0).id(), t.child(seq_id, 0));
+ EXPECT_EQ(mval.child(0).id(), t.child(val_id, 0));
+}
+
+TEST(Tree, find_child_by_name)
+{
+ Tree t = parse_in_arena(R"(---
+map: {foo: *keyvalref, notag: none}
+seq: &seq [*valref, bar]
+...)");
+ const size_t stream_id = t.root_id();
+ const size_t doc_id = t.first_child(stream_id);
+ const size_t map_id = t.first_child(doc_id);
+ const size_t keyval_id = t.first_child(map_id);
+ const size_t seq_id = t.last_child(doc_id);
+ ConstNodeRef doc = t.ref(doc_id);
+ ConstNodeRef map = t.ref(map_id);
+ NodeRef mdoc = t.ref(doc_id);
+ NodeRef mmap = t.ref(map_id);
+ EXPECT_EQ(t.find_child(doc_id, "map"), map_id);
+ EXPECT_EQ(t.find_child(doc_id, "seq"), seq_id);
+ EXPECT_EQ(t.find_child(doc_id, "..."), (size_t)NONE);
+ EXPECT_EQ(t.find_child(map_id, "foo"), keyval_id);
+ EXPECT_EQ(t.find_child(map_id, "bar"), (size_t)NONE);
+ EXPECT_EQ(doc.find_child("map").id(), t.find_child(doc_id, "map"));
+ EXPECT_EQ(doc.find_child("seq").id(), t.find_child(doc_id, "seq"));
+ EXPECT_EQ(doc.find_child("...").id(), t.find_child(doc_id, "..."));
+ EXPECT_EQ(map.find_child("foo").id(), t.find_child(map_id, "foo"));
+ EXPECT_EQ(map.find_child("bar").id(), t.find_child(map_id, "bar"));
+ EXPECT_EQ(mdoc.find_child("map").id(), t.find_child(doc_id, "map"));
+ EXPECT_EQ(mdoc.find_child("seq").id(), t.find_child(doc_id, "seq"));
+ EXPECT_EQ(mdoc.find_child("...").id(), t.find_child(doc_id, "..."));
+ EXPECT_EQ(mmap.find_child("foo").id(), t.find_child(map_id, "foo"));
+ EXPECT_EQ(mmap.find_child("bar").id(), t.find_child(map_id, "bar"));
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+TEST(change_type, from_val)
+{
+ Tree t = parse_in_arena("[val0, val1, val2]");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ Tree expected = parse_in_arena("[val0, {}, []]");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+TEST(change_type, from_keyval)
+{
+ Tree t = parse_in_arena("{keyval0: val0, keyval1: val1, keyval2: val2}");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ Tree expected = parse_in_arena("{keyval0: val0, keyval1: {}, keyval2: []}");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+
+TEST(change_type, from_map)
+{
+ Tree t = parse_in_arena("[{map0: val0}, {map1: {map1key0: a, map1key1: b}}, {map2: [map2val0, map2val1]}]");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ EXPECT_FALSE(t[0].val_is_null());
+ EXPECT_NE(t[0].val(), nullptr);
+ Tree expected = parse_in_arena("['', {map1: {map1key0: a, map1key1: b}}, []]");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+TEST(change_type, from_keymap)
+{
+ Tree t = parse_in_arena("{map0: {map0: val0}, map1: {map1: {map1key0: a, map1key1: b}}, map2: {map2: [map2val0, map2val1]}}");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ EXPECT_FALSE(t[0].val_is_null());
+ EXPECT_NE(t[0].val(), nullptr);
+ Tree expected = parse_in_arena("{map0: '', map1: {map1: {map1key0: a, map1key1: b}}, map2: []}");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+
+TEST(change_type, from_seq)
+{
+ Tree t = parse_in_arena("[[seq00, seq01], [seq10, seq11], [seq20, seq21]]");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ EXPECT_FALSE(t[0].val_is_null());
+ EXPECT_NE(t[0].val(), nullptr);
+ Tree expected = parse_in_arena("['', {}, [seq20, seq21]]");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+TEST(change_type, from_keyseq)
+{
+ Tree t = parse_in_arena("{map0: [seq00, seq01], map1: [seq10, seq11], map2: [seq20, seq21]}");
+ t[0].change_type(VAL);
+ t[1].change_type(MAP);
+ t[2].change_type(SEQ);
+ EXPECT_FALSE(t[0].val_is_null());
+ EXPECT_NE(t[0].val(), nullptr);
+ Tree expected = parse_in_arena("{map0: '', map1: {}, map2: [seq20, seq21]}");
+ EXPECT_EQ(emitrs_yaml<std::string>(t), emitrs_yaml<std::string>(expected));
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+TEST(Tree, lookup_path)
+{
+ const char yaml[] = R"(
+a:
+ b: bval
+ c:
+ d:
+ - e
+ - d
+ - f: fval
+ g: gval
+ h:
+ -
+ x: a
+ y: b
+ -
+ z: c
+ u:
+)";
+ Tree t = parse_in_arena(yaml);
+ print_tree(t);
+
+ EXPECT_EQ(t.lookup_path("a").target, 1);
+ EXPECT_EQ(t.lookup_path("a.b").target, 2);
+ EXPECT_EQ(t.lookup_path("a.c").target, 3);
+ EXPECT_EQ(t.lookup_path("a.c.d").target, 4);
+ EXPECT_EQ(t.lookup_path("a.c.d[0]").target, 5);
+ EXPECT_EQ(t.lookup_path("a.c.d[1]").target, 6);
+ EXPECT_EQ(t.lookup_path("a.c.d[2]").target, 7);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].f").target, 8);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].g").target, 9);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h").target, 10);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[0]").target, 11);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[0].x").target, 12);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[0].y").target, 13);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[1]").target, 14);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[1].z").target, 15);
+ EXPECT_EQ(t.lookup_path("a.c.d[2].h[1].u").target, 16);
+ EXPECT_EQ(t.lookup_path("d", 3).target, 4);
+ EXPECT_EQ(t.lookup_path("d[0]", 3).target, 5);
+ EXPECT_EQ(t.lookup_path("d[1]", 3).target, 6);
+ EXPECT_EQ(t.lookup_path("d[2]", 3).target, 7);
+ EXPECT_EQ(t.lookup_path("d[2].f", 3).target, 8);
+ EXPECT_EQ(t.lookup_path("d[2].g", 3).target, 9);
+ EXPECT_EQ(t.lookup_path("d[2].h", 3).target, 10);
+ EXPECT_EQ(t.lookup_path("d[2].h[0]", 3).target, 11);
+ EXPECT_EQ(t.lookup_path("d[2].h[0].x", 3).target, 12);
+ EXPECT_EQ(t.lookup_path("d[2].h[0].y", 3).target, 13);
+ EXPECT_EQ(t.lookup_path("d[2].h[1]", 3).target, 14);
+ EXPECT_EQ(t.lookup_path("d[2].h[1].z", 3).target, 15);
+ EXPECT_EQ(t.lookup_path("d[2].h[1].u", 3).target, 16);
+
+ auto lp = t.lookup_path("x");
+ EXPECT_FALSE(lp);
+ EXPECT_EQ(lp.target, (size_t)NONE);
+ EXPECT_EQ(lp.closest, (size_t)NONE);
+ EXPECT_EQ(lp.resolved(), "");
+ EXPECT_EQ(lp.unresolved(), "x");
+ lp = t.lookup_path("a.x");
+ EXPECT_FALSE(lp);
+ EXPECT_EQ(lp.target, (size_t)NONE);
+ EXPECT_EQ(lp.closest, 1);
+ EXPECT_EQ(lp.resolved(), "a");
+ EXPECT_EQ(lp.unresolved(), "x");
+ lp = t.lookup_path("a.b.x");
+ EXPECT_FALSE(lp);
+ EXPECT_EQ(lp.target, (size_t)NONE);
+ EXPECT_EQ(lp.closest, 2);
+ EXPECT_EQ(lp.resolved(), "a.b");
+ EXPECT_EQ(lp.unresolved(), "x");
+ lp = t.lookup_path("a.c.x");
+ EXPECT_FALSE(lp);
+ EXPECT_EQ(lp.target, (size_t)NONE);
+ EXPECT_EQ(lp.closest, 3);
+ EXPECT_EQ(lp.resolved(), "a.c");
+ EXPECT_EQ(lp.unresolved(), "x");
+
+ size_t sz = t.size();
+ EXPECT_EQ(t.lookup_path("x").target, (size_t)NONE);
+ EXPECT_EQ(t.lookup_path_or_modify("x", "x"), sz);
+ EXPECT_EQ(t.lookup_path("x").target, sz);
+ EXPECT_EQ(t.val(sz), "x");
+ EXPECT_EQ(t.lookup_path_or_modify("y", "x"), sz);
+ EXPECT_EQ(t.val(sz), "y");
+ EXPECT_EQ(t.lookup_path_or_modify("z", "x"), sz);
+ EXPECT_EQ(t.val(sz), "z");
+
+ sz = t.size();
+ EXPECT_EQ(t.lookup_path("a.x").target, (size_t)NONE);
+ EXPECT_EQ(t.lookup_path_or_modify("x", "a.x"), sz);
+ EXPECT_EQ(t.lookup_path("a.x").target, sz);
+ EXPECT_EQ(t.val(sz), "x");
+ EXPECT_EQ(t.lookup_path_or_modify("y", "a.x"), sz);
+ EXPECT_EQ(t.val(sz), "y");
+ EXPECT_EQ(t.lookup_path_or_modify("z", "a.x"), sz);
+ EXPECT_EQ(t.val(sz), "z");
+
+ sz = t.size();
+ EXPECT_EQ(t.lookup_path("a.c.x").target, (size_t)NONE);
+ EXPECT_EQ(t.lookup_path_or_modify("x", "a.c.x"), sz);
+ EXPECT_EQ(t.lookup_path("a.c.x").target, sz);
+ EXPECT_EQ(t.val(sz), "x");
+ EXPECT_EQ(t.lookup_path_or_modify("y", "a.c.x"), sz);
+ EXPECT_EQ(t.val(sz), "y");
+ EXPECT_EQ(t.lookup_path_or_modify("z", "a.c.x"), sz);
+ EXPECT_EQ(t.val(sz), "z");
+}
+
+TEST(Tree, lookup_path_or_modify)
+{
+ {
+ Tree dst = parse_in_arena("{}");
+ Tree const src = parse_in_arena("{d: [x, y, z]}");
+ dst.lookup_path_or_modify("ok", "a.b.c");
+ EXPECT_EQ(dst["a"]["b"]["c"].val(), "ok");
+ dst.lookup_path_or_modify(&src, src["d"].id(), "a.b.d");
+ EXPECT_EQ(dst["a"]["b"]["d"][0].val(), "x");
+ EXPECT_EQ(dst["a"]["b"]["d"][1].val(), "y");
+ EXPECT_EQ(dst["a"]["b"]["d"][2].val(), "z");
+ }
+
+ {
+ Tree t = parse_in_arena("{}");
+ csubstr bigpath = "newmap.newseq[0].newmap.newseq[0].first";
+ auto result = t.lookup_path(bigpath);
+ EXPECT_EQ(result.target, (size_t)NONE);
+ EXPECT_EQ(result.closest, (size_t)NONE);
+ EXPECT_EQ(result.resolved(), "");
+ EXPECT_EQ(result.unresolved(), bigpath);
+ size_t sz = t.lookup_path_or_modify("x", bigpath);
+ EXPECT_EQ(t.lookup_path(bigpath).target, sz);
+ EXPECT_EQ(t.val(sz), "x");
+ EXPECT_EQ(t["newmap"]["newseq"].num_children(), 1u);
+ EXPECT_EQ(t["newmap"]["newseq"][0].is_map(), true);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"].is_map(), true);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"]["newseq"].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"]["newseq"].num_children(), 1u);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"]["newseq"][0].is_map(), true);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"]["newseq"][0]["first"].val(), "x");
+ size_t sz2 = t.lookup_path_or_modify("y", bigpath);
+ EXPECT_EQ(t["newmap"]["newseq"][0]["newmap"]["newseq"][0]["first"].val(), "y");
+ EXPECT_EQ(sz2, sz);
+ EXPECT_EQ(t.lookup_path(bigpath).target, sz);
+ EXPECT_EQ(t.val(sz2), "y");
+
+ sz2 = t.lookup_path_or_modify("y", "newmap2.newseq2[2].newmap2.newseq2[3].first2");
+ EXPECT_EQ(t.lookup_path("newmap2.newseq2[2].newmap2.newseq2[3].first2").target, sz2);
+ EXPECT_EQ(t.val(sz2), "y");
+ EXPECT_EQ(t["newmap2"]["newseq2"].num_children(), 3u);
+ EXPECT_EQ(t["newmap2"]["newseq2"][0].val(), nullptr);
+ EXPECT_EQ(t["newmap2"]["newseq2"][1].val(), nullptr);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2].is_map(), true);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"].is_map(), true);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"].is_seq(), true);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"].num_children(), 4u);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][0].val(), nullptr);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][1].val(), nullptr);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][2].val(), nullptr);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][3].is_map(), true);
+ EXPECT_EQ(t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][3]["first2"].val(), "y");
+ sz2 = t.lookup_path_or_modify("z", "newmap2.newseq2[2].newmap2.newseq2[3].second2");
+ EXPECT_EQ (t["newmap2"]["newseq2"][2]["newmap2"]["newseq2"][3]["second2"].val(), "z");
+
+ sz = t.lookup_path_or_modify("foo", "newmap.newseq1[1]");
+ EXPECT_EQ(t["newmap"].is_map(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"].num_children(), 2u);
+ EXPECT_EQ(t["newmap"]["newseq1"][0].val(), nullptr);
+ EXPECT_EQ(t["newmap"]["newseq1"][1].val(), "foo");
+ sz = t.lookup_path_or_modify("bar", "newmap.newseq1[2][1]");
+ EXPECT_EQ(t["newmap"]["newseq1"].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"].num_children(), 3u);
+ EXPECT_EQ(t["newmap"]["newseq1"][0].val(), nullptr);
+ EXPECT_EQ(t["newmap"]["newseq1"][1].val(), "foo");
+ EXPECT_EQ(t["newmap"]["newseq1"][2].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"][2].num_children(), 2u);
+ EXPECT_EQ(t["newmap"]["newseq1"][2][0].val(), nullptr);
+ EXPECT_EQ(t["newmap"]["newseq1"][2][1].val(), "bar");
+ sz = t.lookup_path_or_modify("Foo?" , "newmap.newseq1[0]");
+ sz = t.lookup_path_or_modify("Bar?" , "newmap.newseq1[2][0]");
+ sz = t.lookup_path_or_modify("happy" , "newmap.newseq1[2][2][3]");
+ sz = t.lookup_path_or_modify("trigger", "newmap.newseq1[2][2][2]");
+ sz = t.lookup_path_or_modify("Arnold" , "newmap.newseq1[2][2][0]");
+ sz = t.lookup_path_or_modify("is" , "newmap.newseq1[2][2][1]");
+ EXPECT_EQ(t["newmap"]["newseq1"].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"].num_children(), 3u);
+ EXPECT_EQ(t["newmap"]["newseq1"][0].val(), "Foo?");
+ EXPECT_EQ(t["newmap"]["newseq1"][1].val(), "foo");
+ EXPECT_EQ(t["newmap"]["newseq1"][2].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"][2].num_children(), 3u);
+ EXPECT_EQ(t["newmap"]["newseq1"][2][0].val(), "Bar?");
+ EXPECT_EQ(t["newmap"]["newseq1"][2][1].val(), "bar");
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2].is_seq(), true);
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2].num_children(), 4u);
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2][0].val(), "Arnold");
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2][1].val(), "is");
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2][2].val(), "trigger");
+ EXPECT_EQ(t["newmap"]["newseq1"][2][2][3].val(), "happy");
+
+ EXPECT_EQ(emitrs_yaml<std::string>(t), R"(newmap:
+ newseq:
+ - newmap:
+ newseq:
+ - first: y
+ newseq1:
+ - 'Foo?'
+ - foo
+ - - 'Bar?'
+ - bar
+ - - Arnold
+ - is
+ - trigger
+ - happy
+newmap2:
+ newseq2:
+ -
+ -
+ - newmap2:
+ newseq2:
+ -
+ -
+ -
+ - first2: y
+ second2: z
+)");
+ }
+}
+
+
+
+//-----------------------------------------------------------------------------
+
+TEST(set_root_as_stream, empty_tree)
+{
+ Tree t;
+ NodeRef r = t.rootref();
+ EXPECT_EQ(r.is_stream(), false);
+ EXPECT_EQ(r.num_children(), 0u);
+ t.set_root_as_stream();
+ EXPECT_EQ(r.is_stream(), true);
+ EXPECT_EQ(r.num_children(), 0u);
+}
+
+TEST(set_root_as_stream, already_with_stream)
+{
+ Tree t;
+ t.to_stream(t.root_id());
+ NodeRef r = t.rootref();
+ EXPECT_EQ(r.is_stream(), true);
+ EXPECT_EQ(r.num_children(), 0u);
+ t.set_root_as_stream();
+ EXPECT_EQ(r.is_stream(), true);
+ EXPECT_EQ(r.num_children(), 0u);
+}
+
+
+TEST(set_root_as_stream, root_is_map)
+{
+ Tree t = parse_in_arena(R"({a: b, c: d})");
+ NodeRef r = t.rootref();
+ EXPECT_EQ(r.is_stream(), false);
+ EXPECT_EQ(r.is_doc(), false);
+ EXPECT_EQ(r.is_map(), true);
+ EXPECT_EQ(r.is_seq(), false);
+ EXPECT_EQ(r.num_children(), 2u);
+ print_tree(t);
+ std::cout << t;
+ t.set_root_as_stream();
+ print_tree(t);
+ std::cout << t;
+ EXPECT_EQ(r.is_stream(), true);
+ EXPECT_EQ(r.is_doc(), false);
+ EXPECT_EQ(r.is_map(), false);
+ EXPECT_EQ(r.is_seq(), true);
+ EXPECT_EQ(r.num_children(), 1u);
+ EXPECT_EQ(r[0].is_doc(), true);
+ EXPECT_EQ(r[0].is_map(), true);
+ EXPECT_EQ(r[0].is_seq(), false);
+ EXPECT_EQ(r[0].num_children(), 2u);
+ EXPECT_EQ(r[0]["a"].is_keyval(), true);
+ EXPECT_EQ(r[0]["c"].is_keyval(), true);
+ EXPECT_EQ(r[0]["a"].val(), "b");
+ EXPECT_EQ(r[0]["c"].val(), "d");
+}
+
+// Same as root_is_map, but the root is already flagged DOC (set by poking the
+// tree internals): the DOC flag ends up on the wrapped child, not the root.
+TEST(set_root_as_stream, root_is_docmap)
+{
+    Tree t = parse_in_arena(R"({a: b, c: d})");
+    t._p(t.root_id())->m_type.add(DOC); // simulate a doc-root by hand
+    NodeRef r = t.rootref();
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), true);
+    EXPECT_EQ(r.is_map(), true);
+    EXPECT_EQ(r.is_seq(), false);
+    EXPECT_EQ(r.num_children(), 2u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), true);
+    EXPECT_EQ(r[0].is_seq(), false);
+    EXPECT_EQ(r[0].num_children(), 2u);
+    EXPECT_EQ(r[0]["a"].is_keyval(), true);
+    EXPECT_EQ(r[0]["c"].is_keyval(), true);
+    EXPECT_EQ(r[0]["a"].val(), "b");
+    EXPECT_EQ(r[0]["c"].val(), "d");
+}
+
+
+// Wrapping a seq root: the seq moves into a single DOC child of the new
+// STREAM root, preserving order and values.
+TEST(set_root_as_stream, root_is_seq)
+{
+    Tree t = parse_in_arena(R"([a, b, c, d])");
+    NodeRef r = t.rootref();
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 4u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), false);
+    EXPECT_EQ(r[0].is_seq(), true);
+    EXPECT_EQ(r[0].num_children(), 4u);
+    EXPECT_EQ(r[0][0].val(), "a");
+    EXPECT_EQ(r[0][1].val(), "b");
+    EXPECT_EQ(r[0][2].val(), "c");
+    EXPECT_EQ(r[0][3].val(), "d");
+}
+
+// Same as root_is_seq, but the root is pre-flagged DOC: the DOC flag migrates
+// to the wrapped child.
+TEST(set_root_as_stream, root_is_docseq)
+{
+    Tree t = parse_in_arena(R"([a, b, c, d])");
+    t._p(t.root_id())->m_type.add(DOC); // simulate a doc-root by hand
+    NodeRef r = t.rootref();
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), true);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 4u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), false);
+    EXPECT_EQ(r[0].is_seq(), true);
+    EXPECT_EQ(r[0].num_children(), 4u);
+    EXPECT_EQ(r[0][0].val(), "a");
+    EXPECT_EQ(r[0][1].val(), "b");
+    EXPECT_EQ(r[0][2].val(), "c");
+    EXPECT_EQ(r[0][3].val(), "d");
+}
+
+// Wrapping a seq-of-maps root: the whole nested structure survives the move
+// into the new DOC child.
+TEST(set_root_as_stream, root_is_seqmap)
+{
+    Tree t = parse_in_arena(R"([{a: b, c: d}, {e: e, f: f}, {g: g, h: h}, {i: i, j: j}])");
+    NodeRef r = t.rootref();
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 4u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), false);
+    EXPECT_EQ(r[0].is_seq(), true);
+    EXPECT_EQ(r[0].num_children(), 4u);
+    EXPECT_EQ(r[0][0].is_map(), true);
+    EXPECT_EQ(r[0][1].is_map(), true);
+    EXPECT_EQ(r[0][2].is_map(), true);
+    EXPECT_EQ(r[0][3].is_map(), true);
+    EXPECT_EQ(r[0][0]["a"].val(), "b");
+    EXPECT_EQ(r[0][0]["c"].val(), "d");
+    EXPECT_EQ(r[0][1]["e"].val(), "e");
+    EXPECT_EQ(r[0][1]["f"].val(), "f");
+    EXPECT_EQ(r[0][2]["g"].val(), "g");
+    EXPECT_EQ(r[0][2]["h"].val(), "h");
+    EXPECT_EQ(r[0][3]["i"].val(), "i");
+    EXPECT_EQ(r[0][3]["j"].val(), "j");
+}
+
+// Wrapping a map-of-seqs root: nested seq values survive the move into the
+// new DOC child.
+TEST(set_root_as_stream, root_is_mapseq)
+{
+    Tree t = parse_in_arena(R"({a: [0, 1, 2], b: [3, 4, 5], c: [6, 7, 8]})");
+    NodeRef r = t.rootref();
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), true);
+    EXPECT_EQ(r.is_seq(), false);
+    EXPECT_EQ(r.num_children(), 3u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), true);
+    EXPECT_EQ(r[0].is_seq(), false);
+    EXPECT_EQ(r[0].num_children(), 3u);
+    EXPECT_EQ(r[0]["a"].is_seq(), true);
+    EXPECT_EQ(r[0]["b"].is_seq(), true);
+    EXPECT_EQ(r[0]["c"].is_seq(), true);
+    EXPECT_EQ(r[0]["a"][0].val(), "0");
+    EXPECT_EQ(r[0]["a"][1].val(), "1");
+    EXPECT_EQ(r[0]["a"][2].val(), "2");
+    EXPECT_EQ(r[0]["b"][0].val(), "3");
+    EXPECT_EQ(r[0]["b"][1].val(), "4");
+    EXPECT_EQ(r[0]["b"][2].val(), "5");
+    EXPECT_EQ(r[0]["c"][0].val(), "6");
+    EXPECT_EQ(r[0]["c"][1].val(), "7");
+    EXPECT_EQ(r[0]["c"][2].val(), "8");
+}
+
+// Wrapping a tagged DOCVAL root: both the value and its val tag move onto the
+// new DOC child under the STREAM root.
+TEST(set_root_as_stream, root_is_docval)
+{
+    Tree t;
+    NodeRef r = t.rootref();
+    r.set_type(DOCVAL);
+    r.set_val("bar");
+    r.set_val_tag("<!foo>");
+    EXPECT_EQ(r.is_stream(), false);
+    EXPECT_EQ(r.is_doc(), true);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), false);
+    EXPECT_EQ(r.is_val(), true);
+    EXPECT_EQ(r.has_val_tag(), true);
+    EXPECT_EQ(r.val_tag(), "<!foo>");
+    EXPECT_EQ(r.num_children(), 0u);
+    print_tree(t);
+    std::cout << t;
+    t.set_root_as_stream();
+    print_tree(t);
+    std::cout << t;
+    EXPECT_EQ(r.is_stream(), true);
+    EXPECT_EQ(r.is_doc(), false);
+    EXPECT_EQ(r.is_map(), false);
+    EXPECT_EQ(r.is_seq(), true);
+    EXPECT_EQ(r.is_val(), false);
+    ASSERT_EQ(r.num_children(), 1u);
+    EXPECT_EQ(r[0].is_stream(), false);
+    EXPECT_EQ(r[0].is_doc(), true);
+    EXPECT_EQ(r[0].is_map(), false);
+    EXPECT_EQ(r[0].is_seq(), false);
+    EXPECT_EQ(r[0].is_val(), true);
+    EXPECT_EQ(r[0].has_val_tag(), true);
+    EXPECT_EQ(r[0].val_tag(), "<!foo>");
+    EXPECT_EQ(r[0].num_children(), 0u);
+}
+
+
+//-------------------------------------------
+//-------------------------------------------
+//-------------------------------------------
+
+// Tree::doc(i) / Tree::docref(i): for a multi-doc stream, doc(i) must be the
+// id of the i-th child of the stream root. Checked through the Tree API, a
+// mutable NodeRef, and a ConstNodeRef alike.
+TEST(Tree, doc)
+{
+    Tree t = parse_in_arena(R"(---
+doc0
+---
+doc1
+---
+doc2
+---
+doc3
+---
+doc4
+)");
+    size_t ir = t.root_id();
+    ASSERT_EQ(t.num_children(ir), 5u);
+    ASSERT_TRUE(t.is_stream(ir));
+    EXPECT_EQ(t.child(ir, 0), t.doc(0));
+    EXPECT_EQ(t.child(ir, 1), t.doc(1));
+    EXPECT_EQ(t.child(ir, 2), t.doc(2));
+    EXPECT_EQ(t.child(ir, 3), t.doc(3));
+    EXPECT_EQ(t.child(ir, 4), t.doc(4));
+    {
+        // mutable refs
+        NodeRef r = t.rootref();
+        EXPECT_EQ(r.id(), ir);
+        EXPECT_EQ(r.child(0), r.doc(0));
+        EXPECT_EQ(r.child(1), r.doc(1));
+        EXPECT_EQ(r.child(2), r.doc(2));
+        EXPECT_EQ(r.child(3), r.doc(3));
+        EXPECT_EQ(r.child(4), r.doc(4));
+        EXPECT_EQ(r.child(0).id(), t.doc(0));
+        EXPECT_EQ(r.child(1).id(), t.doc(1));
+        EXPECT_EQ(r.child(2).id(), t.doc(2));
+        EXPECT_EQ(r.child(3).id(), t.doc(3));
+        EXPECT_EQ(r.child(4).id(), t.doc(4));
+        EXPECT_EQ(r.child(0).id(), t.docref(0).id());
+        EXPECT_EQ(r.child(1).id(), t.docref(1).id());
+        EXPECT_EQ(r.child(2).id(), t.docref(2).id());
+        EXPECT_EQ(r.child(3).id(), t.docref(3).id());
+        EXPECT_EQ(r.child(4).id(), t.docref(4).id());
+    }
+    {
+        // const refs
+        const Tree &ct = t;
+        const ConstNodeRef r = ct.rootref();
+        EXPECT_EQ(r.id(), ir);
+        EXPECT_EQ(r.child(0), r.doc(0));
+        EXPECT_EQ(r.child(1), r.doc(1));
+        EXPECT_EQ(r.child(2), r.doc(2));
+        EXPECT_EQ(r.child(3), r.doc(3));
+        EXPECT_EQ(r.child(4), r.doc(4));
+        EXPECT_EQ(r.child(0).id(), t.doc(0));
+        EXPECT_EQ(r.child(1).id(), t.doc(1));
+        EXPECT_EQ(r.child(2).id(), t.doc(2));
+        EXPECT_EQ(r.child(3).id(), t.doc(3));
+        EXPECT_EQ(r.child(4).id(), t.doc(4));
+        EXPECT_EQ(r.child(0).id(), t.docref(0).id());
+        EXPECT_EQ(r.child(1).id(), t.docref(1).id());
+        EXPECT_EQ(r.child(2).id(), t.docref(2).id());
+        EXPECT_EQ(r.child(3).id(), t.docref(3).id());
+        EXPECT_EQ(r.child(4).id(), t.docref(4).id());
+    }
+}
+
+
+//-------------------------------------------
+//-------------------------------------------
+//-------------------------------------------
+
+// Tree tag-directive storage: directives accumulate up to
+// RYML_MAX_TAG_DIRECTIVES, adding one beyond that is an error, and
+// clear_tag_directives() empties the list again.
+TEST(Tree, add_tag_directives)
+{
+    #if RYML_MAX_TAG_DIRECTIVES != 4
+    #error this test assumes RYML_MAX_TAG_DIRECTIVES == 4
+    #endif
+    // one more directive than the tree can hold, to exercise the error path
+    const TagDirective td[RYML_MAX_TAG_DIRECTIVES + 1] = {
+        TagDirective{csubstr("!a!"), csubstr("!ay-"), 0u},
+        TagDirective{csubstr("!b!"), csubstr("!by-"), 0u},
+        TagDirective{csubstr("!c!"), csubstr("!cy-"), 0u},
+        TagDirective{csubstr("!d!"), csubstr("!dy-"), 0u},
+        TagDirective{csubstr("!e!"), csubstr("!ey-"), 0u},
+    };
+    Tree t;
+    // verify that exactly the first `num` directives are stored, in order,
+    // with all members (handle, prefix, next_node_id) intact
+    auto check_up_to = [&](size_t num)
+    {
+        size_t pos = 0;
+        EXPECT_EQ(t.num_tag_directives(), num);
+        for(TagDirective const& d : t.tag_directives())
+        {
+            EXPECT_EQ(d.handle.str, td[pos].handle.str);
+            EXPECT_EQ(d.handle.len, td[pos].handle.len);
+            EXPECT_EQ(d.prefix.str, td[pos].prefix.str);
+            EXPECT_EQ(d.prefix.len, td[pos].prefix.len); // fixed: was a copy-paste duplicate of the prefix.str check, leaving prefix.len unverified
+            EXPECT_EQ(d.next_node_id, td[pos].next_node_id);
+            ++pos;
+        }
+        EXPECT_EQ(pos, num);
+    };
+    check_up_to(0);
+    t.add_tag_directive(td[0]);
+    check_up_to(1);
+    t.add_tag_directive(td[1]);
+    check_up_to(2);
+    t.add_tag_directive(td[2]);
+    check_up_to(3);
+    t.add_tag_directive(td[3]);
+    check_up_to(4);
+    ExpectError::do_check(&t, [&]{ // number exceeded
+        t.add_tag_directive(td[4]);
+    });
+    t.clear_tag_directives();
+    check_up_to(0);
+}
+
+// Tree::resolve_tag_sub(): a tag directive is selected by the node id where
+// it takes effect (next_node_id), so the same handle (!m!) can resolve to
+// different prefixes in different documents of the stream.
+TEST(Tree, resolve_tag)
+{
+    csubstr yaml = R"(
+#%TAG !m! !my-
+--- # Bulb here
+!m!light fluorescent
+...
+#%TAG !m! !meta-
+--- # Color here
+!m!light green
+)";
+    // we're not testing the parser here, just the tag mechanics.
+    // So we'll add the tag directives by hand.
+    // (the %TAG lines above are deliberately commented out, so the parser
+    // records no directives of its own)
+    Tree t = parse_in_arena(yaml);
+    EXPECT_EQ(t[0].val_tag(), "!m!light");
+    EXPECT_EQ(t[1].val_tag(), "!m!light");
+    EXPECT_EQ(t.num_tag_directives(), 0u);
+    t.add_tag_directive(TagDirective{csubstr("!m!"), csubstr("!my-"), 1});
+    t.add_tag_directive(TagDirective{csubstr("!m!"), csubstr("!meta-"), 2});
+    EXPECT_EQ(t.num_tag_directives(), 2u);
+    char buf_[100];
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 1u), csubstr("<!my-light>"));
+    EXPECT_EQ(t.resolve_tag_sub(buf_, "!m!light", 2u), csubstr("<!meta-light>"));
+}
+
+
+//-------------------------------------------
+// this is needed to use the test case library
+// This executable defines no declarative YmlTestCases, but the test case
+// library it links against requires the symbol; always returns null.
+Case const* get_case(csubstr /*name*/)
+{
+    return nullptr;
+}
+
+} // namespace yml
+} // namespace c4
+
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#elif defined(__clang__)
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/thirdparty/ryml/test/test_yaml_events.cpp b/thirdparty/ryml/test/test_yaml_events.cpp
new file mode 100644
index 000000000..683c6d0f9
--- /dev/null
+++ b/thirdparty/ryml/test/test_yaml_events.cpp
@@ -0,0 +1,467 @@
+#ifndef RYML_SINGLE_HEADER
+#include <c4/yml/std/std.hpp>
+#include <c4/yml/yml.hpp>
+#endif
+#include <gtest/gtest.h>
+
+#include "./test_case.hpp"
+#include "./test_suite/test_suite_events.hpp"
+#include "./test_suite/test_suite_events_emitter.cpp" // HACK
+
+namespace c4 {
+namespace yml {
+
+// Parse `src` into a tree, emit the tree as a YAML-test-suite event stream
+// (+STR/+DOC/+MAP/+SEQ/=VAL/=ALI/...), and compare against `expected`.
+void test_evts(csubstr src, std::string expected)
+{
+    Tree tree = parse_in_arena(src);
+    #if RYML_DBG
+    print_tree(tree);
+    #endif
+    auto actual = emit_events<std::string>(tree);
+    EXPECT_EQ(actual, expected);
+}
+
+// an empty source produces only the stream begin/end events
+TEST(events, empty)
+{
+    test_evts(
+        R"()",
+        R"(+STR
+-STR
+)"
+    );
+}
+
+// whitespace-only source: still no document events
+TEST(events, empty_whitespace)
+{
+    test_evts(
+        R"( )",
+        R"(+STR
+-STR
+)"
+    );
+}
+
+// whitespace and newlines only: still no document events
+TEST(events, empty_whitespace_newlines)
+{
+    test_evts(
+        R"(
+  )",
+        R"(+STR
+-STR
+)"
+    );
+}
+
+// comments alone do not create a document either
+TEST(events, empty_whitespace_newlines_comments)
+{
+    test_evts(
+        R"(
+# a comment
+  )",
+        R"(+STR
+-STR
+)"
+    );
+}
+
+// a single scalar doc; the ' marker in =VAL denotes a single-quoted scalar
+TEST(events, docval)
+{
+    test_evts(
+        R"('quoted val'
+)",
+        R"(+STR
++DOC
+=VAL 'quoted val
+-DOC
+-STR
+)"
+    );
+}
+
+// explicit --- / ... separators: each --- opens a "+DOC ---" event; the
+// final empty document still emits an empty plain scalar (=VAL :)
+TEST(events, docsep)
+{
+    test_evts(
+        R"(--- 'quoted val'
+--- another
+...
+--- and yet another
+...
+---
+...
+)",
+        R"(+STR
++DOC ---
+=VAL 'quoted val
+-DOC
++DOC ---
+=VAL :another
+-DOC
++DOC ---
+=VAL :and yet another
+-DOC
++DOC ---
+=VAL :
+-DOC
+-STR
+)"
+    );
+}
+
+// content after "..." (doc3) starts a new document even without an explicit
+// "---"; all three docs are emitted as separate documents
+TEST(events, docsep_v2)
+{
+    test_evts(
+        R"(
+doc1
+---
+doc2
+...
+doc3
+)",
+        R"(+STR
++DOC ---
+=VAL :doc1
+-DOC
++DOC ---
+=VAL :doc2
+-DOC
++DOC ---
+=VAL :doc3
+-DOC
+-STR
+)"
+    );
+}
+
+// a flow map emits +MAP / key-val pairs / -MAP inside an implicit doc
+TEST(events, basic_map)
+{
+    test_evts(
+        "{foo: bar}",
+        R"(+STR
++DOC
++MAP
+=VAL :foo
+=VAL :bar
+-MAP
+-DOC
+-STR
+)"
+    );
+}
+
+// a flow seq emits +SEQ / values / -SEQ inside an implicit doc
+TEST(events, basic_seq)
+{
+    test_evts(
+        "[foo, bar]",
+        R"(+STR
++DOC
++SEQ
+=VAL :foo
+=VAL :bar
+-SEQ
+-DOC
+-STR
+)"
+    );
+}
+
+// double-quoted escape sequences are decoded on parse and re-escaped in the
+// emitted event scalar (note \/ becomes a plain /)
+TEST(events, escapes)
+{
+    test_evts(
+        R"("\t\	\ \r\n\0\f\/\a\v\e\N\_\L\P	\b")",
+        "+STR\n"
+        "+DOC\n"
+        "=VAL '\\t\\t \\r\\n\\0\\f/\\a\\v\\e\\N\\_\\L\\P \\b" "\n"
+        "-DOC\n"
+        "-STR\n"
+    );
+}
+
+// \xNN / \uNNNN / \UNNNNNNNN escapes are decoded to their UTF-8 bytes
+TEST(events, dquo_bytes)
+{
+    test_evts(
+        R"("\x0a\x0a\u263A\x0a\x55\x56\x57\x0a\u2705\U0001D11E")",
+        "+STR\n"
+        "+DOC\n"
+        "=VAL '\\n\\n☺\\nUVW\\n✅𝄞" "\n"
+        "-DOC\n"
+        "-STR\n"
+    );
+}
+
+// !!set resolves to the core-schema tag; each "? key" entry gets an empty
+// value scalar (=VAL :)
+TEST(events, sets)
+{
+    test_evts(
+        R"(--- !!set
+? Mark McGwire
+? Sammy Sosa
+? Ken Griff
+)",
+        R"(+STR
++DOC ---
++MAP <tag:yaml.org,2002:set>
+=VAL :Mark McGwire
+=VAL :
+=VAL :Sammy Sosa
+=VAL :
+=VAL :Ken Griff
+=VAL :
+-MAP
+-DOC
+-STR
+)");
+}
+
+// !!binary scalars: the double-quoted form with trailing-backslash line
+// continuations collapses into one run, while the literal block scalar (|)
+// keeps its newlines (emitted as \n escapes)
+TEST(events, binary)
+{
+    test_evts(
+        R"(canonical: !!binary "\
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
+generic: !!binary |
+ R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
+ OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+ +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
+ AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
+description:
+ The binary value above is a tiny arrow encoded as a gif image.
+)",
+        R"(+STR
++DOC
++MAP
+=VAL :canonical
+=VAL <tag:yaml.org,2002:binary> 'R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLCAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
+=VAL :generic
+=VAL <tag:yaml.org,2002:binary> 'R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\nOTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\n+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\nAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\n
+=VAL :description
+=VAL :The binary value above is a tiny arrow encoded as a gif image.
+-MAP
+-DOC
+-STR
+)");
+}
+
+
+// yaml-test-suite case 6CK3: local tags, the !! shorthand, and a %TAG handle
+// (with a %21 percent-encoded '!' in the suffix) all resolve correctly
+TEST(events, tag_directives_6CK3)
+{
+    test_evts(
+        R"(
+%TAG !e! tag:example.com,2000:app/
+---
+- !local foo
+- !!str bar
+- !e!tag%21 baz
+)",
+        R"(+STR
++DOC ---
++SEQ
+=VAL <!local> :foo
+=VAL <tag:yaml.org,2002:str> :bar
+=VAL <tag:example.com,2000:app/tag!> :baz
+-SEQ
+-DOC
+-STR
+)");
+}
+
+// yaml-test-suite case 6VLF: an unknown %FOO directive is ignored
+TEST(events, tag_directives_6VLF)
+{
+    test_evts(
+        R"(
+%FOO  bar baz # Should be ignored
+              # with a warning.
+--- "foo"
+)",
+        R"(+STR
++DOC ---
+=VAL 'foo
+-DOC
+-STR
+)");
+}
+
+// yaml-test-suite case 6WLZ: a %TAG redefining the primary handle (!) only
+// affects the document that follows it
+TEST(events, tag_directives_6WLZ)
+{
+    test_evts(
+        R"(
+# Private
+---
+!foo "bar"
+...
+# Global
+%TAG ! tag:example.com,2000:app/
+---
+!foo "bar"
+)",
+        R"(+STR
++DOC ---
+=VAL <!foo> 'bar
+-DOC
++DOC ---
+=VAL <tag:example.com,2000:app/foo> 'bar
+-DOC
+-STR
+)");
+}
+
+// yaml-test-suite case 9WXW: like 6WLZ but without an explicit --- before the
+// first doc; the private !foo still stays local to that doc
+TEST(events, tag_directives_9WXW)
+{
+    test_evts(
+        R"(
+# Private
+#--- # note this is commented out
+!foo "bar"
+...
+# Global
+%TAG ! tag:example.com,2000:app/
+---
+!foo "bar"
+)",
+        R"(+STR
++DOC ---
+=VAL <!foo> 'bar
+-DOC
++DOC ---
+=VAL <tag:example.com,2000:app/foo> 'bar
+-DOC
+-STR
+)");
+}
+
+
+// yaml-test-suite case 7FWL: verbatim !<...> tags are passed through as-is
+// on both key-side and value-side scalars
+TEST(events, tag_directives_7FWL)
+{
+    test_evts(
+        R"(!<tag:yaml.org,2002:str> foo :
+  !<!bar> baz
+)",
+        R"(+STR
++DOC
++MAP
+=VAL <tag:yaml.org,2002:str> :foo
+=VAL <!bar> :baz
+-MAP
+-DOC
+-STR
+)");
+}
+
+// yaml-test-suite case P76L: %TAG may redefine the secondary handle (!!),
+// so !!int resolves against the custom prefix, not the core schema
+TEST(events, tag_directives_P76L)
+{
+    test_evts(
+        R"(
+%TAG !! tag:example.com,2000:app/
+---
+!!int 1 - 3 # Interval, not integer
+)",
+        R"(+STR
++DOC ---
+=VAL <tag:example.com,2000:app/int> :1 - 3
+-DOC
+-STR
+)");
+}
+
+// yaml-test-suite case S4JQ: the non-specific tag "!" emits as <!>
+TEST(events, tag_directives_S4JQ)
+{
+    test_evts(
+        R"(
+- "12"
+- 12
+- ! 12
+)",
+        R"(+STR
++DOC
++SEQ
+=VAL '12
+=VAL :12
+=VAL <!> :12
+-SEQ
+-DOC
+-STR
+)");
+}
+
+// the same handle (!m!) is redefined between documents; each doc's scalars
+// resolve against the directive active for that doc
+TEST(events, tag_directives_lookup)
+{
+    test_evts(
+        R"(
+%TAG !m! !my-
+--- # Bulb here
+!m!light fluorescent
+...
+%TAG !m! !meta-
+--- # Color here
+!m!light green
+)",
+        R"(+STR
++DOC ---
+=VAL <!my-light> :fluorescent
+-DOC
++DOC ---
+=VAL <!meta-light> :green
+-DOC
+-STR
+)");
+}
+
+// anchors and aliases: &A emits as "+MAP &A", *A emits as "=ALI *A" (the <<
+// merge key is kept as a plain key, not expanded). Note the input's "-5" has
+// no space after the dash, so B.L is the scalar "-5", not a nested seq --
+// matching the "=VAL :-5" event below.
+TEST(events, anchors_refs)
+{
+    test_evts(
+        R"(
+A: &A
+  V: 3
+  L:
+    - 1
+B:
+  <<: *A
+  V: 4
+  L:
+    -5
+)",
+        R"(+STR
++DOC
++MAP
+=VAL :A
++MAP &A
+=VAL :V
+=VAL :3
+=VAL :L
++SEQ
+=VAL :1
+-SEQ
+-MAP
+=VAL :B
++MAP
+=VAL :<<
+=ALI *A
+=VAL :V
+=VAL :4
+=VAL :L
+=VAL :-5
+-MAP
+-MAP
+-DOC
+-STR
+)");
+}
+
+
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+//-----------------------------------------------------------------------------
+
+// The other test executables are written to contain the declarative-style
+// YmlTestCases. This executable does not have any but the build setup
+// assumes it does, and links with the test lib, which requires an existing
+// get_case() function. So this is here to act as placeholder until (if?)
+// proper test cases are added here. This was detected in #47 (thanks
+// @cburgard).
+// placeholder required by the test-case library (see comment above);
+// always returns null since no declarative cases exist in this TU
+Case const* get_case(csubstr)
+{
+    return nullptr;
+}
+
+} // namespace yml
+} // namespace c4