diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..c00b9cf --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "benchmark/libs/zig-yaml"] + path = benchmark/libs/zig-yaml + url = https://github.com/kubkon/zig-yaml diff --git a/benchmark/benchmark.py b/benchmark/benchmark.py index 7d67bc8..fee2e29 100644 --- a/benchmark/benchmark.py +++ b/benchmark/benchmark.py @@ -20,26 +20,26 @@ ) # Parsing with zaml: Prototype YAML parser written in Zig: -start = time.time() +start = time.perf_counter() zaml_result = zaml.load(big_yaml) -print(f"Benchmark results:\nzaml took {(time.time() - start):.2f} seconds") +print(f"Benchmark results:\nzaml took {(time.perf_counter() - start):.4f} seconds") # Parsing with PyYAML in C: -start = time.time() +start = time.perf_counter() pyyaml_c_result = pyyaml.load(big_yaml, Loader=pyyaml.CSafeLoader) -print(f"PyYAML CSafeLoader took {(time.time() - start):.2f} seconds") +print(f"PyYAML CSafeLoader took {(time.perf_counter() - start):.4f} seconds") # Parsing with ruamel: -start = time.time() +start = time.perf_counter() yaml = YAML(typ="safe") yaml.load(big_yaml) rueaml_result = yaml.load(big_yaml) -print(f"ruamel took {(time.time() - start):.2f} seconds") +print(f"ruamel took {(time.perf_counter() - start):.4f} seconds") # Parsing with PyYAML: -start = time.time() +start = time.perf_counter() pyyaml_result = pyyaml.load(big_yaml, Loader=pyyaml.SafeLoader) -print(f"PyYAML SafeLoader took {(time.time() - start):.2f} seconds") +print(f"PyYAML SafeLoader took {(time.perf_counter() - start):.4f} seconds") assert zaml_result == pyyaml_result == pyyaml_c_result == rueaml_result diff --git a/benchmark/benchmark.zig b/benchmark/benchmark.zig index 07196b0..7283c51 100644 --- a/benchmark/benchmark.zig +++ b/benchmark/benchmark.zig @@ -7,8 +7,7 @@ const std = @import("std"); // Zig library for parsing yaml // https://github.com/kubkon/zig-yaml -// TODO: include this as a Git submodule or as a package (when Zig gets official package manager) -const yaml = @import("libs/zig-yaml/src/main.zig"); +const yaml = @import("libs/zig-yaml/src/yaml.zig"); const PyArg_ParseTuple = py.PyArg_ParseTuple; const PyObject = py.PyObject; @@ -21,42 +20,50 @@ const PyDict_SetItem = py.PyDict_SetItem; const Py_BuildValue = py.Py_BuildValue; const METH_VARARGS = py.METH_VARARGS; -// Would not use "testing" allocator for production -const test_allocator = std.testing.allocator; +var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){}; // Don't think about using this in production, it probably has bugs + memory leaks -fn benchmark_load(self: [*c]PyObject, args: [*c]PyObject) callconv(.C) [*]PyObject { +fn benchmark_load(self: [*c]PyObject, args: [*c]PyObject) callconv(.C) [*c]PyObject { _ = self; var string: [*:0]const u8 = undefined; - // TODO: handle errors / unexpected input. Probably not a good idea to silently ignore them. - _ = PyArg_ParseTuple(args, "s", &string); + if (PyArg_ParseTuple(args, "s", &string) == 0) return null; - // "catch unreachable" tells Zig compiler this can't possibly fail - // Of course, it might fail: this is just a benchmark. - // Did I mention not to use this in production? 
- var untyped = yaml.Yaml.load(std.testing.allocator, std.mem.sliceTo(string, 0)) catch unreachable; - // Free all memory at the end of the current scope - defer untyped.deinit(); + var arena = std.heap.ArenaAllocator.init(general_purpose_allocator.allocator()); + defer arena.deinit(); + const allocator = arena.allocator(); + // TODO: remove 'catch unreachable' by catching the YamlError + // https://github.com/kubkon/zig-yaml/blob/3d3c7ae400243a37c6b422b6cba7173656984897/src/yaml.zig#L17-L22 + // define and set an appropriate error + // https://docs.python.org/3.9/extending/extending.html#intermezzo-errors-and-exceptions + // and return null as above + var untyped = yaml.Yaml.load(allocator, std.mem.sliceTo(string, 0)) catch unreachable; + + // TODO: same as TODO on ln 50 but maybe assert on `docs` size // Our friend "catch unreachable" again :) var map = untyped.docs.items[0].asMap() catch unreachable; var dict = PyDict_New(); - const keys = map.keys(); + for (map.keys(), map.values()) |key, value| { + // TODO: `value` type can be any of https://github.com/kubkon/zig-yaml/blob/3d3c7ae400243a37c6b422b6cba7173656984897/src/yaml.zig#L28-L33 + // Suggestion for handling the type appropriately: + // 1. Pattern match on value type + // 2. Build the corresponding PyObject https://docs.python.org/3.9/extending/extending.html#building-arbitrary-values + // 3. Return its pointer - for (keys) |key| { - const value = map.get(key) orelse unreachable; - var pyKey = Py_BuildValue("s#", @ptrCast([*]const u8, key), key.len); - var valueStr = value.asString() catch unreachable; - const pyValue = Py_BuildValue("s#", @ptrCast([*]const u8, valueStr), valueStr.len); + var value_str = value.asString() catch unreachable; // TODO: again, we just ignore the potential errors that could happen here. // Don't do that in real life! - _ = PyDict_SetItem(dict, pyKey, pyValue); - } + const py_key_ptr: [*]const u8 = @ptrCast(key); + const py_value_ptr: [*]const u8 = @ptrCast(value_str); + const py_key = Py_BuildValue("s#", py_key_ptr, key.len); + const py_value = Py_BuildValue("s#", py_value_ptr, value_str.len); + _ = PyDict_SetItem(dict, py_key, py_value); + } return Py_BuildValue("O", dict); } @@ -99,4 +106,3 @@ var benchmarkmodule = PyModuleDef{ pub export fn PyInit_benchmark() [*]PyObject { return PyModule_Create(&benchmarkmodule); } - diff --git a/benchmark/libs/zig-yaml b/benchmark/libs/zig-yaml new file mode 160000 index 0000000..3d3c7ae --- /dev/null +++ b/benchmark/libs/zig-yaml @@ -0,0 +1 @@ +Subproject commit 3d3c7ae400243a37c6b422b6cba7173656984897 diff --git a/benchmark/libs/zig-yaml/LICENSE b/benchmark/libs/zig-yaml/LICENSE deleted file mode 100644 index 1a0fbac..0000000 --- a/benchmark/libs/zig-yaml/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -MIT License - -Copyright (c) 2021 Jakub Konka - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software.
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - diff --git a/benchmark/libs/zig-yaml/README.md b/benchmark/libs/zig-yaml/README.md deleted file mode 100644 index b54f5ac..0000000 --- a/benchmark/libs/zig-yaml/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# zig-yaml - -YAML parser for Zig - -## What is it? - -This lib is meant to serve as a basic (or maybe not?) YAML parser for Zig. It will strive to be YAML 1.2 compatible -but one step at a time. - -This is very much a work-in-progress, so expect things to break on a regular basis. Oh, I'd love to get the -community involved in helping out with this btw! Feel free to fork and submit patches, enhancements, and of course -issues. - -## Basic usage - -The parser currently understands a few YAML primitives such as: -* explicit documents (`---`, `...`) -* mappings (`:`) -* sequences (`-`, `[`, `]`) - -In fact, if you head over to `examples/` dir, you will find YAML examples that have been tested against this -parser. You can also have a look at end-to-end test inputs in `test/` directory. - -If you want to use the parser as a library, add it as a package the usual way, and then: - -```zig -const std = @import("std"); -const yaml = @import("yaml"); - -const source = - \\names: [ John Doe, MacIntosh, Jane Austin ] - \\numbers: - \\ - 10 - \\ - -8 - \\ - 6 - \\nested: - \\ some: one - \\ wick: john doe - \\finally: [ 8.17, - \\ 19.78 , 17 , - \\ 21 ] -; -``` - -1. For untyped, raw representation of YAML, use `Yaml.load`: - -```zig -var untyped = try yaml.Yaml.load(std.testing.allocator, source); -defer untyped.deinit(); - -try std.testing.expectEqual(untyped.docs.items.len, 1); - -const map = untyped.docs.items[0].map; -try std.testing.expect(map.contains("names")); -try std.testing.expectEqual(map.get("names").?.list.len, 3); -``` - -2. For typed representation of YAML, use `Yaml.parse`: - -```zig -const Simple = struct { - names: []const []const u8, - numbers: []const i16, - nested: struct { - some: []const u8, - wick: []const u8, - }, - finally: [4]f16, -}; - -const simple = try untyped.parse(Simple); -try std.testing.expectEqual(simple.names.len, 3); -``` - -3. To convert `Yaml` structure back into text representation, use `Yaml.stringify`: - -```zig -try untyped.stringify(std.io.getStdOut().writer()); -``` - -which should write the following output to standard output when run: - -```sh -names: [ John Doe, MacIntosh, Jane Austin ] -numbers: [ 10, -8, 6 ] -nested: - some: one - wick: john doe -finally: [ 8.17, 19.78, 17, 21 ] -``` diff --git a/benchmark/libs/zig-yaml/build.zig b/benchmark/libs/zig-yaml/build.zig deleted file mode 100644 index 89bd48f..0000000 --- a/benchmark/libs/zig-yaml/build.zig +++ /dev/null @@ -1,35 +0,0 @@ -const std = @import("std"); - -pub fn build(b: *std.build.Builder) void { - // Standard release options allow the person running `zig build` to select - // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. 
- const mode = b.standardReleaseOptions(); - - const lib = b.addStaticLibrary("yaml", "src/main.zig"); - lib.setBuildMode(mode); - lib.install(); - - var main_tests = b.addTest("src/main.zig"); - main_tests.setBuildMode(mode); - // main_tests.addPackagePath("e2e_tests", "test/test.zig"); - - var e2e_tests = b.addTest("test/test.zig"); - e2e_tests.setBuildMode(mode); - e2e_tests.addPackagePath("yaml", "src/main.zig"); - - const test_step = b.step("test", "Run library tests"); - test_step.dependOn(&main_tests.step); - test_step.dependOn(&e2e_tests.step); - - const example = b.addExecutable("yaml", "examples/yaml.zig"); - example.setBuildMode(mode); - example.addPackagePath("yaml", "src/main.zig"); - example.step.dependOn(b.getInstallStep()); - - const path_to_yaml = b.option([]const u8, "input-yaml", "Path to input yaml file") orelse "examples/simple.yml"; - - const run_example = example.run(); - run_example.addArg(path_to_yaml); - const run_example_step = b.step("run", "Runs examples/yaml.zig"); - run_example_step.dependOn(&run_example.step); -} diff --git a/benchmark/libs/zig-yaml/ci/linux_ci b/benchmark/libs/zig-yaml/ci/linux_ci deleted file mode 100755 index 3077459..0000000 --- a/benchmark/libs/zig-yaml/ci/linux_ci +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh - -set -x -set -e - -ZIG="zig-linux-x86_64-0.9.0-dev.1903+2af94e76a" -wget -nv "https://ziglang.org/builds/$ZIG.tar.xz" -tar xf "$ZIG.tar.xz" -export PATH="$(pwd)/$ZIG:$PATH" - -zig build test -zig build run diff --git a/benchmark/libs/zig-yaml/ci/macos_ci b/benchmark/libs/zig-yaml/ci/macos_ci deleted file mode 100755 index 4ad294f..0000000 --- a/benchmark/libs/zig-yaml/ci/macos_ci +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh - -set -x -set -e - -ZIG="zig-macos-x86_64-0.9.0-dev.1903+2af94e76a" -curl -L "https://ziglang.org/builds/$ZIG.tar.xz" -o "$ZIG.tar.xz" -tar xf "$ZIG.tar.xz" -export PATH="$(pwd)/$ZIG:$PATH" - -zig build test -zig build run diff --git a/benchmark/libs/zig-yaml/ci/win_ci b/benchmark/libs/zig-yaml/ci/win_ci deleted file mode 100755 index 84ab17a..0000000 --- a/benchmark/libs/zig-yaml/ci/win_ci +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh - -set -x -set -e - -ZIG="zig-windows-x86_64-0.9.0-dev.1903+2af94e76a" -curl -L "https://ziglang.org/builds/$ZIG.zip" -o "$ZIG.zip" -7z x "$ZIG.zip" -export PATH="$(pwd)/$ZIG:$PATH" - -zig build test -zig build run diff --git a/benchmark/libs/zig-yaml/examples/explicit_doc.yml b/benchmark/libs/zig-yaml/examples/explicit_doc.yml deleted file mode 100644 index 0b8ce5e..0000000 --- a/benchmark/libs/zig-yaml/examples/explicit_doc.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- !tapi-tbd -a: b -c : d -... diff --git a/benchmark/libs/zig-yaml/examples/lists.yml b/benchmark/libs/zig-yaml/examples/lists.yml deleted file mode 100644 index b6478a2..0000000 --- a/benchmark/libs/zig-yaml/examples/lists.yml +++ /dev/null @@ -1,7 +0,0 @@ -- a -- b -- c -- d: - - 0 - - 1 - - 2 diff --git a/benchmark/libs/zig-yaml/examples/map_of_lists.yml b/benchmark/libs/zig-yaml/examples/map_of_lists.yml deleted file mode 100644 index ea9b826..0000000 --- a/benchmark/libs/zig-yaml/examples/map_of_lists.yml +++ /dev/null @@ -1,9 +0,0 @@ -map: -- 0 -- 1 -- 2 -another: - - key: value - - keys: [ a, b, - c, d ] -final: what is that? 
diff --git a/benchmark/libs/zig-yaml/examples/maps.yml b/benchmark/libs/zig-yaml/examples/maps.yml deleted file mode 100644 index ec484f0..0000000 --- a/benchmark/libs/zig-yaml/examples/maps.yml +++ /dev/null @@ -1,8 +0,0 @@ -key1: - key1_1: value1_1 - key1_2: value1_2 -key2: value2 -key3: - key3_1: value3_1 - key3_2: value3_2 - key3_3: value3_3 diff --git a/benchmark/libs/zig-yaml/examples/simple.yml b/benchmark/libs/zig-yaml/examples/simple.yml deleted file mode 100644 index 9c7565a..0000000 --- a/benchmark/libs/zig-yaml/examples/simple.yml +++ /dev/null @@ -1,2 +0,0 @@ -key: value -other_key: other_value diff --git a/benchmark/libs/zig-yaml/examples/yaml.zig b/benchmark/libs/zig-yaml/examples/yaml.zig deleted file mode 100644 index a3ff132..0000000 --- a/benchmark/libs/zig-yaml/examples/yaml.zig +++ /dev/null @@ -1,41 +0,0 @@ -const std = @import("std"); -const yaml = @import("yaml"); - -const io = std.io; -const mem = std.mem; - -var gpa = std.heap.GeneralPurposeAllocator(.{}){}; - -const usage = - \\Usage: yaml - \\ - \\General options: - \\-h, --help Print this help and exit -; - -pub fn main() !void { - var arena = std.heap.ArenaAllocator.init(gpa.allocator()); - defer arena.deinit(); - const allocator = arena.allocator(); - - const args = try std.process.argsAlloc(allocator); - if (args.len == 1) { - try io.getStdErr().writeAll("fatal: no input path to yaml file specified"); - try io.getStdOut().writeAll(usage); - return; - } - - if (mem.eql(u8, "-h", args[1]) or mem.eql(u8, "--help", args[1])) { - try io.getStdOut().writeAll(usage); - return; - } else { - const file_path = args[1]; - const file = try std.fs.cwd().openFile(file_path, .{}); - defer file.close(); - - const source = try file.readToEndAlloc(allocator, std.math.maxInt(u32)); - - var parsed = try yaml.Yaml.load(allocator, source); - try parsed.stringify(io.getStdOut().writer()); - } -} diff --git a/benchmark/libs/zig-yaml/src/Tokenizer.zig b/benchmark/libs/zig-yaml/src/Tokenizer.zig deleted file mode 100644 index 150770d..0000000 --- a/benchmark/libs/zig-yaml/src/Tokenizer.zig +++ /dev/null @@ -1,539 +0,0 @@ -const Tokenizer = @This(); - -const std = @import("std"); -const log = std.log.scoped(.tokenizer); -const testing = std.testing; - -buffer: []const u8, -index: usize = 0, -string_type: StringType = .Unquoted, - -const StringType = enum { - Unquoted, - SingleQuoted, - DoubleQuoted, -}; - -pub const Token = struct { - id: Id, - start: usize, - end: usize, - // Count of spaces/tabs. - // Only active for .Space and .Tab tokens. - count: ?usize = null, - - pub const Id = enum { - Eof, - - NewLine, - DocStart, // --- - DocEnd, // ... - SeqItemInd, // - - MapValueInd, // : - FlowMapStart, // { - FlowMapEnd, // } - FlowSeqStart, // [ - FlowSeqEnd, // ] - - Comma, - Space, - Tab, - Comment, // # - Alias, // * - Anchor, // & - Tag, // ! 
- SingleQuote, // ' - DoubleQuote, // " - EscapeSeq, // '' for single quoted strings, starts with \ for double quoted strings - - Literal, - }; -}; - -pub const TokenIndex = usize; - -pub const TokenIterator = struct { - buffer: []const Token, - pos: TokenIndex = 0, - - pub fn next(self: *TokenIterator) Token { - const token = self.buffer[self.pos]; - self.pos += 1; - return token; - } - - pub fn peek(self: TokenIterator) ?Token { - if (self.pos >= self.buffer.len) return null; - return self.buffer[self.pos]; - } - - pub fn reset(self: *TokenIterator) void { - self.pos = 0; - } - - pub fn seekTo(self: *TokenIterator, pos: TokenIndex) void { - self.pos = pos; - } - - pub fn seekBy(self: *TokenIterator, offset: isize) void { - const new_pos = @bitCast(isize, self.pos) + offset; - if (new_pos < 0) { - self.pos = 0; - } else { - self.pos = @intCast(usize, new_pos); - } - } -}; - -pub fn next(self: *Tokenizer) Token { - var result = Token{ - .id = .Eof, - .start = self.index, - .end = undefined, - }; - - var state: union(enum) { - Start, - NewLine, - Space: usize, - Tab: usize, - Hyphen: usize, - Dot: usize, - SingleQuoteOrEscape, - Literal, - EscapeSeq, - } = .Start; - - while (self.index < self.buffer.len) : (self.index += 1) { - const c = self.buffer[self.index]; - switch (state) { - .Start => switch (c) { - ' ' => { - state = .{ .Space = 1 }; - }, - '\t' => { - state = .{ .Tab = 1 }; - }, - '\n' => { - result.id = .NewLine; - self.index += 1; - break; - }, - '\r' => { - state = .NewLine; - }, - '-' => { - state = .{ .Hyphen = 1 }; - }, - '.' => { - state = .{ .Dot = 1 }; - }, - ',' => { - result.id = .Comma; - self.index += 1; - break; - }, - '#' => { - result.id = .Comment; - self.index += 1; - break; - }, - '*' => { - result.id = .Alias; - self.index += 1; - break; - }, - '&' => { - result.id = .Anchor; - self.index += 1; - break; - }, - '!' 
=> { - result.id = .Tag; - self.index += 1; - break; - }, - '\'' => { - switch (self.string_type) { - .Unquoted => { - result.id = .SingleQuote; - self.string_type = if (self.string_type == .SingleQuoted) .Unquoted else .SingleQuoted; - self.index += 1; - break; - }, - .SingleQuoted => { - state = .SingleQuoteOrEscape; - }, - .DoubleQuoted => { - result.id = .SingleQuote; - self.index += 1; - break; - }, - } - }, - '"' => { - result.id = .DoubleQuote; - self.string_type = if (self.string_type == .DoubleQuoted) .Unquoted else .DoubleQuoted; - self.index += 1; - break; - }, - '[' => { - result.id = .FlowSeqStart; - self.index += 1; - break; - }, - ']' => { - result.id = .FlowSeqEnd; - self.index += 1; - break; - }, - ':' => { - result.id = .MapValueInd; - self.index += 1; - break; - }, - '{' => { - result.id = .FlowMapStart; - self.index += 1; - break; - }, - '}' => { - result.id = .FlowMapEnd; - self.index += 1; - break; - }, - '\\' => { - if (self.string_type == .DoubleQuoted) { - state = .EscapeSeq; - } else { - state = .Literal; - } - }, - else => { - state = .Literal; - }, - }, - .Space => |*count| switch (c) { - ' ' => { - count.* += 1; - }, - else => { - result.id = .Space; - result.count = count.*; - break; - }, - }, - .Tab => |*count| switch (c) { - ' ' => { - count.* += 1; - }, - else => { - result.id = .Tab; - result.count = count.*; - break; - }, - }, - .NewLine => switch (c) { - '\n' => { - result.id = .NewLine; - self.index += 1; - break; - }, - else => {}, // TODO this should be an error condition - }, - .Hyphen => |*count| switch (c) { - ' ' => { - result.id = .SeqItemInd; - self.index += 1; - break; - }, - '-' => { - count.* += 1; - - if (count.* == 3) { - result.id = .DocStart; - self.index += 1; - break; - } - }, - else => { - state = .Literal; - }, - }, - .Dot => |*count| switch (c) { - '.' => { - count.* += 1; - - if (count.* == 3) { - result.id = .DocEnd; - self.index += 1; - break; - } - }, - else => { - state = .Literal; - }, - }, - .SingleQuoteOrEscape => switch (c) { - '\'' => { - result.id = .EscapeSeq; - self.index += 1; - break; - }, - else => { - self.string_type = .Unquoted; - result.id = .SingleQuote; - break; - }, - }, - .Literal => switch (c) { - '\\' => { - result.id = .Literal; - if (self.string_type == .DoubleQuoted) { - // escape sequence - break; - } - }, - '\r', '\n', ' ', '\'', '"', ',', ':', ']', '}' => { - result.id = .Literal; - break; - }, - else => { - result.id = .Literal; - }, - }, - .EscapeSeq => { - // Only support single character escape codes for now... - result.id = .EscapeSeq; - self.index += 1; - break; - }, - } - } - - if (self.index >= self.buffer.len) { - switch (state) { - .Literal => { - result.id = .Literal; - }, - .SingleQuoteOrEscape => { - result.id = .SingleQuote; - }, - else => {}, - } - } - - result.end = self.index; - - log.debug("{any}", .{result}); - log.debug(" | {s}", .{self.buffer[result.start..result.end]}); - - return result; -} - -fn testExpected(source: []const u8, expected: []const Token.Id) !void { - var tokenizer = Tokenizer{ - .buffer = source, - }; - - for (expected) |exp| { - const token = tokenizer.next(); - try testing.expectEqual(exp, token.id); - } -} - -test "empty doc" { - try testExpected("", &[_]Token.Id{.Eof}); -} - -test "empty doc with explicit markers" { - try testExpected( - \\--- - \\... 
- , &[_]Token.Id{ - .DocStart, .NewLine, .DocEnd, .Eof, - }); -} - -test "sequence of values" { - try testExpected( - \\- 0 - \\- 1 - \\- 2 - , &[_]Token.Id{ - .SeqItemInd, - .Literal, - .NewLine, - .SeqItemInd, - .Literal, - .NewLine, - .SeqItemInd, - .Literal, - .Eof, - }); -} - -test "sequence of sequences" { - try testExpected( - \\- [ val1, val2] - \\- [val3, val4 ] - , &[_]Token.Id{ - .SeqItemInd, - .FlowSeqStart, - .Space, - .Literal, - .Comma, - .Space, - .Literal, - .FlowSeqEnd, - .NewLine, - .SeqItemInd, - .FlowSeqStart, - .Literal, - .Comma, - .Space, - .Literal, - .Space, - .FlowSeqEnd, - .Eof, - }); -} - -test "mappings" { - try testExpected( - \\key1: value1 - \\key2: value2 - , &[_]Token.Id{ - .Literal, - .MapValueInd, - .Space, - .Literal, - .NewLine, - .Literal, - .MapValueInd, - .Space, - .Literal, - .Eof, - }); -} - -test "inline mapped sequence of values" { - try testExpected( - \\key : [ val1, - \\ val2 ] - , &[_]Token.Id{ - .Literal, - .Space, - .MapValueInd, - .Space, - .FlowSeqStart, - .Space, - .Literal, - .Comma, - .Space, - .NewLine, - .Space, - .Literal, - .Space, - .FlowSeqEnd, - .Eof, - }); -} - -test "part of tdb" { - try testExpected( - \\--- !tapi-tbd - \\tbd-version: 4 - \\targets: [ x86_64-macos ] - \\ - \\uuids: - \\ - target: x86_64-macos - \\ value: F86CC732-D5E4-30B5-AA7D-167DF5EC2708 - \\ - \\install-name: '/usr/lib/libSystem.B.dylib' - \\... - , &[_]Token.Id{ - .DocStart, - .Space, - .Tag, - .Literal, - .NewLine, - .Literal, - .MapValueInd, - .Space, - .Literal, - .NewLine, - .Literal, - .MapValueInd, - .Space, - .FlowSeqStart, - .Space, - .Literal, - .Space, - .FlowSeqEnd, - .NewLine, - .NewLine, - .Literal, - .MapValueInd, - .NewLine, - .Space, - .SeqItemInd, - .Literal, - .MapValueInd, - .Space, - .Literal, - .NewLine, - .Space, - .Literal, - .MapValueInd, - .Space, - .Literal, - .NewLine, - .NewLine, - .Literal, - .MapValueInd, - .Space, - .SingleQuote, - .Literal, - .SingleQuote, - .NewLine, - .DocEnd, - .Eof, - }); -} - -test "escape sequences" { - try testExpected( - \\a: 'here''s an apostrophe' - \\b: "a newline\nand a\ttab" - , &[_]Token.Id{ - .Literal, - .MapValueInd, - .Space, - .SingleQuote, - .Literal, - .EscapeSeq, - .Literal, - .Space, - .Literal, - .Space, - .Literal, - .SingleQuote, - .NewLine, - .Literal, - .MapValueInd, - .Space, - .DoubleQuote, - .Literal, - .Space, - .Literal, - .EscapeSeq, - .Literal, - .Space, - .Literal, - .EscapeSeq, - .Literal, - .DoubleQuote, - .Eof, - }); -} diff --git a/benchmark/libs/zig-yaml/src/main.zig b/benchmark/libs/zig-yaml/src/main.zig deleted file mode 100644 index 6d16656..0000000 --- a/benchmark/libs/zig-yaml/src/main.zig +++ /dev/null @@ -1,690 +0,0 @@ -const std = @import("std"); -const assert = std.debug.assert; -const math = std.math; -const mem = std.mem; -const testing = std.testing; -const log = std.log.scoped(.yaml); - -const Allocator = mem.Allocator; -const ArenaAllocator = std.heap.ArenaAllocator; - -pub const Tokenizer = @import("Tokenizer.zig"); -pub const parse = @import("parse.zig"); - -const Node = parse.Node; -const Tree = parse.Tree; -const ParseError = parse.ParseError; - -pub const YamlError = error{ - UnexpectedNodeType, - OutOfMemory, -} || ParseError || std.fmt.ParseIntError; - -pub const ValueType = enum { - empty, - int, - float, - string, - list, - map, -}; - -pub const List = []Value; -pub const Map = std.StringArrayHashMap(Value); - -pub const Value = union(ValueType) { - empty, - int: i64, - float: f64, - string: []const u8, - list: List, - map: Map, - - pub fn 
asInt(self: Value) !i64 { - if (self != .int) return error.TypeMismatch; - return self.int; - } - - pub fn asFloat(self: Value) !f64 { - if (self != .float) return error.TypeMismatch; - return self.float; - } - - pub fn asString(self: Value) ![]const u8 { - if (self != .string) return error.TypeMismatch; - return self.string; - } - - pub fn asList(self: Value) !List { - if (self != .list) return error.TypeMismatch; - return self.list; - } - - pub fn asMap(self: Value) !Map { - if (self != .map) return error.TypeMismatch; - return self.map; - } - - const StringifyArgs = struct { - indentation: usize = 0, - should_inline_first_key: bool = false, - }; - - pub const StringifyError = std.os.WriteError; - - pub fn stringify(self: Value, writer: anytype, args: StringifyArgs) StringifyError!void { - switch (self) { - .empty => return, - .int => |int| return writer.print("{}", .{int}), - .float => |float| return writer.print("{d}", .{float}), - .string => |string| return writer.print("{s}", .{string}), - .list => |list| { - const len = list.len; - if (len == 0) return; - - const first = list[0]; - if (first.is_compound()) { - for (list) |elem, i| { - try writer.writeByteNTimes(' ', args.indentation); - try writer.writeAll("- "); - try elem.stringify(writer, .{ - .indentation = args.indentation + 2, - .should_inline_first_key = true, - }); - if (i < len - 1) { - try writer.writeByte('\n'); - } - } - return; - } - - try writer.writeAll("[ "); - for (list) |elem, i| { - try elem.stringify(writer, args); - if (i < len - 1) { - try writer.writeAll(", "); - } - } - try writer.writeAll(" ]"); - }, - .map => |map| { - const keys = map.keys(); - const len = keys.len; - if (len == 0) return; - - for (keys) |key, i| { - if (!args.should_inline_first_key or i != 0) { - try writer.writeByteNTimes(' ', args.indentation); - } - try writer.print("{s}: ", .{key}); - - const value = map.get(key) orelse unreachable; - const should_inline = blk: { - if (!value.is_compound()) break :blk true; - if (value == .list and value.list.len > 0 and !value.list[0].is_compound()) break :blk true; - break :blk false; - }; - - if (should_inline) { - try value.stringify(writer, args); - } else { - try writer.writeByte('\n'); - try value.stringify(writer, .{ - .indentation = args.indentation + 4, - }); - } - - if (i < len - 1) { - try writer.writeByte('\n'); - } - } - }, - } - } - - fn is_compound(self: Value) bool { - return switch (self) { - .list, .map => true, - else => false, - }; - } - - fn fromNode(arena: Allocator, tree: *const Tree, node: *const Node, type_hint: ?ValueType) YamlError!Value { - if (node.cast(Node.Doc)) |doc| { - const inner = doc.value orelse { - // empty doc - return Value{ .empty = .{} }; - }; - return Value.fromNode(arena, tree, inner, null); - } else if (node.cast(Node.Map)) |map| { - var out_map = std.StringArrayHashMap(Value).init(arena); - try out_map.ensureUnusedCapacity(map.values.items.len); - - for (map.values.items) |entry| { - const key_tok = tree.tokens[entry.key]; - const key = try arena.dupe(u8, tree.source[key_tok.start..key_tok.end]); - const value = try Value.fromNode(arena, tree, entry.value, null); - - out_map.putAssumeCapacityNoClobber(key, value); - } - - return Value{ .map = out_map }; - } else if (node.cast(Node.List)) |list| { - var out_list = std.ArrayList(Value).init(arena); - try out_list.ensureUnusedCapacity(list.values.items.len); - - if (list.values.items.len > 0) { - const hint = if (list.values.items[0].cast(Node.Value)) |value| hint: { - const start = 
tree.tokens[value.start.?]; - const end = tree.tokens[value.end.?]; - const raw = tree.source[start.start..end.end]; - _ = std.fmt.parseInt(i64, raw, 10) catch { - _ = std.fmt.parseFloat(f64, raw) catch { - break :hint ValueType.string; - }; - break :hint ValueType.float; - }; - break :hint ValueType.int; - } else null; - - for (list.values.items) |elem| { - const value = try Value.fromNode(arena, tree, elem, hint); - out_list.appendAssumeCapacity(value); - } - } - - return Value{ .list = out_list.toOwnedSlice() }; - } else if (node.cast(Node.Value)) |value| { - const start = tree.tokens[value.start.?]; - const end = tree.tokens[value.end.?]; - const raw = tree.source[start.start..end.end]; - - if (type_hint) |hint| { - return switch (hint) { - .int => Value{ .int = try std.fmt.parseInt(i64, raw, 10) }, - .float => Value{ .float = try std.fmt.parseFloat(f64, raw) }, - .string => Value{ .string = try arena.dupe(u8, value.string_value.items) }, - else => unreachable, - }; - } - - try_int: { - // TODO infer base for int - const int = std.fmt.parseInt(i64, raw, 10) catch break :try_int; - return Value{ .int = int }; - } - try_float: { - const float = std.fmt.parseFloat(f64, raw) catch break :try_float; - return Value{ .float = float }; - } - return Value{ .string = try arena.dupe(u8, raw) }; - } else { - log.err("Unexpected node type: {}", .{node.tag}); - return error.UnexpectedNodeType; - } - } -}; - -pub const Yaml = struct { - arena: ArenaAllocator, - tree: ?Tree = null, - docs: std.ArrayList(Value), - - pub fn deinit(self: *Yaml) void { - self.arena.deinit(); - } - - pub fn stringify(self: Yaml, writer: anytype) !void { - for (self.docs.items) |doc| { - // if (doc.directive) |directive| { - // try writer.print("--- !{s}\n", .{directive}); - // } - try doc.stringify(writer, .{}); - // if (doc.directive != null) { - // try writer.writeAll("...\n"); - // } - } - } - - pub fn load(allocator: Allocator, source: []const u8) !Yaml { - var arena = ArenaAllocator.init(allocator); - - var tree = Tree.init(arena.allocator()); - try tree.parse(source); - - var docs = std.ArrayList(Value).init(arena.allocator()); - try docs.ensureUnusedCapacity(tree.docs.items.len); - - for (tree.docs.items) |node| { - const value = try Value.fromNode(arena.allocator(), &tree, node, null); - docs.appendAssumeCapacity(value); - } - - return Yaml{ - .arena = arena, - .tree = tree, - .docs = docs, - }; - } - - pub const Error = error{ - Unimplemented, - TypeMismatch, - StructFieldMissing, - ArraySizeMismatch, - UntaggedUnion, - UnionTagMissing, - Overflow, - OutOfMemory, - }; - - pub fn parse(self: *Yaml, comptime T: type) Error!T { - if (self.docs.items.len == 0) { - if (@typeInfo(T) == .Void) return {}; - return error.TypeMismatch; - } - - if (self.docs.items.len == 1) { - return self.parseValue(T, self.docs.items[0]); - } - - switch (@typeInfo(T)) { - .Array => |info| { - var parsed: T = undefined; - for (self.docs.items) |doc, i| { - parsed[i] = try self.parseValue(info.child, doc); - } - return parsed; - }, - .Pointer => |info| { - switch (info.size) { - .Slice => { - var parsed = try self.arena.allocator().alloc(info.child, self.docs.items.len); - for (self.docs.items) |doc, i| { - parsed[i] = try self.parseValue(info.child, doc); - } - return parsed; - }, - else => return error.TypeMismatch, - } - }, - .Union => return error.Unimplemented, - else => return error.TypeMismatch, - } - } - - fn parseValue(self: *Yaml, comptime T: type, value: Value) Error!T { - return switch (@typeInfo(T)) { - .Int => math.cast(T, try 
value.asInt()), - .Float => math.lossyCast(T, try value.asFloat()), - .Struct => self.parseStruct(T, try value.asMap()), - .Union => self.parseUnion(T, value), - .Array => self.parseArray(T, try value.asList()), - .Pointer => { - if (value.asList()) |list| { - return self.parsePointer(T, .{ .list = list }); - } else |_| { - return self.parsePointer(T, .{ .string = try value.asString() }); - } - }, - .Void => error.TypeMismatch, - .Optional => unreachable, - else => error.Unimplemented, - }; - } - - fn parseUnion(self: *Yaml, comptime T: type, value: Value) Error!T { - const union_info = @typeInfo(T).Union; - - if (union_info.tag_type) |_| { - inline for (union_info.fields) |field| { - if (self.parseValue(field.field_type, value)) |u_value| { - return @unionInit(T, field.name, u_value); - } else |err| { - if (@as(@TypeOf(err) || error{TypeMismatch}, err) != error.TypeMismatch) return err; - } - } - } else return error.UntaggedUnion; - - return error.UnionTagMissing; - } - - fn parseOptional(self: *Yaml, comptime T: type, value: ?Value) Error!T { - const unwrapped = value orelse return null; - const opt_info = @typeInfo(T).Optional; - return @as(T, try self.parseValue(opt_info.child, unwrapped)); - } - - fn parseStruct(self: *Yaml, comptime T: type, map: Map) Error!T { - const struct_info = @typeInfo(T).Struct; - var parsed: T = undefined; - - inline for (struct_info.fields) |field| { - const value: ?Value = map.get(field.name) orelse blk: { - const field_name = try mem.replaceOwned(u8, self.arena.allocator(), field.name, "_", "-"); - break :blk map.get(field_name); - }; - - if (@typeInfo(field.field_type) == .Optional) { - @field(parsed, field.name) = try self.parseOptional(field.field_type, value); - continue; - } - - const unwrapped = value orelse { - log.err("missing struct field: {s}: {s}", .{ field.name, @typeName(field.field_type) }); - return error.StructFieldMissing; - }; - @field(parsed, field.name) = try self.parseValue(field.field_type, unwrapped); - } - - return parsed; - } - - fn parsePointer(self: *Yaml, comptime T: type, value: Value) Error!T { - const ptr_info = @typeInfo(T).Pointer; - const arena = self.arena.allocator(); - - switch (ptr_info.size) { - .Slice => { - const child_info = @typeInfo(ptr_info.child); - if (child_info == .Int and child_info.Int.bits == 8) { - return value.asString(); - } - - var parsed = try arena.alloc(ptr_info.child, value.list.len); - for (value.list) |elem, i| { - parsed[i] = try self.parseValue(ptr_info.child, elem); - } - return parsed; - }, - else => return error.Unimplemented, - } - } - - fn parseArray(self: *Yaml, comptime T: type, list: List) Error!T { - const array_info = @typeInfo(T).Array; - if (array_info.len != list.len) return error.ArraySizeMismatch; - - var parsed: T = undefined; - for (list) |elem, i| { - parsed[i] = try self.parseValue(array_info.child, elem); - } - - return parsed; - } -}; - -test { - testing.refAllDecls(@This()); -} - -test "simple list" { - const source = - \\- a - \\- b - \\- c - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectEqual(yaml.docs.items.len, 1); - - const list = yaml.docs.items[0].list; - try testing.expectEqual(list.len, 3); - - try testing.expect(mem.eql(u8, list[0].string, "a")); - try testing.expect(mem.eql(u8, list[1].string, "b")); - try testing.expect(mem.eql(u8, list[2].string, "c")); -} - -test "simple list typed as array of strings" { - const source = - \\- a - \\- b - \\- c - ; - - var yaml = try Yaml.load(testing.allocator, 
source); - defer yaml.deinit(); - - try testing.expectEqual(yaml.docs.items.len, 1); - - const arr = try yaml.parse([3][]const u8); - try testing.expectEqual(arr.len, 3); - try testing.expect(mem.eql(u8, arr[0], "a")); - try testing.expect(mem.eql(u8, arr[1], "b")); - try testing.expect(mem.eql(u8, arr[2], "c")); -} - -test "simple list typed as array of ints" { - const source = - \\- 0 - \\- 1 - \\- 2 - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectEqual(yaml.docs.items.len, 1); - - const arr = try yaml.parse([3]u8); - try testing.expectEqual(arr.len, 3); - try testing.expectEqual(arr[0], 0); - try testing.expectEqual(arr[1], 1); - try testing.expectEqual(arr[2], 2); -} - -test "list of mixed sign integer" { - const source = - \\- 0 - \\- -1 - \\- 2 - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectEqual(yaml.docs.items.len, 1); - - const arr = try yaml.parse([3]i8); - try testing.expectEqual(arr.len, 3); - try testing.expectEqual(arr[0], 0); - try testing.expectEqual(arr[1], -1); - try testing.expectEqual(arr[2], 2); -} - -test "simple map untyped" { - const source = - \\a: 0 - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectEqual(yaml.docs.items.len, 1); - - const map = yaml.docs.items[0].map; - try testing.expect(map.contains("a")); - try testing.expectEqual(map.get("a").?.int, 0); -} - -test "simple map typed" { - const source = - \\a: 0 - \\b: hello there - \\c: 'wait, what?' - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - const simple = try yaml.parse(struct { a: usize, b: []const u8, c: []const u8 }); - try testing.expectEqual(simple.a, 0); - try testing.expect(mem.eql(u8, simple.b, "hello there")); - try testing.expect(mem.eql(u8, simple.c, "wait, what?")); -} - -test "typed nested structs" { - const source = - \\a: - \\ b: hello there - \\ c: 'wait, what?' - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - const simple = try yaml.parse(struct { - a: struct { - b: []const u8, - c: []const u8, - }, - }); - try testing.expect(mem.eql(u8, simple.a.b, "hello there")); - try testing.expect(mem.eql(u8, simple.a.c, "wait, what?")); -} - -test "single quoted string" { - const source = - \\- 'hello' - \\- 'here''s an escaped quote' - \\- 'newlines and tabs\nare not\tsupported' - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - const arr = try yaml.parse([3][]const u8); - try testing.expectEqual(arr.len, 3); - try testing.expect(mem.eql(u8, arr[0], "hello")); - try testing.expect(mem.eql(u8, arr[1], "here's an escaped quote")); - try testing.expect(mem.eql(u8, arr[2], "newlines and tabs\\nare not\\tsupported")); -} - -test "double quoted string" { - const source = - \\- "hello" - \\- "\"here\" are some escaped quotes" - \\- "newlines and tabs\nare\tsupported" - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - const arr = try yaml.parse([3][]const u8); - try testing.expectEqual(arr.len, 3); - try testing.expect(mem.eql(u8, arr[0], "hello")); - try testing.expect(mem.eql(u8, arr[1], - \\"here" are some escaped quotes - )); - try testing.expect(mem.eql(u8, arr[2], - \\newlines and tabs - \\are supported - )); -} - -test "multidoc typed as a slice of structs" { - const source = - \\--- - \\a: 0 - \\--- - \\a: 1 - \\... 
- ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - { - const result = try yaml.parse([2]struct { a: usize }); - try testing.expectEqual(result.len, 2); - try testing.expectEqual(result[0].a, 0); - try testing.expectEqual(result[1].a, 1); - } - - { - const result = try yaml.parse([]struct { a: usize }); - try testing.expectEqual(result.len, 2); - try testing.expectEqual(result[0].a, 0); - try testing.expectEqual(result[1].a, 1); - } -} - -test "multidoc typed as a struct is an error" { - const source = - \\--- - \\a: 0 - \\--- - \\b: 1 - \\... - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize })); - try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { b: usize })); - try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(struct { a: usize, b: usize })); -} - -test "multidoc typed as a slice of structs with optionals" { - const source = - \\--- - \\a: 0 - \\c: 1.0 - \\--- - \\a: 1 - \\b: different field - \\... - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - const result = try yaml.parse([]struct { a: usize, b: ?[]const u8, c: ?f16 }); - try testing.expectEqual(result.len, 2); - - try testing.expectEqual(result[0].a, 0); - try testing.expect(result[0].b == null); - try testing.expect(result[0].c != null); - try testing.expectEqual(result[0].c.?, 1.0); - - try testing.expectEqual(result[1].a, 1); - try testing.expect(result[1].b != null); - try testing.expect(mem.eql(u8, result[1].b.?, "different field")); - try testing.expect(result[1].c == null); -} - -test "empty yaml can be represented as void" { - const source = ""; - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - const result = try yaml.parse(void); - try testing.expect(@TypeOf(result) == void); -} - -test "nonempty yaml cannot be represented as void" { - const source = - \\a: b - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectError(Yaml.Error.TypeMismatch, yaml.parse(void)); -} - -test "typed array size mismatch" { - const source = - \\- 0 - \\- 0 - ; - - var yaml = try Yaml.load(testing.allocator, source); - defer yaml.deinit(); - - try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([1]usize)); - try testing.expectError(Yaml.Error.ArraySizeMismatch, yaml.parse([5]usize)); -} diff --git a/benchmark/libs/zig-yaml/src/parse.zig b/benchmark/libs/zig-yaml/src/parse.zig deleted file mode 100644 index dd231f8..0000000 --- a/benchmark/libs/zig-yaml/src/parse.zig +++ /dev/null @@ -1,745 +0,0 @@ -const std = @import("std"); -const assert = std.debug.assert; -const log = std.log.scoped(.parse); -const mem = std.mem; -const testing = std.testing; - -const Allocator = mem.Allocator; -const Tokenizer = @import("Tokenizer.zig"); -const Token = Tokenizer.Token; -const TokenIndex = Tokenizer.TokenIndex; -const TokenIterator = Tokenizer.TokenIterator; - -pub const ParseError = error{ - MalformedYaml, - NestedDocuments, - UnexpectedTag, - UnexpectedEof, - UnexpectedToken, - Unhandled, -} || Allocator.Error; - -pub const Node = struct { - tag: Tag, - tree: *const Tree, - - pub const Tag = enum { - doc, - map, - list, - value, - }; - - pub fn cast(self: *const Node, comptime T: type) ?*const T { - if (self.tag != T.base_tag) { - return null; - } - return @fieldParentPtr(T, "base", self); - } - - pub fn deinit(self: *Node, allocator: Allocator) 
void { - switch (self.tag) { - .doc => @fieldParentPtr(Node.Doc, "base", self).deinit(allocator), - .map => @fieldParentPtr(Node.Map, "base", self).deinit(allocator), - .list => @fieldParentPtr(Node.List, "base", self).deinit(allocator), - .value => @fieldParentPtr(Node.Value, "base", self).deinit(allocator), - } - } - - pub fn format( - self: *const Node, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - return switch (self.tag) { - .doc => @fieldParentPtr(Node.Doc, "base", self).format(fmt, options, writer), - .map => @fieldParentPtr(Node.Map, "base", self).format(fmt, options, writer), - .list => @fieldParentPtr(Node.List, "base", self).format(fmt, options, writer), - .value => @fieldParentPtr(Node.Value, "base", self).format(fmt, options, writer), - }; - } - - pub const Doc = struct { - base: Node = Node{ .tag = Tag.doc, .tree = undefined }, - start: ?TokenIndex = null, - end: ?TokenIndex = null, - directive: ?TokenIndex = null, - value: ?*Node = null, - - pub const base_tag: Node.Tag = .doc; - - pub fn deinit(self: *Doc, allocator: Allocator) void { - if (self.value) |node| { - node.deinit(allocator); - allocator.destroy(node); - } - } - - pub fn format( - self: *const Doc, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; - if (self.directive) |id| { - try std.fmt.format(writer, "{{ ", .{}); - const directive = self.base.tree.tokens[id]; - try std.fmt.format(writer, ".directive = {s}, ", .{ - self.base.tree.source[directive.start..directive.end], - }); - } - if (self.value) |node| { - try std.fmt.format(writer, "{}", .{node}); - } - if (self.directive != null) { - try std.fmt.format(writer, " }}", .{}); - } - } - }; - - pub const Map = struct { - base: Node = Node{ .tag = Tag.map, .tree = undefined }, - start: ?TokenIndex = null, - end: ?TokenIndex = null, - values: std.ArrayListUnmanaged(Entry) = .{}, - - pub const base_tag: Node.Tag = .map; - - pub const Entry = struct { - key: TokenIndex, - value: *Node, - }; - - pub fn deinit(self: *Map, allocator: Allocator) void { - for (self.values.items) |entry| { - entry.value.deinit(allocator); - allocator.destroy(entry.value); - } - self.values.deinit(allocator); - } - - pub fn format( - self: *const Map, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; - try std.fmt.format(writer, "{{ ", .{}); - for (self.values.items) |entry| { - const key = self.base.tree.tokens[entry.key]; - try std.fmt.format(writer, "{s} => {}, ", .{ - self.base.tree.source[key.start..key.end], - entry.value, - }); - } - return std.fmt.format(writer, " }}", .{}); - } - }; - - pub const List = struct { - base: Node = Node{ .tag = Tag.list, .tree = undefined }, - start: ?TokenIndex = null, - end: ?TokenIndex = null, - values: std.ArrayListUnmanaged(*Node) = .{}, - - pub const base_tag: Node.Tag = .list; - - pub fn deinit(self: *List, allocator: Allocator) void { - for (self.values.items) |node| { - node.deinit(allocator); - allocator.destroy(node); - } - self.values.deinit(allocator); - } - - pub fn format( - self: *const List, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; - try std.fmt.format(writer, "[ ", .{}); - for (self.values.items) |node| { - try std.fmt.format(writer, "{}, ", .{node}); - } - return std.fmt.format(writer, " ]", .{}); - } - }; - - pub const Value = struct { - base: Node = Node{ .tag = 
Tag.value, .tree = undefined }, - start: ?TokenIndex = null, - end: ?TokenIndex = null, - string_value: std.ArrayListUnmanaged(u8) = .{}, - - pub const base_tag: Node.Tag = .value; - - pub fn deinit(self: *Value, allocator: Allocator) void { - self.string_value.deinit(allocator); - } - - pub fn format( - self: *const Value, - comptime fmt: []const u8, - options: std.fmt.FormatOptions, - writer: anytype, - ) !void { - _ = options; - _ = fmt; - const start = self.base.tree.tokens[self.start.?]; - const end = self.base.tree.tokens[self.end.?]; - return std.fmt.format(writer, "{s}", .{ - self.base.tree.source[start.start..end.end], - }); - } - }; -}; - -pub const Tree = struct { - allocator: Allocator, - source: []const u8, - tokens: []Token, - docs: std.ArrayListUnmanaged(*Node) = .{}, - - pub fn init(allocator: Allocator) Tree { - return .{ - .allocator = allocator, - .source = undefined, - .tokens = undefined, - }; - } - - pub fn deinit(self: *Tree) void { - self.allocator.free(self.tokens); - for (self.docs.items) |doc| { - doc.deinit(self.allocator); - self.allocator.destroy(doc); - } - self.docs.deinit(self.allocator); - } - - pub fn parse(self: *Tree, source: []const u8) !void { - var tokenizer = Tokenizer{ .buffer = source }; - var tokens = std.ArrayList(Token).init(self.allocator); - errdefer tokens.deinit(); - - while (true) { - const token = tokenizer.next(); - try tokens.append(token); - if (token.id == .Eof) break; - } - - self.source = source; - self.tokens = tokens.toOwnedSlice(); - - var it = TokenIterator{ .buffer = self.tokens }; - var parser = Parser{ - .allocator = self.allocator, - .tree = self, - .token_it = &it, - }; - defer parser.deinit(); - - try parser.scopes.append(self.allocator, .{ - .indent = 0, - }); - - while (true) { - if (parser.token_it.peek() == null) return; - const pos = parser.token_it.pos; - const token = parser.token_it.next(); - - log.debug("Next token: {}, {}", .{ pos, token }); - - switch (token.id) { - .Space, .Comment, .NewLine => {}, - .Eof => break, - else => { - const doc = try parser.doc(pos); - try self.docs.append(self.allocator, &doc.base); - }, - } - } - } -}; - -const Parser = struct { - allocator: Allocator, - tree: *Tree, - token_it: *TokenIterator, - scopes: std.ArrayListUnmanaged(Scope) = .{}, - - const Scope = struct { - indent: usize, - }; - - fn deinit(self: *Parser) void { - self.scopes.deinit(self.allocator); - } - - fn doc(self: *Parser, start: TokenIndex) ParseError!*Node.Doc { - const node = try self.allocator.create(Node.Doc); - errdefer self.allocator.destroy(node); - node.* = .{ - .start = start, - }; - node.base.tree = self.tree; - - self.token_it.seekTo(start); - - log.debug("Doc start: {}, {}", .{ start, self.tree.tokens[start] }); - - const explicit_doc: bool = if (self.eatToken(.DocStart)) |_| explicit_doc: { - if (self.eatToken(.Tag)) |_| { - node.directive = try self.expectToken(.Literal); - } - _ = try self.expectToken(.NewLine); - break :explicit_doc true; - } else false; - - while (true) { - const pos = self.token_it.pos; - const token = self.token_it.next(); - - log.debug("Next token: {}, {}", .{ pos, token }); - - switch (token.id) { - .Tag => { - return error.UnexpectedTag; - }, - .Literal, .SingleQuote, .DoubleQuote => { - _ = try self.expectToken(.MapValueInd); - const map_node = try self.map(pos); - node.value = &map_node.base; - }, - .SeqItemInd => { - const list_node = try self.list(pos); - node.value = &list_node.base; - }, - .FlowSeqStart => { - const list_node = try self.list_bracketed(pos); - 
node.value = &list_node.base; - }, - .DocEnd => { - if (explicit_doc) break; - return error.UnexpectedToken; - }, - .DocStart, .Eof => { - self.token_it.seekBy(-1); - break; - }, - else => { - return error.UnexpectedToken; - }, - } - } - - node.end = self.token_it.pos - 1; - - log.debug("Doc end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] }); - - return node; - } - - fn map(self: *Parser, start: TokenIndex) ParseError!*Node.Map { - const node = try self.allocator.create(Node.Map); - errdefer self.allocator.destroy(node); - node.* = .{ - .start = start, - }; - node.base.tree = self.tree; - - self.token_it.seekTo(start); - - log.debug("Map start: {}, {}", .{ start, self.tree.tokens[start] }); - log.debug("Current scope: {}", .{self.scopes.items[self.scopes.items.len - 1]}); - - while (true) { - // Parse key. - const key_pos = self.token_it.pos; - const key = self.token_it.next(); - switch (key.id) { - .Literal => {}, - else => { - self.token_it.seekBy(-1); - break; - }, - } - - log.debug("Map key: {}, '{s}'", .{ key, self.tree.source[key.start..key.end] }); - - // Separator - _ = try self.expectToken(.MapValueInd); - self.eatCommentsAndSpace(); - - // Parse value. - const value: *Node = value: { - if (self.eatToken(.NewLine)) |_| { - // Explicit, complex value such as list or map. - try self.openScope(); - const value_pos = self.token_it.pos; - const value = self.token_it.next(); - switch (value.id) { - .Literal, .SingleQuote, .DoubleQuote => { - // Assume nested map. - const map_node = try self.map(value_pos); - break :value &map_node.base; - }, - .SeqItemInd => { - // Assume list of values. - const list_node = try self.list(value_pos); - break :value &list_node.base; - }, - else => { - log.err("{}", .{key}); - return error.Unhandled; - }, - } - } else { - const value_pos = self.token_it.pos; - const value = self.token_it.next(); - switch (value.id) { - .Literal, .SingleQuote, .DoubleQuote => { - // Assume leaf value. - const leaf_node = try self.leaf_value(value_pos); - break :value &leaf_node.base; - }, - .FlowSeqStart => { - const list_node = try self.list_bracketed(value_pos); - break :value &list_node.base; - }, - else => { - log.err("{}", .{key}); - return error.Unhandled; - }, - } - } - }; - log.debug("Map value: {}", .{value}); - - try node.values.append(self.allocator, .{ - .key = key_pos, - .value = value, - }); - - if (self.eatToken(.NewLine)) |_| { - if (try self.closeScope()) { - break; - } - } - } - - node.end = self.token_it.pos - 1; - - log.debug("Map end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] 
}); - - return node; - } - - fn list(self: *Parser, start: TokenIndex) ParseError!*Node.List { - const node = try self.allocator.create(Node.List); - errdefer self.allocator.destroy(node); - node.* = .{ - .start = start, - }; - node.base.tree = self.tree; - - self.token_it.seekTo(start); - - log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] }); - log.debug("Current scope: {}", .{self.scopes.items[self.scopes.items.len - 1]}); - - while (true) { - _ = self.eatToken(.SeqItemInd) orelse { - _ = try self.closeScope(); - break; - }; - self.eatCommentsAndSpace(); - - const pos = self.token_it.pos; - const token = self.token_it.next(); - const value: *Node = value: { - switch (token.id) { - .Literal, .SingleQuote, .DoubleQuote => { - if (self.eatToken(.MapValueInd)) |_| { - if (self.eatToken(.NewLine)) |_| { - try self.openScope(); - } - // nested map - const map_node = try self.map(pos); - break :value &map_node.base; - } else { - // standalone (leaf) value - const leaf_node = try self.leaf_value(pos); - break :value &leaf_node.base; - } - }, - .FlowSeqStart => { - const list_node = try self.list_bracketed(pos); - break :value &list_node.base; - }, - else => { - log.err("{}", .{token}); - return error.Unhandled; - }, - } - }; - try node.values.append(self.allocator, value); - - _ = self.eatToken(.NewLine); - } - - node.end = self.token_it.pos - 1; - - log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] }); - - return node; - } - - fn list_bracketed(self: *Parser, start: TokenIndex) ParseError!*Node.List { - const node = try self.allocator.create(Node.List); - errdefer self.allocator.destroy(node); - node.* = .{ - .start = start, - }; - node.base.tree = self.tree; - - self.token_it.seekTo(start); - - log.debug("List start: {}, {}", .{ start, self.tree.tokens[start] }); - log.debug("Current scope: {}", .{self.scopes.items[self.scopes.items.len - 1]}); - - _ = try self.expectToken(.FlowSeqStart); - - while (true) { - _ = self.eatToken(.NewLine); - self.eatCommentsAndSpace(); - - const pos = self.token_it.pos; - const token = self.token_it.next(); - - log.debug("Next token: {}, {}", .{ pos, token }); - - const value: *Node = value: { - switch (token.id) { - .FlowSeqStart => { - const list_node = try self.list_bracketed(pos); - break :value &list_node.base; - }, - .FlowSeqEnd => { - break; - }, - .Literal, .SingleQuote, .DoubleQuote => { - const leaf_node = try self.leaf_value(pos); - _ = self.eatToken(.Comma); - // TODO newline - break :value &leaf_node.base; - }, - else => { - log.err("{}", .{token}); - return error.Unhandled; - }, - } - }; - try node.values.append(self.allocator, value); - } - - node.end = self.token_it.pos - 1; - - log.debug("List end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] }); - - return node; - } - - fn leaf_value(self: *Parser, start: TokenIndex) ParseError!*Node.Value { - const node = try self.allocator.create(Node.Value); - errdefer self.allocator.destroy(node); - node.* = .{ - .start = start, - .string_value = .{}, - }; - errdefer node.string_value.deinit(self.allocator); - - node.base.tree = self.tree; - - self.token_it.seekTo(start); - - log.debug("Leaf start: {}, {}", .{ node.start.?, self.tree.tokens[node.start.?] }); - - parse: { - if (self.eatToken(.SingleQuote)) |_| { - node.start = node.start.? 
+ 1; - while (true) { - const tok = self.token_it.next(); - switch (tok.id) { - .SingleQuote => { - node.end = self.token_it.pos - 2; - break :parse; - }, - .NewLine => return error.UnexpectedToken, - .EscapeSeq => { - try node.string_value.append(self.allocator, self.tree.source[tok.start + 1]); - }, - else => { - try node.string_value.appendSlice(self.allocator, self.tree.source[tok.start..tok.end]); - }, - } - } - } - - if (self.eatToken(.DoubleQuote)) |_| { - node.start = node.start.? + 1; - while (true) { - const tok = self.token_it.next(); - switch (tok.id) { - .DoubleQuote => { - node.end = self.token_it.pos - 2; - break :parse; - }, - .NewLine => return error.UnexpectedToken, - .EscapeSeq => { - switch (self.tree.source[tok.start + 1]) { - 'n' => { - try node.string_value.append(self.allocator, '\n'); - }, - 't' => { - try node.string_value.append(self.allocator, '\t'); - }, - '"' => { - try node.string_value.append(self.allocator, '"'); - }, - else => {}, - } - }, - else => { - try node.string_value.appendSlice(self.allocator, self.tree.source[tok.start..tok.end]); - }, - } - } - } - - // TODO handle multiline strings in new block scope - while (true) { - const tok = self.token_it.next(); - switch (tok.id) { - .Literal => {}, - .Space => { - const trailing = self.token_it.pos - 2; - self.eatCommentsAndSpace(); - if (self.token_it.peek()) |peek| { - if (peek.id != .Literal) { - node.end = trailing; - const start_token = self.tree.tokens[node.start.?]; - const end_token = self.tree.tokens[node.end.?]; - const raw = self.tree.source[start_token.start..end_token.end]; - try node.string_value.appendSlice(self.allocator, raw); - break; - } - } - }, - else => { - self.token_it.seekBy(-1); - node.end = self.token_it.pos - 1; - const start_token = self.tree.tokens[node.start.?]; - const end_token = self.tree.tokens[node.end.?]; - const raw = self.tree.source[start_token.start..end_token.end]; - try node.string_value.appendSlice(self.allocator, raw); - break; - }, - } - } - } - - log.debug("Leaf end: {}, {}", .{ node.end.?, self.tree.tokens[node.end.?] }); - - return node; - } - - fn openScope(self: *Parser) !void { - const peek = self.token_it.peek() orelse return error.UnexpectedEof; - if (peek.id != .Space and peek.id != .Tab) { - // No need to open scope. 
- return; - } - const indent = self.token_it.next().count.?; - const prev_scope = self.scopes.items[self.scopes.items.len - 1]; - if (indent < prev_scope.indent) { - return error.MalformedYaml; - } - - log.debug("Opening scope...", .{}); - - try self.scopes.append(self.allocator, .{ - .indent = indent, - }); - } - - fn closeScope(self: *Parser) !bool { - const indent = indent: { - const peek = self.token_it.peek() orelse return error.UnexpectedEof; - switch (peek.id) { - .Space, .Tab => { - break :indent self.token_it.next().count.?; - }, - else => { - break :indent 0; - }, - } - }; - - const scope = self.scopes.items[self.scopes.items.len - 1]; - if (indent < scope.indent) { - log.debug("Closing scope...", .{}); - _ = self.scopes.pop(); - return true; - } - - return false; - } - - fn eatCommentsAndSpace(self: *Parser) void { - while (true) { - _ = self.token_it.peek() orelse return; - const token = self.token_it.next(); - switch (token.id) { - .Comment, .Space => {}, - else => { - self.token_it.seekBy(-1); - break; - }, - } - } - } - - fn eatToken(self: *Parser, id: Token.Id) ?TokenIndex { - while (true) { - const pos = self.token_it.pos; - _ = self.token_it.peek() orelse return null; - const token = self.token_it.next(); - switch (token.id) { - .Comment, .Space => continue, - else => |next_id| if (next_id == id) { - return pos; - } else { - self.token_it.seekTo(pos); - return null; - }, - } - } - } - - fn expectToken(self: *Parser, id: Token.Id) ParseError!TokenIndex { - return self.eatToken(id) orelse error.UnexpectedToken; - } -}; - -test { - _ = @import("parse/test.zig"); -} diff --git a/benchmark/libs/zig-yaml/src/parse/test.zig b/benchmark/libs/zig-yaml/src/parse/test.zig deleted file mode 100644 index b310a5c..0000000 --- a/benchmark/libs/zig-yaml/src/parse/test.zig +++ /dev/null @@ -1,558 +0,0 @@ -const std = @import("std"); -const mem = std.mem; -const testing = std.testing; -const parse = @import("../parse.zig"); - -const Node = parse.Node; -const Tree = parse.Tree; - -test "explicit doc" { - const source = - \\--- !tapi-tbd - \\tbd-version: 4 - \\abc-version: 5 - \\... 
- ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - - const directive = tree.tokens[doc.directive.?]; - try testing.expectEqual(directive.id, .Literal); - try testing.expect(mem.eql(u8, "tapi-tbd", tree.source[directive.start..directive.end])); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 5); - try testing.expectEqual(map.end.?, 14); - try testing.expectEqual(map.values.items.len, 2); - - { - const entry = map.values.items[0]; - - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "tbd-version", tree.source[key.start..key.end])); - - const value = entry.value.cast(Node.Value).?; - const value_tok = tree.tokens[value.start.?]; - try testing.expectEqual(value_tok.id, .Literal); - try testing.expect(mem.eql(u8, "4", tree.source[value_tok.start..value_tok.end])); - } - - { - const entry = map.values.items[1]; - - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "abc-version", tree.source[key.start..key.end])); - - const value = entry.value.cast(Node.Value).?; - const value_tok = tree.tokens[value.start.?]; - try testing.expectEqual(value_tok.id, .Literal); - try testing.expect(mem.eql(u8, "5", tree.source[value_tok.start..value_tok.end])); - } -} - -test "leaf in quotes" { - const source = - \\key1: no quotes - \\key2: 'single quoted' - \\key3: "double quoted" - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - try testing.expect(doc.directive == null); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 0); - try testing.expectEqual(map.end.?, tree.tokens.len - 2); - try testing.expectEqual(map.values.items.len, 3); - - { - const entry = map.values.items[0]; - - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql( - u8, - "key1", - tree.source[key.start..key.end], - )); - - const value = entry.value.cast(Node.Value).?; - const start = tree.tokens[value.start.?]; - const end = tree.tokens[value.end.?]; - try testing.expectEqual(start.id, .Literal); - try testing.expectEqual(end.id, .Literal); - try testing.expect(mem.eql( - u8, - "no quotes", - tree.source[start.start..end.end], - )); - } -} - -test "nested maps" { - const source = - \\key1: - \\ key1_1 : value1_1 - \\ key1_2 : value1_2 - \\key2 : value2 - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - try testing.expect(doc.directive == null); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = 
doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 0); - try testing.expectEqual(map.end.?, tree.tokens.len - 2); - try testing.expectEqual(map.values.items.len, 2); - - { - const entry = map.values.items[0]; - - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end])); - - const nested_map = entry.value.cast(Node.Map).?; - try testing.expectEqual(nested_map.start.?, 4); - try testing.expectEqual(nested_map.end.?, 16); - try testing.expectEqual(nested_map.values.items.len, 2); - - { - const nested_entry = nested_map.values.items[0]; - - const nested_key = tree.tokens[nested_entry.key]; - try testing.expectEqual(nested_key.id, .Literal); - try testing.expect(mem.eql( - u8, - "key1_1", - tree.source[nested_key.start..nested_key.end], - )); - - const nested_value = nested_entry.value.cast(Node.Value).?; - const nested_value_tok = tree.tokens[nested_value.start.?]; - try testing.expectEqual(nested_value_tok.id, .Literal); - try testing.expect(mem.eql( - u8, - "value1_1", - tree.source[nested_value_tok.start..nested_value_tok.end], - )); - } - - { - const nested_entry = nested_map.values.items[1]; - - const nested_key = tree.tokens[nested_entry.key]; - try testing.expectEqual(nested_key.id, .Literal); - try testing.expect(mem.eql( - u8, - "key1_2", - tree.source[nested_key.start..nested_key.end], - )); - - const nested_value = nested_entry.value.cast(Node.Value).?; - const nested_value_tok = tree.tokens[nested_value.start.?]; - try testing.expectEqual(nested_value_tok.id, .Literal); - try testing.expect(mem.eql( - u8, - "value1_2", - tree.source[nested_value_tok.start..nested_value_tok.end], - )); - } - } - - { - const entry = map.values.items[1]; - - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "key2", tree.source[key.start..key.end])); - - const value = entry.value.cast(Node.Value).?; - const value_tok = tree.tokens[value.start.?]; - try testing.expectEqual(value_tok.id, .Literal); - try testing.expect(mem.eql( - u8, - "value2", - tree.source[value_tok.start..value_tok.end], - )); - } -} - -test "map of list of values" { - const source = - \\ints: - \\ - 0 - \\ - 1 - \\ - 2 - ; - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 0); - try testing.expectEqual(map.end.?, tree.tokens.len - 2); - try testing.expectEqual(map.values.items.len, 1); - - const entry = map.values.items[0]; - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "ints", tree.source[key.start..key.end])); - - const value = entry.value.cast(Node.List).?; - try testing.expectEqual(value.start.?, 4); - try testing.expectEqual(value.end.?, tree.tokens.len - 2); - try testing.expectEqual(value.values.items.len, 3); - - { - const elem = value.values.items[0].cast(Node.Value).?; - const leaf = tree.tokens[elem.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "0", tree.source[leaf.start..leaf.end])); - } - - { - const elem = 
value.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[elem.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "1", tree.source[leaf.start..leaf.end])); - } - - { - const elem = value.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[elem.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "2", tree.source[leaf.start..leaf.end])); - } -} - -test "map of list of maps" { - const source = - \\key1: - \\- key2 : value2 - \\- key3 : value3 - \\- key4 : value4 - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 0); - try testing.expectEqual(map.end.?, tree.tokens.len - 2); - try testing.expectEqual(map.values.items.len, 1); - - const entry = map.values.items[0]; - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "key1", tree.source[key.start..key.end])); - - const value = entry.value.cast(Node.List).?; - try testing.expectEqual(value.start.?, 3); - try testing.expectEqual(value.end.?, tree.tokens.len - 2); - try testing.expectEqual(value.values.items.len, 3); - - { - const elem = value.values.items[0].cast(Node.Map).?; - const nested = elem.values.items[0]; - const nested_key = tree.tokens[nested.key]; - try testing.expectEqual(nested_key.id, .Literal); - try testing.expect(mem.eql(u8, "key2", tree.source[nested_key.start..nested_key.end])); - - const nested_v = nested.value.cast(Node.Value).?; - const leaf = tree.tokens[nested_v.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "value2", tree.source[leaf.start..leaf.end])); - } - - { - const elem = value.values.items[1].cast(Node.Map).?; - const nested = elem.values.items[0]; - const nested_key = tree.tokens[nested.key]; - try testing.expectEqual(nested_key.id, .Literal); - try testing.expect(mem.eql(u8, "key3", tree.source[nested_key.start..nested_key.end])); - - const nested_v = nested.value.cast(Node.Value).?; - const leaf = tree.tokens[nested_v.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "value3", tree.source[leaf.start..leaf.end])); - } - - { - const elem = value.values.items[2].cast(Node.Map).?; - const nested = elem.values.items[0]; - const nested_key = tree.tokens[nested.key]; - try testing.expectEqual(nested_key.id, .Literal); - try testing.expect(mem.eql(u8, "key4", tree.source[nested_key.start..nested_key.end])); - - const nested_v = nested.value.cast(Node.Value).?; - const leaf = tree.tokens[nested_v.start.?]; - try testing.expectEqual(leaf.id, .Literal); - try testing.expect(mem.eql(u8, "value4", tree.source[leaf.start..leaf.end])); - } -} - -test "list of lists" { - const source = - \\- [name , hr, avg ] - \\- [Mark McGwire , 65, 0.278] - \\- [Sammy Sosa , 63, 0.288] - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len 
- 2); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .list); - - const list = doc.value.?.cast(Node.List).?; - try testing.expectEqual(list.start.?, 0); - try testing.expectEqual(list.end.?, tree.tokens.len - 2); - try testing.expectEqual(list.values.items.len, 3); - - { - try testing.expectEqual(list.values.items[0].tag, .list); - const nested = list.values.items[0].cast(Node.List).?; - try testing.expectEqual(nested.values.items.len, 3); - - { - try testing.expectEqual(nested.values.items[0].tag, .value); - const value = nested.values.items[0].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(nested.values.items[1].tag, .value); - const value = nested.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(nested.values.items[2].tag, .value); - const value = nested.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end])); - } - } - - { - try testing.expectEqual(list.values.items[1].tag, .list); - const nested = list.values.items[1].cast(Node.List).?; - try testing.expectEqual(nested.values.items.len, 3); - - { - try testing.expectEqual(nested.values.items[0].tag, .value); - const value = nested.values.items[0].cast(Node.Value).?; - const start = tree.tokens[value.start.?]; - const end = tree.tokens[value.end.?]; - try testing.expect(mem.eql(u8, "Mark McGwire", tree.source[start.start..end.end])); - } - - { - try testing.expectEqual(nested.values.items[1].tag, .value); - const value = nested.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "65", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(nested.values.items[2].tag, .value); - const value = nested.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "0.278", tree.source[leaf.start..leaf.end])); - } - } - - { - try testing.expectEqual(list.values.items[2].tag, .list); - const nested = list.values.items[2].cast(Node.List).?; - try testing.expectEqual(nested.values.items.len, 3); - - { - try testing.expectEqual(nested.values.items[0].tag, .value); - const value = nested.values.items[0].cast(Node.Value).?; - const start = tree.tokens[value.start.?]; - const end = tree.tokens[value.end.?]; - try testing.expect(mem.eql(u8, "Sammy Sosa", tree.source[start.start..end.end])); - } - - { - try testing.expectEqual(nested.values.items[1].tag, .value); - const value = nested.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "63", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(nested.values.items[2].tag, .value); - const value = nested.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "0.288", tree.source[leaf.start..leaf.end])); - } - } -} - -test "inline list" { - const source = - \\[name , hr, avg ] - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try 
testing.expectEqual(doc.end.?, tree.tokens.len - 2); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .list); - - const list = doc.value.?.cast(Node.List).?; - try testing.expectEqual(list.start.?, 0); - try testing.expectEqual(list.end.?, tree.tokens.len - 2); - try testing.expectEqual(list.values.items.len, 3); - - { - try testing.expectEqual(list.values.items[0].tag, .value); - const value = list.values.items[0].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(list.values.items[1].tag, .value); - const value = list.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(list.values.items[2].tag, .value); - const value = list.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end])); - } -} - -test "inline list as mapping value" { - const source = - \\key : [ - \\ name , - \\ hr, avg ] - ; - - var tree = Tree.init(testing.allocator); - defer tree.deinit(); - try tree.parse(source); - - try testing.expectEqual(tree.docs.items.len, 1); - - const doc = tree.docs.items[0].cast(Node.Doc).?; - try testing.expectEqual(doc.start.?, 0); - try testing.expectEqual(doc.end.?, tree.tokens.len - 2); - - try testing.expect(doc.value != null); - try testing.expectEqual(doc.value.?.tag, .map); - - const map = doc.value.?.cast(Node.Map).?; - try testing.expectEqual(map.start.?, 0); - try testing.expectEqual(map.end.?, tree.tokens.len - 2); - try testing.expectEqual(map.values.items.len, 1); - - const entry = map.values.items[0]; - const key = tree.tokens[entry.key]; - try testing.expectEqual(key.id, .Literal); - try testing.expect(mem.eql(u8, "key", tree.source[key.start..key.end])); - - const list = entry.value.cast(Node.List).?; - try testing.expectEqual(list.start.?, 4); - try testing.expectEqual(list.end.?, tree.tokens.len - 2); - try testing.expectEqual(list.values.items.len, 3); - - { - try testing.expectEqual(list.values.items[0].tag, .value); - const value = list.values.items[0].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "name", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(list.values.items[1].tag, .value); - const value = list.values.items[1].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "hr", tree.source[leaf.start..leaf.end])); - } - - { - try testing.expectEqual(list.values.items[2].tag, .value); - const value = list.values.items[2].cast(Node.Value).?; - const leaf = tree.tokens[value.start.?]; - try testing.expect(mem.eql(u8, "avg", tree.source[leaf.start..leaf.end])); - } -} diff --git a/benchmark/libs/zig-yaml/test/multi_lib.tbd b/benchmark/libs/zig-yaml/test/multi_lib.tbd deleted file mode 100644 index 3e9788b..0000000 --- a/benchmark/libs/zig-yaml/test/multi_lib.tbd +++ /dev/null @@ -1,31 +0,0 @@ ---- !tapi-tbd -tbd-version: 4 -targets: [ x86_64-macos ] -uuids: - - target: x86_64-macos - value: F86CC732-D5E4-30B5-AA7D-167DF5EC2708 -install-name: '/usr/lib/libSystem.B.dylib' -current-version: 1292.60.1 -reexported-libraries: - - targets: [ x86_64-macos ] - libraries: [ '/usr/lib/system/libcache.dylib' ] -exports: - - targets: [ x86_64-macos ] - symbols: [ 
'R8289209$_close', 'R8289209$_fork' ] - - targets: [ x86_64-macos ] - symbols: [ ___crashreporter_info__, _libSystem_atfork_child ] ---- !tapi-tbd -tbd-version: 4 -targets: [ x86_64-macos ] -uuids: - - target: x86_64-macos - value: 2F7F7303-DB23-359E-85CD-8B2F93223E2A -install-name: '/usr/lib/system/libcache.dylib' -current-version: 83 -parent-umbrella: - - targets: [ x86_64-macos ] - umbrella: System -exports: - - targets: [ x86_64-macos ] - symbols: [ _cache_create, _cache_destroy ] -... diff --git a/benchmark/libs/zig-yaml/test/simple.yaml b/benchmark/libs/zig-yaml/test/simple.yaml deleted file mode 100644 index f12b0b7..0000000 --- a/benchmark/libs/zig-yaml/test/simple.yaml +++ /dev/null @@ -1,11 +0,0 @@ -names: [ John Doe, MacIntosh, Jane Austin ] -numbers: - - 10 - - -8 - - 6 -nested: - some: one - wick: john doe -finally: [ 8.17, - 19.78 , 17 , - 21 ] diff --git a/benchmark/libs/zig-yaml/test/single_lib.tbd b/benchmark/libs/zig-yaml/test/single_lib.tbd deleted file mode 100644 index 0164624..0000000 --- a/benchmark/libs/zig-yaml/test/single_lib.tbd +++ /dev/null @@ -1,33 +0,0 @@ ---- !tapi-tbd -tbd-version: 4 -targets: [ x86_64-macos, x86_64-maccatalyst, arm64-macos, arm64-maccatalyst, - arm64e-macos, arm64e-maccatalyst ] -uuids: - - target: x86_64-macos - value: F86CC732-D5E4-30B5-AA7D-167DF5EC2708 - - target: x86_64-maccatalyst - value: F86CC732-D5E4-30B5-AA7D-167DF5EC2708 - - target: arm64-macos - value: 00000000-0000-0000-0000-000000000000 - - target: arm64-maccatalyst - value: 00000000-0000-0000-0000-000000000000 - - target: arm64e-macos - value: A17E8744-051E-356E-8619-66F2A6E89AD4 - - target: arm64e-maccatalyst - value: A17E8744-051E-356E-8619-66F2A6E89AD4 -install-name: '/usr/lib/libSystem.B.dylib' -current-version: 1292.60.1 -reexported-libraries: - - targets: [ x86_64-macos, x86_64-maccatalyst, arm64-macos, arm64-maccatalyst, - arm64e-macos, arm64e-maccatalyst ] - libraries: [ '/usr/lib/system/libcache.dylib', '/usr/lib/system/libcommonCrypto.dylib', - '/usr/lib/system/libcompiler_rt.dylib', '/usr/lib/system/libcopyfile.dylib', - '/usr/lib/system/libxpc.dylib' ] -exports: - - targets: [ x86_64-maccatalyst, x86_64-macos ] - symbols: [ 'R8289209$_close', 'R8289209$_fork', 'R8289209$_fsync', 'R8289209$_getattrlist', - 'R8289209$_write' ] - - targets: [ x86_64-maccatalyst, x86_64-macos, arm64e-maccatalyst, arm64e-macos, - arm64-macos, arm64-maccatalyst ] - symbols: [ ___crashreporter_info__, _libSystem_atfork_child, _libSystem_atfork_parent, - _libSystem_atfork_prepare, _mach_init_routine ] diff --git a/benchmark/libs/zig-yaml/test/test.zig b/benchmark/libs/zig-yaml/test/test.zig deleted file mode 100644 index b4810d7..0000000 --- a/benchmark/libs/zig-yaml/test/test.zig +++ /dev/null @@ -1,310 +0,0 @@ -const std = @import("std"); -const mem = std.mem; -const testing = std.testing; - -const Allocator = mem.Allocator; -const Yaml = @import("yaml").Yaml; - -const gpa = testing.allocator; - -fn loadFromFile(file_path: []const u8) !Yaml { - const file = try std.fs.cwd().openFile(file_path, .{}); - defer file.close(); - - const source = try file.readToEndAlloc(gpa, std.math.maxInt(u32)); - defer gpa.free(source); - - return Yaml.load(gpa, source); -} - -test "simple" { - const Simple = struct { - names: []const []const u8, - numbers: []const i16, - nested: struct { - some: []const u8, - wick: []const u8, - }, - finally: [4]f16, - - pub fn eql(self: @This(), other: @This()) bool { - if (self.names.len != other.names.len) return false; - if (self.numbers.len != other.numbers.len) 
return false; - if (self.finally.len != other.finally.len) return false; - - for (self.names) |lhs, i| { - if (!mem.eql(u8, lhs, other.names[i])) return false; - } - - for (self.numbers) |lhs, i| { - if (lhs != other.numbers[i]) return false; - } - - for (self.finally) |lhs, i| { - if (lhs != other.finally[i]) return false; - } - - if (!mem.eql(u8, self.nested.some, other.nested.some)) return false; - if (!mem.eql(u8, self.nested.wick, other.nested.wick)) return false; - - return true; - } - }; - - var parsed = try loadFromFile("test/simple.yaml"); - defer parsed.deinit(); - - const result = try parsed.parse(Simple); - const expected = .{ - .names = &[_][]const u8{ "John Doe", "MacIntosh", "Jane Austin" }, - .numbers = &[_]i16{ 10, -8, 6 }, - .nested = .{ - .some = "one", - .wick = "john doe", - }, - .finally = [_]f16{ 8.17, 19.78, 17, 21 }, - }; - try testing.expect(result.eql(expected)); -} - -const LibTbd = struct { - tbd_version: u3, - targets: []const []const u8, - uuids: []const struct { - target: []const u8, - value: []const u8, - }, - install_name: []const u8, - current_version: union(enum) { - string: []const u8, - int: usize, - }, - reexported_libraries: ?[]const struct { - targets: []const []const u8, - libraries: []const []const u8, - }, - parent_umbrella: ?[]const struct { - targets: []const []const u8, - umbrella: []const u8, - }, - exports: []const struct { - targets: []const []const u8, - symbols: []const []const u8, - }, - - pub fn eql(self: LibTbd, other: LibTbd) bool { - if (self.tbd_version != other.tbd_version) return false; - if (self.targets.len != other.targets.len) return false; - - for (self.targets) |target, i| { - if (!mem.eql(u8, target, other.targets[i])) return false; - } - - if (!mem.eql(u8, self.install_name, other.install_name)) return false; - - switch (self.current_version) { - .string => |string| { - if (other.current_version != .string) return false; - if (!mem.eql(u8, string, other.current_version.string)) return false; - }, - .int => |int| { - if (other.current_version != .int) return false; - if (int != other.current_version.int) return false; - }, - } - - if (self.reexported_libraries) |reexported_libraries| { - const o_reexported_libraries = other.reexported_libraries orelse return false; - - if (reexported_libraries.len != o_reexported_libraries.len) return false; - - for (reexported_libraries) |reexport, i| { - const o_reexport = o_reexported_libraries[i]; - if (reexport.targets.len != o_reexport.targets.len) return false; - if (reexport.libraries.len != o_reexport.libraries.len) return false; - - for (reexport.targets) |target, j| { - const o_target = o_reexport.targets[j]; - if (!mem.eql(u8, target, o_target)) return false; - } - - for (reexport.libraries) |library, j| { - const o_library = o_reexport.libraries[j]; - if (!mem.eql(u8, library, o_library)) return false; - } - } - } - - if (self.parent_umbrella) |parent_umbrella| { - const o_parent_umbrella = other.parent_umbrella orelse return false; - - if (parent_umbrella.len != o_parent_umbrella.len) return false; - - for (parent_umbrella) |pumbrella, i| { - const o_pumbrella = o_parent_umbrella[i]; - if (pumbrella.targets.len != o_pumbrella.targets.len) return false; - - for (pumbrella.targets) |target, j| { - const o_target = o_pumbrella.targets[j]; - if (!mem.eql(u8, target, o_target)) return false; - } - - if (!mem.eql(u8, pumbrella.umbrella, o_pumbrella.umbrella)) return false; - } - } - - if (self.exports.len != other.exports.len) return false; - - for (self.exports) |exp, i| { - const 
o_exp = other.exports[i]; - if (exp.targets.len != o_exp.targets.len) return false; - if (exp.symbols.len != o_exp.symbols.len) return false; - - for (exp.targets) |target, j| { - const o_target = o_exp.targets[j]; - if (!mem.eql(u8, target, o_target)) return false; - } - - for (exp.symbols) |symbol, j| { - const o_symbol = o_exp.symbols[j]; - if (!mem.eql(u8, symbol, o_symbol)) return false; - } - } - - return true; - } -}; - -test "single lib tbd" { - var parsed = try loadFromFile("test/single_lib.tbd"); - defer parsed.deinit(); - - const result = try parsed.parse(LibTbd); - const expected = .{ - .tbd_version = 4, - .targets = &[_][]const u8{ - "x86_64-macos", - "x86_64-maccatalyst", - "arm64-macos", - "arm64-maccatalyst", - "arm64e-macos", - "arm64e-maccatalyst", - }, - .uuids = &.{ - .{ .target = "x86_64-macos", .value = "F86CC732-D5E4-30B5-AA7D-167DF5EC2708" }, - .{ .target = "x86_64-maccatalyst", .value = "F86CC732-D5E4-30B5-AA7D-167DF5EC2708" }, - .{ .target = "arm64-macos", .value = "00000000-0000-0000-0000-000000000000" }, - .{ .target = "arm64-maccatalyst", .value = "00000000-0000-0000-0000-000000000000" }, - .{ .target = "arm64e-macos", .value = "A17E8744-051E-356E-8619-66F2A6E89AD4" }, - .{ .target = "arm64e-maccatalyst", .value = "A17E8744-051E-356E-8619-66F2A6E89AD4" }, - }, - .install_name = "/usr/lib/libSystem.B.dylib", - .current_version = .{ .string = "1292.60.1" }, - .reexported_libraries = &.{ - .{ - .targets = &.{ - "x86_64-macos", - "x86_64-maccatalyst", - "arm64-macos", - "arm64-maccatalyst", - "arm64e-macos", - "arm64e-maccatalyst", - }, - .libraries = &.{ - "/usr/lib/system/libcache.dylib", "/usr/lib/system/libcommonCrypto.dylib", - "/usr/lib/system/libcompiler_rt.dylib", "/usr/lib/system/libcopyfile.dylib", - "/usr/lib/system/libxpc.dylib", - }, - }, - }, - .exports = &.{ - .{ - .targets = &.{ - "x86_64-maccatalyst", - "x86_64-macos", - }, - .symbols = &.{ - "R8289209$_close", "R8289209$_fork", "R8289209$_fsync", "R8289209$_getattrlist", - "R8289209$_write", - }, - }, - .{ - .targets = &.{ - "x86_64-maccatalyst", - "x86_64-macos", - "arm64e-maccatalyst", - "arm64e-macos", - "arm64-macos", - "arm64-maccatalyst", - }, - .symbols = &.{ - "___crashreporter_info__", "_libSystem_atfork_child", "_libSystem_atfork_parent", - "_libSystem_atfork_prepare", "_mach_init_routine", - }, - }, - }, - .parent_umbrella = null, - }; - try testing.expect(result.eql(expected)); -} - -test "multi lib tbd" { - var parsed = try loadFromFile("test/multi_lib.tbd"); - defer parsed.deinit(); - - const result = try parsed.parse([]LibTbd); - const expected = &[_]LibTbd{ - .{ - .tbd_version = 4, - .targets = &[_][]const u8{"x86_64-macos"}, - .uuids = &.{ - .{ .target = "x86_64-macos", .value = "F86CC732-D5E4-30B5-AA7D-167DF5EC2708" }, - }, - .install_name = "/usr/lib/libSystem.B.dylib", - .current_version = .{ .string = "1292.60.1" }, - .reexported_libraries = &.{ - .{ - .targets = &.{"x86_64-macos"}, - .libraries = &.{"/usr/lib/system/libcache.dylib"}, - }, - }, - .exports = &.{ - .{ - .targets = &.{"x86_64-macos"}, - .symbols = &.{ "R8289209$_close", "R8289209$_fork" }, - }, - .{ - .targets = &.{"x86_64-macos"}, - .symbols = &.{ "___crashreporter_info__", "_libSystem_atfork_child" }, - }, - }, - .parent_umbrella = null, - }, - .{ - .tbd_version = 4, - .targets = &[_][]const u8{"x86_64-macos"}, - .uuids = &.{ - .{ .target = "x86_64-macos", .value = "2F7F7303-DB23-359E-85CD-8B2F93223E2A" }, - }, - .install_name = "/usr/lib/system/libcache.dylib", - .current_version = .{ .int = 83 }, - 
.parent_umbrella = &.{ - .{ - .targets = &.{"x86_64-macos"}, - .umbrella = "System", - }, - }, - .exports = &.{ - .{ - .targets = &.{"x86_64-macos"}, - .symbols = &.{ "_cache_create", "_cache_destroy" }, - }, - }, - .reexported_libraries = null, - }, - }; - - for (result) |lib, i| { - try testing.expect(lib.eql(expected[i])); - } -}