Compare commits: master...zig-develop

6 commits · 38 changed files with 1895 additions and 3017 deletions

Commits: 393a034df5, 2a61160ef8, 1fcfa3003c, 6322c465d5, aac7b03c2e, 69ffe49cc8
```diff
@@ -18,13 +18,11 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v4
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: 0.14.0
       - name: Restore Zig caches
         uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
       - name: Ulimit
         run: ulimit -a
       - name: Run smoke test
         run: zig build smoke-test --verbose
       - name: Run tests
```
```diff
@@ -26,7 +26,7 @@ jobs:
         with:
           ref: zig-mach
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: mach-latest
       - name: Restore Zig caches
```
```diff
@@ -37,9 +37,8 @@ jobs:
         run: zig build smoke-test --verbose
       - name: Run full tests
         run: zig build test --verbose --summary all
-      # TODO: Zig mach currently tracking behind zig 0.14.0 branch - enable this test after update
-      # - name: Run tests (release mode)
-      #   run: zig build test -Doptimize=ReleaseSafe --verbose
+      - name: Run tests (release mode)
+        run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
       # the way GitHub/Gitea does for repo archives
```
```diff
@@ -26,7 +26,7 @@ jobs:
         with:
           ref: zig-develop
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: master
       - name: Restore Zig caches
```
```diff
@@ -20,7 +20,7 @@ jobs:
         with:
           ref: zig-0.13
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: 0.13.0
       - name: Restore Zig caches
```
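The `zig build smoke-test` step these workflows invoke is a custom step declared in build.zig. A minimal sketch of how such a step can be wired up (the paths and descriptions here are assumptions for illustration, not the repository's exact wiring):

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Compile the test suite and wrap it in a run step.
    const smoke_test = b.addTest(.{
        .root_source_file = b.path("src/aws.zig"), // assumed path
        .target = target,
        .optimize = optimize,
    });
    const run_smoke_test = b.addRunArtifact(smoke_test);

    // Expose it so CI can call `zig build smoke-test`.
    const smoke_test_step = b.step("smoke-test", "Run the smoke test suite");
    smoke_test_step.dependOn(&run_smoke_test.step);
}
```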
Pre-commit configuration (deleted):

```diff
@@ -1,33 +0,0 @@
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
-    hooks:
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-      - id: check-yaml
-      - id: check-added-large-files
-  - repo: https://github.com/batmac/pre-commit-zig
-    rev: v0.3.0
-    hooks:
-      - id: zig-fmt
-      - id: zig-build
-  - repo: local
-    hooks:
-      - id: zlint
-        name: Run zig build smoke-test
-        entry: zig
-        args: ["build", "--verbose", "smoke-test"]
-        language: system
-        types: [file]
-        pass_filenames: false
-
-# - repo: local
-#   hooks:
-#     - id: zlint
-#       name: Run zlint
-#       entry: zlint
-#       args: ["--deny-warnings", "--fix"]
-#       language: system
-#       types: [zig]
```
build.zig (149 lines changed):
```diff
@@ -1,4 +1,5 @@
 const std = @import("std");
+const builtin = @import("builtin");
 const Builder = @import("std").Build;
 
 const models_subdir = "codegen/sdk-codegen/aws-models/"; // note will probably not work on windows
```
```diff
@@ -18,7 +19,14 @@ const test_targets = [_]std.Target.Query{
 };
 
 pub fn build(b: *Builder) !void {
+    // Standard target options allows the person running `zig build` to choose
+    // what target to build for. Here we do not override the defaults, which
+    // means any target is allowed, and the default is native. Other options
+    // for restricting supported target set are available.
     const target = b.standardTargetOptions(.{});
 
+    // Standard release options allow the person running `zig build` to select
+    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
     const optimize = b.standardOptimizeOption(.{});
 
     const no_llvm = b.option(
```
```diff
@@ -38,24 +46,45 @@ pub fn build(b: *Builder) !void {
         "test-filter",
         "Skip tests that do not match any of the specified filters",
     ) orelse &.{};
 
-    const dep_mods = try getDependencyModules(b, .{
-        .target = target,
-        .optimize = optimize,
-    });
-
-    const mod_exe = b.createModule(.{
+    // TODO: Embed the current git version in the code. We can do this
+    // by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
+    // grab that commit, and use b.addOptions/exe.addOptions to generate the
+    // Options file. See https://github.com/ziglang/zig/issues/14979 for usage
+    // example.
+    //
+    // From there, I'm not sure what the generated file looks like or quite how
+    // to use, but that should be easy. It may also give some ideas on the
+    // code gen piece itself, though it might be nice to leave as a seperate
+    // executable
+    // TODO: This executable should not be built when importing as a package.
+    // It relies on code gen and is all fouled up when getting imported
+    const exe = b.addExecutable(.{
         .name = "demo",
         .root_source_file = b.path("src/main.zig"),
         .target = target,
         .optimize = optimize,
     });
-    configure(mod_exe, dep_mods, true);
-
-    const exe = b.addExecutable(.{
-        .name = "demo",
-        .root_module = mod_exe,
-        .use_llvm = !no_llvm,
+    exe.use_llvm = !no_llvm;
+    const smithy_dep = b.dependency("smithy", .{
+        // These are the arguments to the dependency. It expects a target and optimization level.
+        .target = target,
+        .optimize = optimize,
+    });
+    const smithy_module = smithy_dep.module("smithy");
+    exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
 
+    // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
+    //
+    // We are working here with kind of a weird dependency though. So we can do this
+    // another way
+    //
+    // TODO: These target/optimize are not correct, as we need to run the thing
+    // const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
+    //     .target = target,
+    //     .optimize = optimize,
+    // });
+    // const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
+    // exe.step.dependOn(&codegen_cmd.step);
 
     const run_cmd = b.addRunArtifact(exe);
     run_cmd.step.dependOn(b.getInstallStep());
```
```diff
@@ -68,18 +97,14 @@ pub fn build(b: *Builder) !void {
 
     const cg = b.step("gen", "Generate zig service code from smithy models");
 
-    const cg_mod = b.createModule(.{
+    const cg_exe = b.addExecutable(.{
         .name = "codegen",
         .root_source_file = b.path("codegen/src/main.zig"),
         // We need this generated for the host, not the real target
         .target = b.graph.host,
         .optimize = if (b.verbose) .Debug else .ReleaseSafe,
     });
-    configure(cg_mod, dep_mods, false);
-
-    const cg_exe = b.addExecutable(.{
-        .name = "codegen",
-        .root_module = cg_mod,
-    });
+    cg_exe.root_module.addImport("smithy", smithy_module);
     var cg_cmd = b.addRunArtifact(cg_exe);
     cg_cmd.addArg("--models");
     cg_cmd.addArg(try std.fs.path.join(
```
```diff
@@ -91,12 +116,8 @@ pub fn build(b: *Builder) !void {
     ));
     cg_cmd.addArg("--output");
     const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
-    if (b.verbose) {
+    if (b.verbose)
         cg_cmd.addArg("--verbose");
-    }
-    if (!no_bin) {
-        b.installArtifact(cg_exe);
-    }
     // cg_cmd.step.dependOn(&fetch_step.step);
     // TODO: this should use zig_exe from std.Build
     // codegen should store a hash in a comment
```
```diff
@@ -123,24 +144,24 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
-    configure(service_manifest_module, dep_mods, true);
+    service_manifest_module.addImport("smithy", smithy_module);
 
-    mod_exe.addImport("service_manifest", service_manifest_module);
+    exe.root_module.addImport("service_manifest", service_manifest_module);
 
     // Expose module to others
-    const mod_aws = b.addModule("aws", .{
+    _ = b.addModule("aws", .{
         .root_source_file = b.path("src/aws.zig"),
         .target = target,
         .optimize = optimize,
+        .imports = &.{
+            .{ .name = "smithy", .module = smithy_module },
+            .{ .name = "service_manifest", .module = service_manifest_module },
+        },
     });
-    mod_aws.addImport("service_manifest", service_manifest_module);
-    configure(mod_aws, dep_mods, true);
 
     // Expose module to others
-    const mod_aws_signing = b.addModule("aws-signing", .{
+    _ = b.addModule("aws-signing", .{
         .root_source_file = b.path("src/aws_signing.zig"),
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
     });
-    configure(mod_aws_signing, dep_mods, false);
 
     // Similar to creating the run step earlier, this exposes a `test` step to
     // the `zig build --help` menu, providing a way for the user to request
```
```diff
@@ -163,22 +184,16 @@ pub fn build(b: *Builder) !void {
     // test_step.dependOn(&run_unit_tests.step);
     for (test_targets) |t| {
         if (broken_windows and t.os_tag == .windows) continue;
 
-        const mod_unit_tests = b.createModule(.{
-            .root_source_file = b.path("src/aws.zig"),
-            .target = b.resolveTargetQuery(t),
-            .optimize = optimize,
-        });
-        mod_unit_tests.addImport("service_manifest", service_manifest_module);
-        configure(mod_unit_tests, dep_mods, true);
-
         // Creates a step for unit testing. This only builds the test executable
         // but does not run it.
         const unit_tests = b.addTest(.{
-            .root_module = mod_unit_tests,
+            .root_source_file = b.path("src/aws.zig"),
+            .target = b.resolveTargetQuery(t),
+            .optimize = optimize,
             .filters = test_filters,
         });
 
+        unit_tests.root_module.addImport("smithy", smithy_module);
+        unit_tests.root_module.addImport("service_manifest", service_manifest_module);
         unit_tests.step.dependOn(cg);
         unit_tests.use_llvm = !no_llvm;
```
```diff
@@ -198,10 +213,14 @@ pub fn build(b: *Builder) !void {
     // Creates a step for unit testing. This only builds the test executable
     // but does not run it.
     const smoke_test = b.addTest(.{
-        .root_module = mod_aws,
+        .root_source_file = b.path("src/aws.zig"),
+        .target = target,
+        .optimize = optimize,
         .filters = test_filters,
     });
     smoke_test.use_llvm = !no_llvm;
+    smoke_test.root_module.addImport("smithy", smithy_module);
+    smoke_test.root_module.addImport("service_manifest", service_manifest_module);
     smoke_test.step.dependOn(cg);
 
     const run_smoke_test = b.addRunArtifact(smoke_test);
```
```diff
@@ -213,41 +232,3 @@ pub fn build(b: *Builder) !void {
         b.installArtifact(exe);
     }
 }
-
-fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.Module), include_time: bool) void {
-    compile.addImport("smithy", modules.get("smithy").?);
-    compile.addImport("date", modules.get("date").?);
-    compile.addImport("json", modules.get("json").?);
-    compile.addImport("case", modules.get("case").?);
-    if (include_time) compile.addImport("zeit", modules.get("zeit").?);
-}
-
-fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Build.Module) {
-    var result = std.StringHashMap(*std.Build.Module).init(b.allocator);
-
-    // External dependencies
-    const dep_smithy = b.dependency("smithy", args);
-    const mod_smithy = dep_smithy.module("smithy");
-    try result.putNoClobber("smithy", mod_smithy);
-
-    const dep_zeit = b.dependency("zeit", args);
-    const mod_zeit = dep_zeit.module("zeit");
-    try result.putNoClobber("zeit", mod_zeit);
-
-    const dep_case = b.dependency("case", args);
-    const mod_case = dep_case.module("case");
-    try result.putNoClobber("case", mod_case);
-    // End External dependencies
-
-    // Private modules/dependencies
-    const dep_json = b.dependency("json", args);
-    const mod_json = dep_json.module("json");
-    try result.putNoClobber("json", mod_json);
-
-    const dep_date = b.dependency("date", args);
-    const mod_date = dep_date.module("date");
-    try result.putNoClobber("date", mod_date);
-    // End private modules/dependencies
-
-    return result;
-}
```
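The master side of the build.zig changes above moves to Zig 0.14's module-centric std.Build API: a `*std.Build.Module` is created first, compile steps reference it via `.root_module`, and imports are attached to the module rather than to the compile step. A minimal self-contained sketch of that pattern (names here are hypothetical, not the repository's):

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Create the module first; target and optimize now live on the module.
    const mod = b.createModule(.{
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });

    // Imports belong to the module, e.g.:
    // mod.addImport("smithy", some_dependency_module);

    // The compile step just wraps the module.
    const exe = b.addExecutable(.{
        .name = "demo",
        .root_module = mod,
    });
    b.installArtifact(exe);
}
```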
build.zig.zon:

```diff
@@ -7,33 +7,18 @@
         "build.zig.zon",
         "src",
         "codegen",
-        "lib",
         "README.md",
         "LICENSE",
     },
 
     .dependencies = .{
         .smithy = .{
-            .url = "https://git.lerch.org/lobo/smithy/archive/fd9be1afbfcc60d52896c077d8e9c963bb667bf1.tar.gz",
-            .hash = "smithy-1.0.0-uAyBgZPSAgBHStx7nrj0u3sN66g8Ppnn3XFUEJhn00rP",
+            .url = "https://git.lerch.org/lobo/smithy/archive/a4c6ec6dfe552c57bab601c7d99e8de02bbab1fe.tar.gz",
+            .hash = "smithy-1.0.0-uAyBgS_MAgC4qgc9QaEy5Y5Nf7kv32buQZBYugqNQsAn",
         },
         .models = .{
-            .url = "https://github.com/aws/aws-sdk-go-v2/archive/refs/tags/release-2025-05-05.tar.gz",
-            .hash = "N-V-__8AAKWdeiawujEcrfukQbb8lLAiQIRT0uG5gCcm4b7W",
-        },
-        .zeit = .{
-            .url = "git+https://github.com/rockorager/zeit#f86d568b89a5922f084dae524a1eaf709855cd5e",
-            .hash = "zeit-0.6.0-5I6bkzt5AgC1_BCuSzXkV0JHeF4Mhti1Z_jFC7E_nmD2",
-        },
-        .date = .{
-            .path = "lib/date",
-        },
-        .json = .{
-            .path = "lib/json",
-        },
-        .case = .{
-            .url = "git+https://github.com/travisstaloch/case.git#610caade88ca54d2745f115114b08e73e2c6fe02",
-            .hash = "N-V-__8AAIfIAAC_RzCtghVVBVdqUzB8AaaGIyvK2WWz38bC",
+            .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
+            .hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
         },
     },
 }
```
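Two hash formats appear above: the legacy hex multihash (`1220…`) used through Zig 0.13, and the newer name-and-version form (`smithy-1.0.0-…`) used by Zig 0.14's package manager. In either case, entries are normally produced with `zig fetch --save <url>`, which downloads the archive and records the matching hash. A hypothetical entry for illustration (the name, URL, and hash below are made up):

```zig
// build.zig.zon fragment, hypothetical dependency
.dependencies = .{
    .example = .{
        .url = "https://example.com/archive/abcdef.tar.gz",
        // written by `zig fetch --save`; editing the URL by hand without
        // refreshing the hash will fail the fetch integrity check
        .hash = "example-1.0.0-AAAAAAAAAACtPl3kYt7NgfZgcTpZB_K5osPc9Z7SXGLU",
    },
},
```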
codegen/src/FileGenerationState.zig (deleted):

```diff
@@ -1,10 +0,0 @@
-const std = @import("std");
-const smithy = @import("smithy");
-
-const FileGenerationState = @This();
-
-protocol: smithy.AwsProtocol,
-shapes: std.StringHashMap(smithy.ShapeInfo),
-shape_references: std.StringHashMap(u64),
-additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
-additional_types_generated: *std.StringHashMap(void),
```
codegen/src/GenerateTypeOptions.zig (deleted):

```diff
@@ -1,21 +0,0 @@
-const std = @import("std");
-const case = @import("case");
-
-const GenerateTypeOptions = @This();
-
-end_structure: bool,
-key_case: case.Case,
-
-pub fn endStructure(self: @This(), value: bool) GenerateTypeOptions {
-    return .{
-        .end_structure = value,
-        .key_case = self.key_case,
-    };
-}
-
-pub fn keyCase(self: @This(), value: case.Case) GenerateTypeOptions {
-    return .{
-        .end_structure = self.end_structure,
-        .key_case = value,
-    };
-}
```
codegen/src/GenerationState.zig (deleted):

```diff
@@ -1,53 +0,0 @@
-const std = @import("std");
-const smithy = @import("smithy");
-
-const FileGenerationState = @import("FileGenerationState.zig");
-
-const GenerationState = @This();
-
-type_stack: *std.ArrayList(*const smithy.ShapeInfo),
-file_state: FileGenerationState,
-// we will need some sort of "type decls needed" for recursive structures
-allocator: std.mem.Allocator,
-indent_level: u64,
-
-pub fn appendToTypeStack(self: @This(), shape_info: *const smithy.ShapeInfo) !void {
-    try self.type_stack.append(shape_info);
-}
-
-pub fn popFromTypeStack(self: @This()) void {
-    _ = self.type_stack.pop();
-}
-
-pub fn getTypeRecurrenceCount(self: @This(), id: []const u8) u8 {
-    var self_occurences: u8 = 0;
-
-    for (self.type_stack.items) |i| {
-        if (std.mem.eql(u8, i.id, id)) {
-            self_occurences += 1;
-        }
-    }
-
-    return self_occurences;
-}
-
-pub fn indent(self: @This()) GenerationState {
-    var new_state = self.clone();
-    new_state.indent_level += 1;
-    return new_state;
-}
-
-pub fn deindent(self: @This()) GenerationState {
-    var new_state = self.clone();
-    new_state.indent_level = @max(0, new_state.indent_level - 1);
-    return new_state;
-}
-
-pub fn clone(self: @This()) GenerationState {
-    return GenerationState{
-        .type_stack = self.type_stack,
-        .file_state = self.file_state,
-        .allocator = self.allocator,
-        .indent_level = self.indent_level,
-    };
-}
```
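The deleted file's indent/deindent helpers pass the state by value and return a modified copy, so child generation never mutates the parent's indentation. A standalone sketch of that pattern (hypothetical State type, not the deleted code itself):

```zig
const std = @import("std");

const State = struct {
    indent_level: u64,

    // Returns a copy with one more indent level; the caller's value is
    // untouched, so sibling fields keep the parent's level.
    pub fn indent(self: @This()) State {
        var new_state = self;
        new_state.indent_level += 1;
        return new_state;
    }
};

test "indent returns a modified copy" {
    const root: State = .{ .indent_level = 0 };
    const child = root.indent();
    try std.testing.expectEqual(@as(u64, 0), root.indent_level);
    try std.testing.expectEqual(@as(u64, 1), child.indent_level);
}
```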
codegen/src/Hasher.zig:

```diff
@@ -20,6 +20,7 @@ const multihash_len = 1 + 1 + Hash.digest_length;
 pub const hex_multihash_len = 2 * multihash_len;
 pub const digest_len = Hash.digest_length;
 
+const MultiHashHexDigest = [hex_multihash_len]u8;
 const MultihashFunction = enum(u16) {
     identity = 0x00,
     sha1 = 0x11,
```
```diff
@@ -69,7 +70,7 @@ pub fn hex64(x: u64) [16]u8 {
     var result: [16]u8 = undefined;
     var i: usize = 0;
     while (i < 8) : (i += 1) {
-        const byte: u8 = @truncate(x >> @as(u6, @intCast(8 * i)));
+        const byte = @as(u8, @truncate(x >> @as(u6, @intCast(8 * i))));
        result[i * 2 + 0] = hex_charset[byte >> 4];
        result[i * 2 + 1] = hex_charset[byte & 15];
    }
```
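The hex64 change above only swaps spellings: both forms of `@truncate` are equivalent, since the builtin takes its destination type from the result location. A small demonstration:

```zig
const std = @import("std");

test "truncate picks up its type from the result location" {
    const x: u64 = 0xDEADBEEF;
    const a: u8 = @truncate(x); // destination type inferred from `a`
    const b = @as(u8, @truncate(x)); // destination type given explicitly
    try std.testing.expectEqual(a, b);
}
```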
codegen/src/json.zig (new file, 150 lines):
```diff
@@ -0,0 +1,150 @@
+const std = @import("std");
+// options is a json.Options, but since we're using our hacked json.zig we don't want to
+// specifically call this out
+pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
+    if (@typeInfo(@TypeOf(map)) == .optional) {
+        if (map == null)
+            return false
+        else
+            return serializeMapInternal(map.?, key, options, out_stream);
+    }
+    return serializeMapInternal(map, key, options, out_stream);
+}
+
+fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
+    if (map.len == 0) {
+        var child_options = options;
+        if (child_options.whitespace) |*child_ws|
+            child_ws.indent_level += 1;
+
+        try out_stream.writeByte('"');
+        try out_stream.writeAll(key);
+        _ = try out_stream.write("\":");
+        if (options.whitespace) |ws| {
+            if (ws.separator) {
+                try out_stream.writeByte(' ');
+            }
+        }
+        try out_stream.writeByte('{');
+        try out_stream.writeByte('}');
+        return true;
+    }
+    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
+    var child_options = options;
+    if (child_options.whitespace) |*child_ws|
+        child_ws.indent_level += 1;
+
+    try out_stream.writeByte('"');
+    try out_stream.writeAll(key);
+    _ = try out_stream.write("\":");
+    if (options.whitespace) |ws| {
+        if (ws.separator) {
+            try out_stream.writeByte(' ');
+        }
+    }
+    try out_stream.writeByte('{');
+    if (options.whitespace) |_|
+        try out_stream.writeByte('\n');
+    for (map, 0..) |tag, i| {
+        if (tag.key == null or tag.value == null) continue;
+        // TODO: Deal with escaping and general "json.stringify" the values...
+        if (child_options.whitespace) |ws|
+            try ws.outputIndent(out_stream);
+        try out_stream.writeByte('"');
+        try jsonEscape(tag.key.?, child_options, out_stream);
+        _ = try out_stream.write("\":");
+        if (child_options.whitespace) |ws| {
+            if (ws.separator) {
+                try out_stream.writeByte(' ');
+            }
+        }
+        try out_stream.writeByte('"');
+        try jsonEscape(tag.value.?, child_options, out_stream);
+        try out_stream.writeByte('"');
+        if (i < map.len - 1) {
+            try out_stream.writeByte(',');
+        }
+        if (child_options.whitespace) |_|
+            try out_stream.writeByte('\n');
+    }
+    if (options.whitespace) |ws|
+        try ws.outputIndent(out_stream);
+    try out_stream.writeByte('}');
+    return true;
+}
+// code within jsonEscape lifted from json.zig in stdlib
+fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
+    var i: usize = 0;
+    while (i < value.len) : (i += 1) {
+        switch (value[i]) {
+            // normal ascii character
+            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
+            // only 2 characters that *must* be escaped
+            '\\' => try out_stream.writeAll("\\\\"),
+            '\"' => try out_stream.writeAll("\\\""),
+            // solidus is optional to escape
+            '/' => {
+                if (options.string.String.escape_solidus) {
+                    try out_stream.writeAll("\\/");
+                } else {
+                    try out_stream.writeByte('/');
+                }
+            },
+            // control characters with short escapes
+            // TODO: option to switch between unicode and 'short' forms?
+            0x8 => try out_stream.writeAll("\\b"),
+            0xC => try out_stream.writeAll("\\f"),
+            '\n' => try out_stream.writeAll("\\n"),
+            '\r' => try out_stream.writeAll("\\r"),
+            '\t' => try out_stream.writeAll("\\t"),
+            else => {
+                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
+                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
+                if (ulen == 1 or options.string.String.escape_unicode) {
+                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
+                    try outputUnicodeEscape(codepoint, out_stream);
+                } else {
+                    try out_stream.writeAll(value[i .. i + ulen]);
+                }
+                i += ulen - 1;
+            },
+        }
+    }
+}
+// outputUnicodeEscape and assert lifted from json.zig in stdlib
+fn outputUnicodeEscape(
+    codepoint: u21,
+    out_stream: anytype,
+) !void {
+    if (codepoint <= 0xFFFF) {
+        // If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
+        // then it may be represented as a six-character sequence: a reverse solidus, followed
+        // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+    } else {
+        assert(codepoint <= 0x10FFFF);
+        // To escape an extended character that is not in the Basic Multilingual Plane,
+        // the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
+        const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
+        const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+    }
+}
+
+/// This function invokes undefined behavior when `ok` is `false`.
+/// In Debug and ReleaseSafe modes, calls to this function are always
+/// generated, and the `unreachable` statement triggers a panic.
+/// In ReleaseFast and ReleaseSmall modes, calls to this function are
+/// optimized away, and in fact the optimizer is able to use the assertion
+/// in its heuristics.
+/// Inside a test block, it is best to use the `std.testing` module rather
+/// than this function, because this function may not detect a test failure
+/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
+/// function is the correct function to use.
+pub fn assert(ok: bool) void {
+    if (!ok) unreachable; // assertion failure
+}
```
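A sketch of how this file's serializeMap might be called. Its anytype parameters leave the concrete shapes open, so the option types below are assumptions, just enough for the fields the function actually touches (a null whitespace keeps output compact):

```zig
const std = @import("std");
const json = @import("json.zig"); // the file added above

const Tag = struct { key: ?[]const u8, value: ?[]const u8 };

// Assumed option shapes for illustration.
const Whitespace = struct {
    indent_level: usize = 0,
    separator: bool = true,
    pub fn outputIndent(self: @This(), out_stream: anytype) !void {
        try out_stream.writeByteNTimes(' ', self.indent_level * 4);
    }
};
const Options = struct {
    whitespace: ?Whitespace = null,
    string: struct {
        String: struct { escape_solidus: bool = false, escape_unicode: bool = false } = .{},
    } = .{},
};

test "serialize a key/value map" {
    var buf: [128]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    const tags = [_]Tag{.{ .key = "Environment", .value = "prod" }};
    _ = try json.serializeMap(tags[0..], "tags", Options{}, fbs.writer());
    try std.testing.expectEqualStrings(
        "\"tags\":{\"Environment\":\"prod\"}",
        fbs.getWritten(),
    );
}
```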
codegen/src/main.zig:

```diff
@@ -1,26 +1,12 @@
 const std = @import("std");
 const smithy = @import("smithy");
+const snake = @import("snake.zig");
+const Hasher = @import("Hasher.zig");
-const case = @import("case");
-const smt = @import("smithy_tools.zig");
-const serialization = @import("serialization.zig");
-const support = @import("support.zig");
 const json_zig = @embedFile("json.zig");
 
 var verbose = false;
 
-const GenerationState = @import("GenerationState.zig");
-const FileGenerationState = @import("FileGenerationState.zig");
-const GenerateTypeOptions = @import("GenerateTypeOptions.zig");
-
-const Shape = smt.Shape;
-const ServiceShape = smt.ServiceShape;
-const ListShape = smt.ListShape;
-const MapShape = smt.MapShape;
-
 pub fn main() anyerror!void {
-    const root_progress_node = std.Progress.start(.{});
-    defer root_progress_node.end();
-
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
     const allocator = arena.allocator();
```
```diff
@@ -47,6 +33,8 @@ pub fn main() anyerror!void {
         if (std.mem.eql(u8, "--models", arg))
             models_dir = try std.fs.cwd().openDir(args[i + 1], .{ .iterate = true });
     }
+    // TODO: Seems like we should remove this in favor of a package
+    try output_dir.writeFile(.{ .sub_path = "json.zig", .data = json_zig });
 
     // TODO: We need a different way to handle this file...
     const manifest_file_started = false;
```
```diff
@@ -82,31 +70,24 @@ pub fn main() anyerror!void {
         // no files specified, look for json files in models directory or cwd
         // this is our normal mode of operation and where initial optimizations
         // can be made
-
         if (models_dir) |m| {
             var cwd = try std.fs.cwd().openDir(".", .{});
             defer cwd.close();
             defer cwd.setAsCwd() catch unreachable;
 
             try m.setAsCwd();
-            try processDirectories(m, output_dir, &root_progress_node);
+            try processDirectories(m, output_dir);
         }
     }
 
     if (args.len == 0)
         _ = try generateServices(allocator, ";", std.io.getStdIn(), stdout);
-
-    if (verbose) {
-        const output_path = try output_dir.realpathAlloc(allocator, ".");
-        std.debug.print("Output path: {s}\n", .{output_path});
-    }
 }
 
 const OutputManifest = struct {
     model_dir_hash_digest: [Hasher.hex_multihash_len]u8,
     output_dir_hash_digest: [Hasher.hex_multihash_len]u8,
 };
-fn processDirectories(models_dir: std.fs.Dir, output_dir: std.fs.Dir, parent_progress: *const std.Progress.Node) !void {
+fn processDirectories(models_dir: std.fs.Dir, output_dir: std.fs.Dir) !void {
     // Let's get ready to hash!!
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
```
```diff
@@ -114,8 +95,7 @@ fn processDirectories(models_dir: std.fs.Dir, output_dir: std.fs.Dir, parent_pro
     var thread_pool: std.Thread.Pool = undefined;
     try thread_pool.init(.{ .allocator = allocator });
     defer thread_pool.deinit();
 
-    const count, var calculated_manifest = try calculateDigests(models_dir, output_dir, &thread_pool);
+    var calculated_manifest = try calculateDigests(models_dir, output_dir, &thread_pool);
     const output_stored_manifest = output_dir.readFileAlloc(allocator, "output_manifest.json", std.math.maxInt(usize)) catch null;
     if (output_stored_manifest) |o| {
         // we have a stored manifest. Parse it and compare to our calculations
```
```diff
@@ -135,19 +115,14 @@ fn processDirectories(models_dir: std.fs.Dir, output_dir: std.fs.Dir, parent_pro
     defer manifest_file.close();
     const manifest = manifest_file.writer();
     var mi = models_dir.iterate();
-
-    const generating_models_progress = parent_progress.start("generating models", count);
-    defer generating_models_progress.end();
-
     while (try mi.next()) |e| {
-        if ((e.kind == .file or e.kind == .sym_link) and std.mem.endsWith(u8, e.name, ".json")) {
+        if ((e.kind == .file or e.kind == .sym_link) and
+            std.mem.endsWith(u8, e.name, ".json"))
             try processFile(e.name, output_dir, manifest);
-            generating_models_progress.completeOne();
-        }
     }
     // re-calculate so we can store the manifest
     model_digest = calculated_manifest.model_dir_hash_digest;
-    _, calculated_manifest = try calculateDigests(models_dir, output_dir, &thread_pool);
+    calculated_manifest = try calculateDigests(models_dir, output_dir, &thread_pool);
     try output_dir.writeFile(.{ .sub_path = "output_manifest.json", .data = try std.json.stringifyAlloc(
         allocator,
         calculated_manifest,
```
```diff
@@ -156,18 +131,13 @@ fn processDirectories(models_dir: std.fs.Dir, output_dir: std.fs.Dir, parent_pro
 }
 
 var model_digest: ?[Hasher.hex_multihash_len]u8 = null;
-fn calculateDigests(models_dir: std.fs.Dir, output_dir: std.fs.Dir, thread_pool: *std.Thread.Pool) !struct { usize, OutputManifest } {
-    const Include = struct {
-        threadlocal var count: usize = 0;
-        pub fn include(entry: std.fs.Dir.Walker.Entry) bool {
-            const included = std.mem.endsWith(u8, entry.basename, ".json");
-            if (included) count += 1;
-            return included;
-        }
-    };
-
+fn calculateDigests(models_dir: std.fs.Dir, output_dir: std.fs.Dir, thread_pool: *std.Thread.Pool) !OutputManifest {
     const model_hash = if (model_digest) |m| m[0..Hasher.digest_len].* else try Hasher.computeDirectoryHash(thread_pool, models_dir, @constCast(&Hasher.ComputeDirectoryOptions{
-        .isIncluded = Include.include,
+        .isIncluded = struct {
+            pub fn include(entry: std.fs.Dir.Walker.Entry) bool {
+                return std.mem.endsWith(u8, entry.basename, ".json");
+            }
+        }.include,
         .isExcluded = struct {
             pub fn exclude(entry: std.fs.Dir.Walker.Entry) bool {
                 _ = entry;
```
```diff
@@ -194,13 +164,20 @@ fn calculateDigests(models_dir: std.fs.Dir, output_dir: std.fs.Dir, thread_pool:
     }));
     if (verbose) std.log.info("Output directory hash: {s}", .{Hasher.hexDigest(output_hash)});
     return .{
-        Include.count, .{
-            .model_dir_hash_digest = model_digest orelse Hasher.hexDigest(model_hash),
-            .output_dir_hash_digest = Hasher.hexDigest(output_hash),
-        },
+        .model_dir_hash_digest = model_digest orelse Hasher.hexDigest(model_hash),
+        .output_dir_hash_digest = Hasher.hexDigest(output_hash),
     };
 }
 fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype) !void {
+    // The fixed buffer for output will be 2MB, which is twice as large as the size of the EC2
+    // (the largest) model. We'll then flush all this at one go at the end.
+    var buffer = [_]u8{0} ** (1024 * 1024 * 2);
+    var output_stream = std.io.FixedBufferStream([]u8){
+        .buffer = &buffer,
+        .pos = 0,
+    };
+    var writer = output_stream.writer();
+
     // It's probably best to create our own allocator here so we can deint at the end and
     // toss all allocations related to the services in this file
     // I can't guarantee we're not leaking something, and at the end of the
```
```diff
@@ -208,36 +185,30 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
     const allocator = arena.allocator();
 
-    var output = try std.ArrayListUnmanaged(u8).initCapacity(allocator, 1024 * 1024 * 2);
-    defer output.deinit(allocator);
-
-    var counting_writer = std.io.countingWriter(output.writer(allocator));
-    var writer = counting_writer.writer();
-
     _ = try writer.write("const std = @import(\"std\");\n");
-    _ = try writer.write("const smithy = @import(\"smithy\");\n");
-    _ = try writer.write("const json = @import(\"json\");\n");
-    _ = try writer.write("const date = @import(\"date\");\n");
-    _ = try writer.write("const zeit = @import(\"zeit\");\n");
-    _ = try writer.write("\n");
-    _ = try writer.write("const serializeMap = json.serializeMap;\n");
-    _ = try writer.write("\n");
+    _ = try writer.write("const serializeMap = @import(\"json.zig\").serializeMap;\n");
+    _ = try writer.write("const smithy = @import(\"smithy\");\n\n");
     if (verbose) std.log.info("Processing file: {s}", .{file_name});
 
     const service_names = generateServicesForFilePath(allocator, ";", file_name, writer) catch |err| {
         std.log.err("Error processing file: {s}", .{file_name});
         return err;
     };
 
-    var output_file_name: []const u8 = try std.mem.join(allocator, "-", service_names);
-
-    if (output_file_name.len == 0) {
-        const ext = std.fs.path.extension(file_name);
-        output_file_name = file_name[0 .. file_name.len - ext.len];
+    defer {
+        for (service_names) |name| allocator.free(name);
+        allocator.free(service_names);
     }
+    var output_file_name = try std.fmt.allocPrint(allocator, "", .{});
+    defer allocator.free(output_file_name);
+    for (service_names) |name| {
+        const seperator = if (output_file_name.len > 0) "-" else "";
+        const new_output_file_name = try std.fmt.allocPrint(
+            allocator,
+            "{s}{s}{s}",
+            .{ output_file_name, seperator, name },
+        );
+        allocator.free(output_file_name);
+        output_file_name = new_output_file_name;
+    }
 
     {
         // append .zig on to the file name
         const new_output_file_name = try std.fmt.allocPrint(
```

```diff
@@ -248,27 +219,15 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
         allocator.free(output_file_name);
         output_file_name = new_output_file_name;
     }
 
-    const unformatted: [:0]const u8 = try output.toOwnedSliceSentinel(allocator, 0);
-    const formatted = try zigFmt(allocator, unformatted);
-
     // Dump our buffer out to disk
     var file = try output_dir.createFile(output_file_name, .{ .truncate = true });
     defer file.close();
-    try file.writeAll(formatted);
-
+    try file.writeAll(output_stream.getWritten());
     for (service_names) |name| {
         try manifest.print("pub const {s} = @import(\"{s}\");\n", .{ name, std.fs.path.basename(output_file_name) });
     }
 }
 
-fn zigFmt(allocator: std.mem.Allocator, buffer: [:0]const u8) ![]const u8 {
-    var tree = try std.zig.Ast.parse(allocator, buffer, .zig);
-    defer tree.deinit(allocator);
-
-    return try tree.render(allocator);
-}
-
 fn generateServicesForFilePath(
     allocator: std.mem.Allocator,
     comptime terminator: []const u8,
```
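The master side formats generated code in-process via std.zig.Ast (the zigFmt helper removed above). A standalone sketch of that approach:

```zig
const std = @import("std");

fn fmtSource(allocator: std.mem.Allocator, src: [:0]const u8) ![]const u8 {
    // Parse the source into an AST, then render it back through the
    // canonical formatter, the in-process equivalent of `zig fmt`.
    var tree = try std.zig.Ast.parse(allocator, src, .zig);
    defer tree.deinit(allocator);
    return try tree.render(allocator);
}

test "round-trip a snippet through the formatter" {
    const out = try fmtSource(std.testing.allocator, "const x=1;");
    defer std.testing.allocator.free(out);
    try std.testing.expectEqualStrings("const x = 1;\n", out);
}
```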
```diff
@@ -404,8 +363,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
     var generated = std.StringHashMap(void).init(allocator);
     defer generated.deinit();
 
-    var state = FileGenerationState{
-        .protocol = undefined,
+    const state = FileGenerationState{
         .shape_references = shape_references,
         .additional_types_to_generate = &unresolved,
         .additional_types_generated = &generated,
```
```diff
@@ -413,7 +371,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
     };
     for (services.items) |service| {
         var sdk_id: []const u8 = undefined;
-        const version: ?[]const u8 = service.shape.service.version;
+        const version: []const u8 = service.shape.service.version;
         const name: []const u8 = service.name;
         var arn_namespace: ?[]const u8 = undefined;
         var sigv4_name: ?[]const u8 = null;
```
```diff
@@ -428,10 +386,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
                 endpoint_prefix = trait.aws_api_service.endpoint_prefix;
             },
             .aws_auth_sigv4 => sigv4_name = trait.aws_auth_sigv4.name,
-            .aws_protocol => {
-                aws_protocol = trait.aws_protocol;
-                state.protocol = aws_protocol;
-            },
+            .aws_protocol => aws_protocol = trait.aws_protocol,
             else => {},
         }
     }
```
```diff
@@ -444,13 +399,10 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
         // Service struct
         // name of the field will be snake_case of whatever comes in from
         // sdk_id. Not sure this will simple...
-        const constant_name = try support.constantName(allocator, sdk_id, .snake);
+        const constant_name = try constantName(allocator, sdk_id);
         try constant_names.append(constant_name);
         try writer.print("const Self = @This();\n", .{});
-        if (version) |v|
-            try writer.print("pub const version: ?[]const u8 = \"{s}\";\n", .{v})
-        else
-            try writer.print("pub const version: ?[]const u8 = null;\n", .{});
+        try writer.print("pub const version: []const u8 = \"{s}\";\n", .{version});
         try writer.print("pub const sdk_id: []const u8 = \"{s}\";\n", .{sdk_id});
         if (arn_namespace) |a| {
             try writer.print("pub const arn_namespace: ?[]const u8 = \"{s}\";\n", .{a});
```
```diff
@@ -461,10 +413,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
         // TODO: This really should just be ".whatevs". We're fully qualifying here, which isn't typical
         try writer.print("pub const aws_protocol: smithy.AwsProtocol = {};\n\n", .{aws_protocol});
         _ = try writer.write("pub const service_metadata: struct {\n");
-        if (version) |v|
-            try writer.print("    version: ?[]const u8 = \"{s}\",\n", .{v})
-        else
-            try writer.print("    version: ?[]const u8 = null,\n", .{});
+        try writer.print("    version: []const u8 = \"{s}\",\n", .{version});
         try writer.print("    sdk_id: []const u8 = \"{s}\",\n", .{sdk_id});
         if (arn_namespace) |a| {
             try writer.print("    arn_namespace: ?[]const u8 = \"{s}\",\n", .{a});
```
```diff
@@ -497,47 +446,56 @@ fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerat
             .allocator = allocator,
             .indent_level = 0,
         };
-        const type_name = try getTypeName(allocator, t);
-        defer allocator.free(type_name);
-
+        const type_name = avoidReserved(t.name);
         try writer.print("\npub const {s} = ", .{type_name});
         try file_state.additional_types_generated.putNoClobber(t.name, {});
-        _ = try generateTypeFor(t.id, writer, state, .{
-            .key_case = .snake,
-            .end_structure = true,
-        });
+        _ = try generateTypeFor(t.id, writer, state, true);
         _ = try writer.write(";\n");
     }
 }
 
+fn constantName(allocator: std.mem.Allocator, id: []const u8) ![]const u8 {
+    // There are some ids that don't follow consistent rules, so we'll
+    // look for the exceptions and, if not found, revert to the snake case
+    // algorithm
+
+    // This one might be a bug in snake, but it's the only example so HPDL
+    if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
+    if (std.mem.eql(u8, id, "CloudFront")) return try std.fmt.allocPrint(allocator, "cloudfront", .{});
+    // IoT is an acryonym, but snake wouldn't know that. Interestingly not all
+    // iot services are capitalizing that way.
+    if (std.mem.eql(u8, id, "IoTSiteWise")) return try std.fmt.allocPrint(allocator, "iot_sitewise", .{});
+    if (std.mem.eql(u8, id, "IoTFleetHub")) return try std.fmt.allocPrint(allocator, "iot_fleet_hub", .{});
+    if (std.mem.eql(u8, id, "IoTSecureTunneling")) return try std.fmt.allocPrint(allocator, "iot_secure_tunneling", .{});
+    if (std.mem.eql(u8, id, "IoTThingsGraph")) return try std.fmt.allocPrint(allocator, "iot_things_graph", .{});
+    // snake turns this into dev_ops, which is a little weird
+    if (std.mem.eql(u8, id, "DevOps Guru")) return try std.fmt.allocPrint(allocator, "devops_guru", .{});
+    if (std.mem.eql(u8, id, "FSx")) return try std.fmt.allocPrint(allocator, "fsx", .{});
+
+    // Not a special case - just snake it
+    return try snake.fromPascalCase(allocator, id);
+}
+
+const FileGenerationState = struct {
+    shapes: std.StringHashMap(smithy.ShapeInfo),
+    shape_references: std.StringHashMap(u64),
+    additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
+    additional_types_generated: *std.StringHashMap(void),
+};
+const GenerationState = struct {
+    type_stack: *std.ArrayList(*const smithy.ShapeInfo),
+    file_state: FileGenerationState,
+    // we will need some sort of "type decls needed" for recursive structures
+    allocator: std.mem.Allocator,
+    indent_level: u64,
+};
+
 fn outputIndent(state: GenerationState, writer: anytype) !void {
     const n_chars = 4 * state.indent_level;
     try writer.writeByteNTimes(' ', n_chars);
 }
 
-const StructType = enum {
-    request,
-    response,
-};
-
-const OperationSubTypeInfo = struct {
-    type: StructType,
-    key_case: case.Case,
-};
-
-const operation_sub_types = [_]OperationSubTypeInfo{
-    OperationSubTypeInfo{
-        .key_case = .snake,
-        .type = .request,
-    },
-    OperationSubTypeInfo{
-        .key_case = .snake,
-        .type = .response,
-    },
-};
-
 fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo, file_state: FileGenerationState, writer: anytype) !void {
-    const snake_case_name = try support.constantName(allocator, operation.name, .snake);
+    const snake_case_name = try snake.fromPascalCase(allocator, operation.name);
     defer allocator.free(snake_case_name);
 
     var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
```
```diff
@@ -552,53 +510,6 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     child_state.indent_level += 1;
     // indent should start at 4 spaces here
     const operation_name = avoidReserved(snake_case_name);
-
-    inline for (operation_sub_types) |type_info| {
-        _ = try writer.print("pub const {s}", .{operation.name});
-        switch (type_info.type) {
-            .request => try writer.writeAll("Request"),
-            .response => try writer.writeAll("Response"),
-        }
-        try writer.writeAll(" = ");
-
-        const operation_field_name = switch (type_info.type) {
-            .request => "input",
-            .response => "output",
-        };
-        const maybe_shape_id = @field(operation.shape.operation, operation_field_name);
-
-        const generate_type_options = GenerateTypeOptions{
-            .key_case = type_info.key_case,
-            .end_structure = false,
-        };
-
-        if (maybe_shape_id == null or
-            (try smt.getShapeInfo(maybe_shape_id.?, state.file_state.shapes)).shape == .unit)
-        {
-            _ = try writer.write("struct {\n");
-        } else if (maybe_shape_id) |shape_id| {
-            if (try generateTypeFor(shape_id, writer, state, generate_type_options)) unreachable; // we expect only structs here
-            _ = try writer.write("\n");
-
-            switch (type_info.type) {
-                .request => {
-                    var new_state = state.clone();
-                    new_state.indent_level = 0;
-                    std.debug.assert(new_state.type_stack.items.len == 0);
-
-                    try serialization.json.generateToJsonFunction(shape_id, writer.any(), new_state, generate_type_options.keyCase(.pascal));
-
-                    try writer.writeAll("\n");
-                },
-                else => {},
-            }
-        }
-
-        try generateMetadataFunction(operation_name, state, writer, generate_type_options);
-
-        _ = try writer.write("};\n\n");
-    }
-
     try writer.print("pub const {s}: struct ", .{operation_name});
     _ = try writer.write("{\n");
     for (operation.shape.operation.traits) |trait| {
```
```diff
@@ -619,10 +530,28 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     try outputIndent(state, writer);
     try writer.print("action_name: []const u8 = \"{s}\",\n", .{operation.name});
     try outputIndent(state, writer);
-    _ = try writer.print("Request: type = {s}Request,\n", .{operation.name});
-
+    _ = try writer.write("Request: type = ");
+    if (operation.shape.operation.input == null or
+        (try shapeInfoForId(operation.shape.operation.input.?, state)).shape == .unit)
+    {
+        _ = try writer.write("struct {\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    } else if (operation.shape.operation.input) |member| {
+        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
+        _ = try writer.write("\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    }
+    _ = try writer.write(",\n");
     try outputIndent(state, writer);
-    _ = try writer.print("Response: type = {s}Response,\n", .{operation.name});
+    _ = try writer.write("Response: type = ");
+    if (operation.shape.operation.output == null or
+        (try shapeInfoForId(operation.shape.operation.output.?, state)).shape == .unit)
+    {
+        _ = try writer.write("struct {}"); // we want to maintain consistency with other ops
+    } else if (operation.shape.operation.output) |member| {
+        if (try generateTypeFor(member, writer, state, true)) unreachable; // we expect only structs here
+    }
+    _ = try writer.write(",\n");
 
     if (operation.shape.operation.errors) |errors| {
         try outputIndent(state, writer);
```
|
|||
_ = try writer.write("} = .{};\n");
|
||||
}
|
||||
|
||||
fn generateMetadataFunction(operation_name: []const u8, state: GenerationState, writer: anytype, options: GenerateTypeOptions) !void {
|
||||
fn generateMetadataFunction(operation_name: []const u8, state: GenerationState, writer: anytype) !void {
|
||||
// TODO: Shove these lines in here, and also the else portion
|
||||
// pub fn metaInfo(self: @This()) struct { service: @TypeOf(sts), action: @TypeOf(sts.get_caller_identity) } {
|
||||
// return .{ .service = sts, .action = sts.get_caller_identity };
|
||||
|
@ -659,12 +588,8 @@ fn generateMetadataFunction(operation_name: []const u8, state: GenerationState,
|
|||
try outputIndent(child_state, writer);
|
||||
_ = try writer.write("}\n");
|
||||
try outputIndent(state, writer);
|
||||
|
||||
if (options.end_structure) {
|
||||
try writer.writeByte('}');
|
||||
}
|
||||
try writer.writeByte('}');
|
||||
}
|
||||
|
||||
fn getErrorName(err_name: []const u8) []const u8 {
|
||||
if (endsWith("Exception", err_name))
|
||||
return err_name[0 .. err_name.len - "Exception".len];
|
||||
|
@ -679,26 +604,6 @@ fn endsWith(item: []const u8, str: []const u8) bool {
|
|||
return std.mem.eql(u8, item, str[str.len - item.len ..]);
|
||||
}
|
||||
|
||||
fn getTypeName(allocator: std.mem.Allocator, shape: smithy.ShapeInfo) ![]const u8 {
|
||||
const pascal_shape_name = try case.allocTo(allocator, .pascal, shape.name);
|
||||
const type_name = avoidReserved(pascal_shape_name);
|
||||
|
||||
switch (shape.shape) {
|
||||
// maps are named like "Tags"
|
||||
// this removes the trailing s and adds "KeyValue" suffix
|
||||
.map => {
|
||||
var name_slice = pascal_shape_name;
|
||||
|
||||
if (pascal_shape_name[pascal_shape_name.len - 1] == 's') {
|
||||
name_slice = pascal_shape_name[0 .. pascal_shape_name.len - 1];
|
||||
}
|
||||
|
||||
return try std.fmt.allocPrint(allocator, "{s}KeyValue", .{name_slice});
|
||||
},
|
||||
else => return type_name,
|
||||
}
|
||||
}
|
||||
|
||||
fn reuseCommonType(shape: smithy.ShapeInfo, writer: anytype, state: GenerationState) !bool {
|
||||
// We want to return if we're at the top level of the stack. There are three
|
||||
// reasons for this:
|
||||
|
@ -713,50 +618,59 @@ fn reuseCommonType(shape: smithy.ShapeInfo, writer: anytype, state: GenerationSt
|
|||
// can at least see the top level.
|
||||
// 3. When we come through at the end, we want to make sure we're writing
|
||||
// something or we'll have an infinite loop!
|
||||
|
||||
switch (shape.shape) {
|
||||
.structure, .uniontype, .map => {},
|
||||
else => return false,
|
||||
}
|
||||
|
||||
const type_name = try getTypeName(state.allocator, shape);
|
||||
defer state.allocator.free(type_name);
|
||||
|
||||
if (state.type_stack.items.len == 1) return false;
|
||||
var rc = false;
|
||||
if (state.file_state.shape_references.get(shape.id)) |r| {
|
||||
if (r > 1) {
|
||||
if (r > 1 and (shape.shape == .structure or shape.shape == .uniontype)) {
|
||||
rc = true;
|
||||
_ = try writer.write(type_name); // This can't possibly be this easy...
|
||||
_ = try writer.write(avoidReserved(shape.name)); // This can't possibly be this easy...
|
||||
if (state.file_state.additional_types_generated.getEntry(shape.name) == null)
|
||||
try state.file_state.additional_types_to_generate.append(shape);
|
||||
}
|
||||
}
|
||||
return rc;
|
||||
}
|
||||
fn shapeInfoForId(id: []const u8, state: GenerationState) !smithy.ShapeInfo {
|
||||
return state.file_state.shapes.get(id) orelse {
|
||||
std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
|
||||
return error.InvalidType;
|
||||
};
|
||||
}
|
||||
|
||||
/// return type is anyerror!void as this is a recursive function, so the compiler cannot properly infer error types
|
||||
fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!bool {
|
||||
const end_structure = options.end_structure;
|
||||
|
||||
fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState, end_structure: bool) anyerror!bool {
|
||||
var rc = false;
|
||||
|
||||
// We assume it must exist
|
||||
const shape_info = try smt.getShapeInfo(shape_id, state.file_state.shapes);
|
||||
const shape_info = try shapeInfoForId(shape_id, state);
|
||||
const shape = shape_info.shape;
|
||||
|
||||
// Check for ourselves up the stack
|
||||
const self_occurences: u8 = state.getTypeRecurrenceCount(shape_id);
|
||||
var self_occurences: u8 = 0;
|
||||
for (state.type_stack.items) |i| {
|
||||
// NOTE: shapes.get isn't providing a consistent pointer - is it allocating each time?
|
||||
// we will therefore need to compare ids
|
||||
if (std.mem.eql(u8, i.*.id, shape_info.id))
|
||||
self_occurences = self_occurences + 1;
|
||||
}
|
||||
// Debugging
|
||||
// if (std.mem.eql(u8, shape_info.name, "Expression")) {
|
||||
// std.log.info(" Type stack len: {d}, occurences: {d}\n", .{ type_stack.items.len, self_occurences });
|
||||
// if (type_stack.items.len > 15) {
|
||||
// std.log.info(" Type stack:\n", .{});
|
||||
// for (type_stack.items) |i|
|
||||
// std.log.info(" {s}: {*}", .{ i.*.id, i });
|
||||
// return error.BugDetected;
|
||||
// }
|
||||
// }
|
||||
// End Debugging
|
||||
if (self_occurences > 2) { // TODO: What's the appropriate number here?
|
||||
// TODO: Determine if this warrants the creation of another public
|
||||
// type to properly reference. Realistically, AWS or the service
|
||||
// must be blocking deep recursion somewhere or this would be a great
|
||||
// DOS attack
|
||||
try generateSimpleTypeFor("nothing", "[]const u8", writer);
|
||||
|
||||
if (verbose) {
|
||||
std.log.warn("Type cycle detected, limiting depth. Type: {s}", .{shape_id});
|
||||
}
|
||||
std.log.warn("Type cycle detected, limiting depth. Type: {s}", .{shape_id});
|
||||
// if (std.mem.eql(u8, "com.amazonaws.workmail#Timestamp", shape_id)) {
|
||||
// std.log.info(" Type stack:\n", .{});
|
||||
// for (state.type_stack.items) |i|
|
||||
|
@ -764,14 +678,12 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
|
|||
// }
|
||||
return false; // not a map
|
||||
}
|
||||
|
||||
try state.appendToTypeStack(&shape_info);
|
||||
defer state.popFromTypeStack();
|
||||
|
||||
try state.type_stack.append(&shape_info);
|
||||
defer _ = state.type_stack.pop();
|
||||
switch (shape) {
|
||||
.structure => {
|
||||
if (!try reuseCommonType(shape_info, writer, state)) {
|
||||
try generateComplexTypeFor(shape_id, shape.structure.members, "struct", writer, state, options);
|
||||
                try generateComplexTypeFor(shape_id, shape.structure.members, "struct", writer, state);
                if (end_structure) {
                    // epilog
                    try outputIndent(state, writer);

@@ -781,7 +693,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
        },
        .uniontype => {
            if (!try reuseCommonType(shape_info, writer, state)) {
                try generateComplexTypeFor(shape_id, shape.uniontype.members, "union", writer, state, options);
                try generateComplexTypeFor(shape_id, shape.uniontype.members, "union", writer, state);
                // epilog
                try outputIndent(state, writer);
                _ = try writer.write("}");

@@ -789,32 +701,55 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
        },
        // Document is unstructured data, so bag of bytes it is
        // https://smithy.io/2.0/spec/simple-types.html#document
        .string, .@"enum", .document, .blob => try generateSimpleTypeFor({}, "[]const u8", writer),
        .document => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
        .string => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
        .unit => |s| try generateSimpleTypeFor(s, "struct {}", writer), // Would be better as void, but doing so creates inconsistency we don't want clients to have to deal with
        .@"enum" => |s| try generateSimpleTypeFor(s, "[]const u8", writer), // This should be closer to uniontype, but the generated code will look ugly, and Smithy 2.0 requires that enums are open (clients accept unspecified values). So string is the best analog
        .integer => |s| try generateSimpleTypeFor(s, "i64", writer),
        .list => |s| {
        .list => {
            _ = try writer.write("[]");
            // The serializer will have to deal with the idea we might be an array
            return try generateTypeFor(s.member_target, writer, state, options.endStructure(true));
            return try generateTypeFor(shape.list.member_target, writer, state, true);
        },
        .set => |s| {
        .set => {
            _ = try writer.write("[]");
            // The serializer will have to deal with the idea we might be an array
            return try generateTypeFor(s.member_target, writer, state, options.endStructure(true));
            return try generateTypeFor(shape.set.member_target, writer, state, true);
        },
        .timestamp => |s| try generateSimpleTypeFor(s, "date.Timestamp", writer),
        .timestamp => |s| try generateSimpleTypeFor(s, "f128", writer),
        .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
        .boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
        .double => |s| try generateSimpleTypeFor(s, "f64", writer),
        .float => |s| try generateSimpleTypeFor(s, "f32", writer),
        .long => |s| try generateSimpleTypeFor(s, "i64", writer),
        .map => |m| {
            if (!try reuseCommonType(shape_info, std.io.null_writer, state)) {
                try generateMapTypeFor(m, writer, state, options);
                rc = true;
            } else {
                try writer.writeAll("[]");
                _ = try reuseCommonType(shape_info, writer, state);
        .map => {
            _ = try writer.write("[]struct {\n");
            var child_state = state;
            child_state.indent_level += 1;
            try outputIndent(child_state, writer);
            _ = try writer.write("key: ");
            try writeOptional(shape.map.traits, writer, null);
            var sub_maps = std.ArrayList([]const u8).init(state.allocator);
            defer sub_maps.deinit();
            if (try generateTypeFor(shape.map.key, writer, child_state, true))
                try sub_maps.append("key");
            try writeOptional(shape.map.traits, writer, " = null");
            _ = try writer.write(",\n");
            try outputIndent(child_state, writer);
            _ = try writer.write("value: ");
            try writeOptional(shape.map.traits, writer, null);
            if (try generateTypeFor(shape.map.value, writer, child_state, true))
                try sub_maps.append("value");
            try writeOptional(shape.map.traits, writer, " = null");
            _ = try writer.write(",\n");
            if (sub_maps.items.len > 0) {
                _ = try writer.write("\n");
                try writeStringify(state, sub_maps.items, writer);
            }
            try outputIndent(state, writer);
            _ = try writer.write("}");

            rc = true;
        },
        else => {
            std.log.err("encountered unimplemented shape type {s} for shape_id {s}. Generated code will not compile", .{ @tagName(shape), shape_id });

@@ -825,58 +760,41 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
    return rc;
}

fn generateMapTypeFor(map: anytype, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!void {
    _ = try writer.write("struct {\n");

    try writer.writeAll("pub const is_map_type = true;\n\n");

    var child_state = state;
    child_state.indent_level += 1;

    _ = try writer.write("key: ");
    _ = try generateTypeFor(map.key, writer, child_state, options.endStructure(true));
    _ = try writer.write(",\n");

    const value_shape_info = try smt.getShapeInfo(map.value, state.file_state.shapes);
    const value_traits = smt.getShapeTraits(value_shape_info.shape);

    _ = try writer.write("value: ");
    try writeOptional(value_traits, writer, null);
    _ = try generateTypeFor(map.value, writer, child_state, options.endStructure(true));

    _ = try writer.write(",\n");
    _ = try writer.write("}");
}
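
// Illustration (not part of the diff; the key/value types are assumed for the
// example, as they depend on the map's target shapes): for a map shape,
// generateMapTypeFor above emits a struct along these lines, with the "?"
// added by writeOptional only when the value shape is not marked required:
//
//     struct {
//         pub const is_map_type = true;
//         key: []const u8,
//         value: ?[]const u8,
//     }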

fn generateSimpleTypeFor(_: anytype, type_name: []const u8, writer: anytype) !void {
    _ = try writer.write(type_name); // This had required stuff but the problem was elsewhere. Better to leave as function just in case
}

const Mapping = struct { snake: []const u8, original: []const u8 };
fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!void {
fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, writer: anytype, state: GenerationState) anyerror!void {
    _ = shape_id;

    var arena = std.heap.ArenaAllocator.init(state.allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    var field_name_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
    defer field_name_mappings.deinit();
    const Mapping = struct { snake: []const u8, original: []const u8 };
    var field_name_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
    defer {
        for (field_name_mappings.items) |mapping|
            state.allocator.free(mapping.snake);
        field_name_mappings.deinit();
    }
    // There is an httpQueryParams trait as well, but nobody is using it. API GW
    // pretends to, but it's an empty map
    //
    // Same with httpPayload
    //
    // httpLabel is interesting - right now we just assume anything can be used - do we need to track this?
    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
    defer http_query_mappings.deinit();

    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
    defer http_header_mappings.deinit();

    var map_fields = std.ArrayList([]const u8).init(allocator);
    defer map_fields.deinit();

    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
    defer {
        for (http_query_mappings.items) |mapping|
            state.allocator.free(mapping.snake);
        http_query_mappings.deinit();
    }
    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
    defer {
        for (http_header_mappings.items) |mapping|
            state.allocator.free(mapping.snake);
        http_header_mappings.deinit();
    }
    var map_fields = std.ArrayList([]const u8).init(state.allocator);
    defer {
        for (map_fields.items) |f| state.allocator.free(f);
        map_fields.deinit();
    }
    // prolog. We'll rely on caller to get the spacing correct here
    _ = try writer.write(type_type_name);
    _ = try writer.write(" {\n");

@@ -885,7 +803,7 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
    var payload: ?[]const u8 = null;
    for (members) |member| {
        // This is our mapping
        const snake_case_member = try support.constantName(allocator, member.name, .snake);
        const snake_case_member = try snake.fromPascalCase(state.allocator, member.name);
        // So it looks like some services have duplicate names?! Check out "httpMethod"
        // in API Gateway. Not sure what we're supposed to do there. Checking the go
        // sdk, they move this particular duplicate to 'http_method' - not sure yet

@@ -895,42 +813,34 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
            switch (trait) {
                .json_name => |n| {
                    found_name_trait = true;
                    field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n });
                    field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n });
                },
                .xml_name => |n| {
                    found_name_trait = true;
                    field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n });
                    field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n });
                },
                .http_query => |n| http_query_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n }),
                .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = trait.http_header }),
                .http_query => |n| http_query_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n }),
                .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = trait.http_header }),
                .http_payload => {
                    // Don't assert as that will be optimized for Release* builds
                    // We'll continue here and treat the above as a warning
                    if (payload) |first| {
                        std.log.warn("Found multiple httpPayloads in violation of smithy spec! Ignoring '{s}' and using '{s}'", .{ first, snake_case_member });
                        std.log.err("Found multiple httpPayloads in violation of smithy spec! Ignoring '{s}' and using '{s}'", .{ first, snake_case_member });
                    }
                    payload = try allocator.dupe(u8, snake_case_member);
                    payload = try state.allocator.dupe(u8, snake_case_member);
                },
                else => {},
            }
        }
        if (!found_name_trait)
            field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = member.name });

            field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = member.name });
        defer state.allocator.free(snake_case_member);
        try outputIndent(child_state, writer);

        const member_name = blk: {
            if (options.key_case == .snake) {
                break :blk avoidReserved(snake_case_member);
            }

            break :blk avoidReserved(try case.allocTo(allocator, options.key_case, snake_case_member));
        };

        const member_name = avoidReserved(snake_case_member);
        try writer.print("{s}: ", .{member_name});
        try writeOptional(member.traits, writer, null);
        if (try generateTypeFor(member.target, writer, child_state, options.endStructure(true)))
            try map_fields.append(try std.fmt.allocPrint(allocator, "{s}", .{member_name}));
        if (try generateTypeFor(member.target, writer, child_state, true))
            try map_fields.append(try std.fmt.allocPrint(state.allocator, "{s}", .{member_name}));

        if (!std.mem.eql(u8, "union", type_type_name))
            try writeOptional(member.traits, writer, " = null");

@@ -962,7 +872,7 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
    if (payload) |load| {
        try writer.writeByte('\n');
        try outputIndent(child_state, writer);
        try writer.print("pub const http_payload: []const u8 = \"{s}\";\n", .{load});
        try writer.print("pub const http_payload: []const u8 = \"{s}\";", .{load});
    }

    try writer.writeByte('\n');

@@ -976,6 +886,34 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
    _ = try writer.write("return @field(mappings, field_name);\n");
    try outputIndent(child_state, writer);
    _ = try writer.write("}\n");
    try writeStringify(child_state, map_fields.items, writer);
}

fn writeStringify(state: GenerationState, fields: [][]const u8, writer: anytype) !void {
    if (fields.len > 0) {
        // pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {
        //     if (std.mem.eql(u8, "tags", field_name))
        //         return try serializeMap(self.tags, self.jsonFieldNameFor("tags"), options, out_stream);
        //     return false;
        // }
        var child_state = state;
        child_state.indent_level += 1;
        try writer.writeByte('\n');
        try outputIndent(state, writer);
        _ = try writer.write("pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {\n");
        var return_state = child_state;
        return_state.indent_level += 1;
        for (fields) |field| {
            try outputIndent(child_state, writer);
            try writer.print("if (std.mem.eql(u8, \"{s}\", field_name))\n", .{field});
            try outputIndent(return_state, writer);
            try writer.print("return try serializeMap(self.{s}, self.fieldNameFor(\"{s}\"), options, out_stream);\n", .{ field, field });
        }
        try outputIndent(child_state, writer);
        _ = try writer.write("return false;\n");
        try outputIndent(state, writer);
        _ = try writer.write("}\n");
    }
}

fn writeMappings(state: GenerationState, @"pub": []const u8, mapping_name: []const u8, mappings: anytype, force_output: bool, writer: anytype) !void {

@@ -999,20 +937,30 @@ fn writeMappings(state: GenerationState, @"pub": []const u8, mapping_name: []con
}

fn writeOptional(traits: ?[]smithy.Trait, writer: anytype, value: ?[]const u8) !void {
    if (traits) |ts| if (smt.hasTrait(.required, ts)) return;
    try writer.writeAll(value orelse "?");
    if (traits) |ts| {
        for (ts) |t|
            if (t == .required) return;
    }

    // not required
    if (value) |v| {
        _ = try writer.write(v);
    } else _ = try writer.write("?");
}
fn avoidReserved(name: []const u8) []const u8 {
    if (std.mem.eql(u8, name, "error")) return "@\"error\"";
    if (std.mem.eql(u8, name, "return")) return "@\"return\"";
    if (std.mem.eql(u8, name, "not")) return "@\"not\"";
    if (std.mem.eql(u8, name, "and")) return "@\"and\"";
    if (std.mem.eql(u8, name, "or")) return "@\"or\"";
    if (std.mem.eql(u8, name, "test")) return "@\"test\"";
    if (std.mem.eql(u8, name, "null")) return "@\"null\"";
    if (std.mem.eql(u8, name, "export")) return "@\"export\"";
    if (std.mem.eql(u8, name, "union")) return "@\"union\"";
    if (std.mem.eql(u8, name, "enum")) return "@\"enum\"";
    if (std.mem.eql(u8, name, "inline")) return "@\"inline\"";
    return name;
fn camelCase(allocator: std.mem.Allocator, name: []const u8) ![]const u8 {
    const first_letter = name[0] + ('a' - 'A');
    return try std.fmt.allocPrint(allocator, "{c}{s}", .{ first_letter, name[1..] });
}
fn avoidReserved(snake_name: []const u8) []const u8 {
    if (std.mem.eql(u8, snake_name, "error")) return "@\"error\"";
    if (std.mem.eql(u8, snake_name, "return")) return "@\"return\"";
    if (std.mem.eql(u8, snake_name, "not")) return "@\"not\"";
    if (std.mem.eql(u8, snake_name, "and")) return "@\"and\"";
    if (std.mem.eql(u8, snake_name, "or")) return "@\"or\"";
    if (std.mem.eql(u8, snake_name, "test")) return "@\"test\"";
    if (std.mem.eql(u8, snake_name, "null")) return "@\"null\"";
    if (std.mem.eql(u8, snake_name, "export")) return "@\"export\"";
    if (std.mem.eql(u8, snake_name, "union")) return "@\"union\"";
    if (std.mem.eql(u8, snake_name, "enum")) return "@\"enum\"";
    return snake_name;
}
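
// Illustration (not part of the diff; member name and type are assumed for the
// example): at the generateComplexTypeFor call sites, writeOptional is invoked
// once with null (emitting "?" before the type) and once with " = null"
// (emitting the default after it), so a member without the smithy `required`
// trait renders as
//
//     some_member: ?[]const u8 = null,
//
// while a required member short-circuits both calls and renders as plain
// `some_member: []const u8,`. Both versions of writeOptional above behave the
// same way; only the trait lookup differs.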

@@ -1 +0,0 @@
pub const json = @import("serialization/json.zig");

@@ -1,392 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
const smithy_tools = @import("../smithy_tools.zig");
const support = @import("../support.zig");

const GenerationState = @import("../GenerationState.zig");
const GenerateTypeOptions = @import("../GenerateTypeOptions.zig");
const Allocator = std.mem.Allocator;

const Shape = smithy_tools.Shape;

const JsonMember = struct {
    field_name: []const u8,
    json_key: []const u8,
    target: []const u8,
    type_member: smithy.TypeMember,
    shape_info: smithy.ShapeInfo,
};

pub fn generateToJsonFunction(shape_id: []const u8, writer: std.io.AnyWriter, state: GenerationState, comptime options: GenerateTypeOptions) !void {
    _ = options;
    const allocator = state.allocator;

    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            try writer.writeAll("pub fn jsonStringify(self: @This(), jw: anytype) !void {\n");
            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, "self", member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");
            try writer.writeAll("}\n\n");
        }
    }
}

fn getJsonMembers(allocator: Allocator, shape: Shape, state: GenerationState) !?std.ArrayListUnmanaged(JsonMember) {
    const is_json_shape = switch (state.file_state.protocol) {
        .json_1_0, .json_1_1, .rest_json_1 => true,
        else => false,
    };

    if (!is_json_shape) {
        return null;
    }

    var hash_map = std.StringHashMapUnmanaged(smithy.TypeMember){};

    const shape_members = smithy_tools.getShapeMembers(shape);
    for (shape_members) |member| {
        try hash_map.putNoClobber(state.allocator, member.name, member);
    }

    for (shape_members) |member| {
        for (member.traits) |trait| {
            switch (trait) {
                .http_header, .http_query => {
                    std.debug.assert(hash_map.remove(member.name));
                    break;
                },
                else => continue,
            }
        }
    }

    if (hash_map.count() == 0) {
        return null;
    }

    var json_members = std.ArrayListUnmanaged(JsonMember){};

    var iter = hash_map.iterator();
    while (iter.next()) |kvp| {
        const member = kvp.value_ptr.*;

        const key = blk: {
            if (smithy_tools.findTrait(.json_name, member.traits)) |trait| {
                break :blk trait.json_name;
            }

            break :blk member.name;
        };

        try json_members.append(allocator, .{
            .field_name = try support.constantName(allocator, member.name, .snake),
            .json_key = key,
            .target = member.target,
            .type_member = member,
            .shape_info = try smithy_tools.getShapeInfo(member.target, state.file_state.shapes),
        });
    }

    return json_members;
}

fn getMemberValueJson(allocator: std.mem.Allocator, source: []const u8, member: JsonMember) ![]const u8 {
    const member_value = try std.fmt.allocPrint(allocator, "@field({s}, \"{s}\")", .{ source, member.field_name });
    defer allocator.free(member_value);

    var output_block = std.ArrayListUnmanaged(u8){};
    const writer = output_block.writer(allocator);

    try writeMemberValue(
        writer,
        member_value,
    );

    return output_block.toOwnedSlice(allocator);
}

fn getShapeJsonValueType(shape: Shape) []const u8 {
    return switch (shape) {
        .string, .@"enum", .blob, .document, .timestamp => ".string",
        .boolean => ".bool",
        .integer, .bigInteger, .short, .long => ".integer",
        .float, .double, .bigDecimal => ".float",
        else => std.debug.panic("Unexpected shape: {}", .{shape}),
    };
}

fn writeMemberValue(
    writer: anytype,
    member_value: []const u8,
) !void {
    try writer.writeAll(member_value);
}

const WriteMemberJsonParams = struct {
    shape_id: []const u8,
    field_name: []const u8,
    field_value: []const u8,
    state: GenerationState,
    member: smithy.TypeMember,
};

fn writeStructureJson(params: WriteMemberJsonParams, writer: std.io.AnyWriter) !void {
    const shape_type = "structure";
    const allocator = params.state.allocator;
    const state = params.state;

    const shape_info = try smithy_tools.getShapeInfo(params.shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    const structure_name = try std.fmt.allocPrint(params.state.allocator, "{s}_{s}_{d}", .{ params.field_name, shape_type, state.indent_level });
    defer params.state.allocator.free(structure_name);

    const object_value_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{structure_name});
    defer allocator.free(object_value_capture);

    try writer.print("\n// start {s}: {s}\n", .{ shape_type, structure_name });
    defer writer.print("// end {s}: {s}\n", .{ shape_type, structure_name }) catch std.debug.panic("Unreachable", .{});

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            const is_optional = smithy_tools.shapeIsOptional(params.member.traits);

            var object_value = params.field_value;

            if (is_optional) {
                object_value = object_value_capture;

                try writer.print("if ({s}) |{s}|", .{ params.field_value, object_value_capture });
                try writer.writeAll("{\n");
            }

            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            // this is a workaround in case a child structure doesn't have any fields
            // and therefore doesn't use the structure variable so we capture it here.
            // the compiler should optimize this away
            try writer.print("const unused_capture_{s} = {s};\n", .{ structure_name, object_value });
            try writer.print("_ = unused_capture_{s};\n", .{structure_name});

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, object_value, member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");

            if (is_optional) {
                try writer.writeAll("} else {\n");
                try writer.writeAll("try jw.write(null);\n");
                try writer.writeAll("}\n");
            }
        }
    }
}

fn writeListJson(list: smithy_tools.ListShape, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const state = params.state;
    const allocator = state.allocator;

    const list_name = try std.fmt.allocPrint(allocator, "{s}_list_{d}", .{ params.field_name, state.indent_level });
    defer state.allocator.free(list_name);

    try writer.print("\n// start list: {s}\n", .{list_name});
    defer writer.print("// end list: {s}\n", .{list_name}) catch std.debug.panic("Unreachable", .{});

    const list_each_value = try std.fmt.allocPrint(allocator, "{s}_value", .{list_name});
    defer allocator.free(list_each_value);

    const list_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{list_name});
    defer allocator.free(list_capture);

    {
        const list_is_optional = smithy_tools.shapeIsOptional(list.traits);

        var list_value = params.field_value;

        if (list_is_optional) {
            list_value = list_capture;

            try writer.print("if ({s}) |{s}| ", .{
                params.field_value,
                list_capture,
            });
            try writer.writeAll("{\n");
        }

        // start loop
        try writer.writeAll("try jw.beginArray();\n");
        try writer.print("for ({s}) |{s}|", .{ list_value, list_each_value });
        try writer.writeAll("{\n");
        try writer.writeAll("try jw.write(");
        try writeMemberValue(
            writer,
            list_each_value,
        );
        try writer.writeAll(");\n");
        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endArray();\n");
        // end loop

        if (list_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeMapJson(map: smithy_tools.MapShape, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const state = params.state;
    const name = params.field_name;
    const value = params.field_value;
    const allocator = state.allocator;

    const map_name = try std.fmt.allocPrint(allocator, "{s}_object_map_{d}", .{ name, state.indent_level });
    defer allocator.free(map_name);

    try writer.print("\n// start map: {s}\n", .{map_name});
    defer writer.print("// end map: {s}\n", .{map_name}) catch std.debug.panic("Unreachable", .{});

    const map_value_capture = try std.fmt.allocPrint(allocator, "{s}_kvp", .{map_name});
    defer allocator.free(map_value_capture);

    const map_capture_key = try std.fmt.allocPrint(allocator, "{s}.key", .{map_value_capture});
    defer allocator.free(map_capture_key);

    const map_capture_value = try std.fmt.allocPrint(allocator, "{s}.value", .{map_value_capture});
    defer allocator.free(map_capture_value);

    const value_shape_info = try smithy_tools.getShapeInfo(map.value, state.file_state.shapes);

    const value_member = smithy.TypeMember{
        .name = "value",
        .target = map.value,
        .traits = smithy_tools.getShapeTraits(value_shape_info.shape),
    };

    const map_capture = try std.fmt.allocPrint(state.allocator, "{s}_capture", .{map_name});

    {
        const map_member = params.member;
        const map_is_optional = !smithy_tools.hasTrait(.required, map_member.traits);

        var map_value = value;

        if (map_is_optional) {
            map_value = map_capture;

            try writer.print("if ({s}) |{s}| ", .{
                value,
                map_capture,
            });
            try writer.writeAll("{\n");
        }

        try writer.writeAll("try jw.beginObject();\n");
        try writer.writeAll("{\n");

        // start loop
        try writer.print("for ({s}) |{s}|", .{ map_value, map_value_capture });
        try writer.writeAll("{\n");
        try writer.print("try jw.objectField({s});\n", .{map_capture_key});

        try writeMemberJson(.{
            .shape_id = map.value,
            .field_name = "value",
            .field_value = map_capture_value,
            .state = state.indent(),
            .member = value_member,
        }, writer);

        try writer.writeAll("}\n");
        // end loop

        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endObject();\n");

        if (map_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeScalarJson(comment: []const u8, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    try writer.print("try jw.write({s}); // {s}\n\n", .{ params.field_value, comment });
}

fn writeMemberJson(params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const shape_id = params.shape_id;
    const state = params.state;
    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (state.getTypeRecurrenceCount(shape_id) > 2) {
        return;
    }

    try state.appendToTypeStack(&shape_info);
    defer state.popFromTypeStack();

    switch (shape) {
        .structure, .uniontype => try writeStructureJson(params, writer),
        .list => |l| try writeListJson(l, params, writer),
        .map => |m| try writeMapJson(m, params, writer),
        .timestamp => try writeScalarJson("timestamp", params, writer),
        .string => try writeScalarJson("string", params, writer),
        .@"enum" => try writeScalarJson("enum", params, writer),
        .document => try writeScalarJson("document", params, writer),
        .blob => try writeScalarJson("blob", params, writer),
        .boolean => try writeScalarJson("bool", params, writer),
        .float => try writeScalarJson("float", params, writer),
        .integer => try writeScalarJson("integer", params, writer),
        .long => try writeScalarJson("long", params, writer),
        .double => try writeScalarJson("double", params, writer),
        .bigDecimal => try writeScalarJson("bigDecimal", params, writer),
        .bigInteger => try writeScalarJson("bigInteger", params, writer),
        .unit => try writeScalarJson("unit", params, writer),
        .byte => try writeScalarJson("byte", params, writer),
        .short => try writeScalarJson("short", params, writer),
        .service, .resource, .operation, .member, .set => std.debug.panic("Shape type not supported: {}", .{shape}),
    }
}

@@ -1,67 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");

pub const Shape = @FieldType(smithy.ShapeInfo, "shape");
pub const ServiceShape = @TypeOf((Shape{ .service = undefined }).service);
pub const ListShape = @TypeOf((Shape{ .list = undefined }).list);
pub const MapShape = @TypeOf((Shape{ .map = undefined }).map);

pub fn getShapeInfo(id: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo)) !smithy.ShapeInfo {
    return shapes.get(id) orelse {
        std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
        return error.InvalidType;
    };
}

pub fn getShapeTraits(shape: Shape) []smithy.Trait {
    return switch (shape) {
        .service, .operation, .resource => std.debug.panic("Unexpected shape type: {}", .{shape}),
        inline else => |s| s.traits,
    };
}

pub fn getShapeMembers(shape: Shape) []smithy.TypeMember {
    return switch (shape) {
        inline .structure, .uniontype => |s| s.members,
        else => std.debug.panic("Unexpected shape type: {}", .{shape}),
    };
}

pub fn shapeIsLeaf(shape: Shape) bool {
    return switch (shape) {
        .@"enum",
        .bigDecimal,
        .bigInteger,
        .blob,
        .boolean,
        .byte,
        .document,
        .double,
        .float,
        .integer,
        .long,
        .short,
        .string,
        .timestamp,
        => true,
        else => false,
    };
}

pub fn shapeIsOptional(traits: []smithy.Trait) bool {
    return !hasTrait(.required, traits);
}

pub fn findTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) ?smithy.Trait {
    for (traits) |trait| {
        if (trait == trait_type) {
            return trait;
        }
    }

    return null;
}

pub fn hasTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) bool {
    return findTrait(trait_type, traits) != null;
}

157 codegen/src/snake.zig (new file)

@@ -0,0 +1,157 @@
const std = @import("std");
const expectEqualStrings = std.testing.expectEqualStrings;

pub fn fromPascalCase(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    const rc = try allocator.alloc(u8, name.len * 2); // This is overkill, but is > the maximum length possibly needed
    errdefer allocator.free(rc);
    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
    var target_inx: u64 = 0;
    var curr_char = (try isAscii(utf8_name.nextCodepoint())).?;
    target_inx = setNext(lowercase(curr_char), rc, target_inx);
    var prev_char = curr_char;
    if (try isAscii(utf8_name.nextCodepoint())) |ch| {
        curr_char = ch;
    } else {
        // Single character only - we're done here
        _ = setNext(0, rc, target_inx);
        return rc[0..target_inx];
    }
    while (try isAscii(utf8_name.nextCodepoint())) |next_char| {
        if (next_char == ' ') {
            // a space shouldn't be happening. But if it does, it clues us
            // in pretty well:
            //
            // MyStuff Is Awesome
            //        |^
            //        |next_char
            //        ^
            //        prev_codepoint/ascii_prev_char (and target_inx)
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            target_inx = setNext('_', rc, target_inx);
            var maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                std.log.err("Error on fromPascalCase processing name '{s}'", .{name});
            }
            curr_char = maybe_curr_char.?;
            maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                // We have reached the end of the string (e.g. "Resource Explorer 2")
                // We need to do this check before we setNext, so that we don't
                // end up duplicating the last character
                break;
                // std.log.err("Error on fromPascalCase processing name '{s}', curr_char = '{}'", .{ name, curr_char });
            }
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            prev_char = curr_char;
            curr_char = maybe_curr_char.?;
            continue;
        }
        if (between(curr_char, 'A', 'Z')) {
            if (isAcronym(curr_char, next_char)) {
                // We could be in an acronym at the start of a word. This
                // is the only case where we actually need to look back at the
                // previous character, and if that's the case, throw in an
                // underscore
                // "SAMLMySAMLAcronymThing");
                if (between(prev_char, 'a', 'z'))
                    target_inx = setNext('_', rc, target_inx);

                //we are in an acronym - don't snake, just lower
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            } else {
                target_inx = setNext('_', rc, target_inx);
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            }
        } else {
            target_inx = setNext(curr_char, rc, target_inx);
        }
        prev_char = curr_char;
        curr_char = next_char;
    }
    // work in the last codepoint - force lowercase
    target_inx = setNext(lowercase(curr_char), rc, target_inx);

    rc[target_inx] = 0;
    _ = allocator.resize(rc, target_inx);
    return rc[0..target_inx];
}

fn isAcronym(char1: u8, char2: u8) bool {
    return isAcronymChar(char1) and isAcronymChar(char2);
}
fn isAcronymChar(char: u8) bool {
    return between(char, 'A', 'Z') or between(char, '0', '9');
}
fn isAscii(codepoint: ?u21) !?u8 {
    if (codepoint) |cp| {
        if (cp > 0xff) return error.UnicodeNotSupported;
        return @as(u8, @truncate(cp));
    }
    return null;
}

fn setNext(ascii: u8, slice: []u8, inx: u64) u64 {
    slice[inx] = ascii;
    return inx + 1;
}

fn lowercase(ascii: u8) u8 {
    var lowercase_char = ascii;
    if (between(ascii, 'A', 'Z'))
        lowercase_char = ascii + ('a' - 'A');
    return lowercase_char;
}

fn between(char: u8, from: u8, to: u8) bool {
    return char >= from and char <= to;
}

test "converts from PascalCase to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "MyPascalCaseThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("my_pascal_case_thing", snake_case);
}
test "handles from PascalCase acronyms to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "SAMLMySAMLAcronymThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("saml_my_saml_acronym_thing", snake_case);
}
test "spaces in the name" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "API Gateway");
    defer allocator.free(snake_case);
    try expectEqualStrings("api_gateway", snake_case);
}

test "S3" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "S3");
    defer allocator.free(snake_case);
    try expectEqualStrings("s3", snake_case);
}

test "ec2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "EC2");
    defer allocator.free(snake_case);
    try expectEqualStrings("ec2", snake_case);
}

test "IoT 1Click Devices Service" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "IoT 1Click Devices Service");
    defer allocator.free(snake_case);
    // NOTE: There is some debate among humans about what this should
    // turn into. Should it be iot_1click_... or iot_1_click...?
    try expectEqualStrings("iot_1_click_devices_service", snake_case);
}
test "Resource Explorer 2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "Resource Explorer 2");
    defer allocator.free(snake_case);
    // NOTE: There is some debate among humans about what this should
    // turn into. Should it be iot_1click_... or iot_1_click...?
    try expectEqualStrings("resource_explorer_2", snake_case);
}
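
// Minimal usage sketch (not part of the diff): fromPascalCase allocates the
// result, so callers own the returned slice, exactly as the tests above do:
//
//     const snake_case = try fromPascalCase(allocator, "MyPascalCaseThing");
//     defer allocator.free(snake_case);
//     // snake_case now holds "my_pascal_case_thing"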

@@ -1,33 +0,0 @@
const std = @import("std");
const case = @import("case");

const Allocator = std.mem.Allocator;

pub fn constantName(allocator: Allocator, id: []const u8, comptime to_case: case.Case) ![]const u8 {
    // There are some ids that don't follow consistent rules, so we'll
    // look for the exceptions and, if not found, revert to the snake case
    // algorithm

    var buf = std.mem.zeroes([256]u8);
    @memcpy(buf[0..id.len], id);

    var name = try allocator.dupe(u8, id);

    const simple_replacements = &.{
        &.{ "DevOps", "Devops" },
        &.{ "IoT", "Iot" },
        &.{ "FSx", "Fsx" },
        &.{ "CloudFront", "Cloudfront" },
    };

    inline for (simple_replacements) |rep| {
        if (std.mem.indexOf(u8, name, rep[0])) |idx| @memcpy(name[idx .. idx + rep[0].len], rep[1]);
    }

    if (to_case == .snake) {
        if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
        if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});
    }

    return try case.allocTo(allocator, to_case, name);
}

@@ -1,41 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const lib_mod = b.addModule("date", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .optimize = optimize,
    });

    const lib = b.addLibrary(.{
        .linkage = .static,
        .name = "date",
        .root_module = lib_mod,
    });

    b.installArtifact(lib);

    const lib_unit_tests = b.addTest(.{
        .root_module = lib_mod,
    });

    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_lib_unit_tests.step);

    const dep_zeit = b.dependency("zeit", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("zeit", dep_zeit.module("zeit"));

    const dep_json = b.dependency("json", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("json", dep_json.module("json"));
}

@@ -1,20 +0,0 @@
.{
    .name = .date,
    .version = "0.0.0",
    .fingerprint = 0xaa9e377a226d739e, // Changing this has security and trust implications.
    .minimum_zig_version = "0.14.0",
    .dependencies = .{
        .zeit = .{
            .url = "git+https://github.com/rockorager/zeit#f86d568b89a5922f084dae524a1eaf709855cd5e",
            .hash = "zeit-0.6.0-5I6bkzt5AgC1_BCuSzXkV0JHeF4Mhti1Z_jFC7E_nmD2",
        },
        .json = .{
            .path = "../json",
        },
    },
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}

@@ -1,118 +0,0 @@
const std = @import("std");
const log = std.log.scoped(.date);
const zeit = @import("zeit");

pub const DateTime = struct {
    day: u8,
    month: u8,
    year: u16,
    hour: u8,
    minute: u8,
    second: u8,

    pub fn fromInstant(val: zeit.Instant) DateTime {
        return fromTime(val.time());
    }

    pub fn fromTime(val: zeit.Time) DateTime {
        return DateTime{
            .day = val.day,
            .month = @intFromEnum(val.month),
            .year = @intCast(val.year),
            .hour = val.hour,
            .minute = val.minute,
            .second = val.second,
        };
    }

    pub fn time(self: DateTime) zeit.Time {
        return zeit.Time{
            .day = @intCast(self.day),
            .month = @enumFromInt(self.month),
            .year = self.year,
            .hour = @intCast(self.hour),
            .minute = @intCast(self.minute),
            .second = @intCast(self.second),
        };
    }

    pub fn instant(self: DateTime) !zeit.Instant {
        return try zeit.instant(.{ .source = .{ .time = self.time() } });
    }
};

pub fn timestampToDateTime(timestamp: zeit.Seconds) DateTime {
    const ins = zeit.instant(.{ .source = .{ .unix_timestamp = timestamp } }) catch @panic("Failed to create instant from timestamp");
    return DateTime.fromInstant(ins);
}

pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}

/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .rfc1123 = data } });
    return DateTime.fromInstant(ins);
}

pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}

/// Converts a string to a timestamp value. May not handle dates before the
/// epoch
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .iso8601 = data } });
    return DateTime.fromInstant(ins);
}

pub fn dateTimeToTimestamp(datetime: DateTime) !zeit.Seconds {
    return (try datetime.instant()).unixTimestamp();
}

fn printDateTime(dt: DateTime) void {
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}

pub fn printNowUtc() void {
    printDateTime(timestampToDateTime(std.time.timestamp()));
}

test "Convert timestamp to datetime" {
    printDateTime(timestampToDateTime(std.time.timestamp()));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));

    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
    // Get time for date: https://wtools.io/convert-date-time-to-unix-time
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}

test "Convert datetime to timestamp" {
    try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
    try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
    try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}

test "Convert ISO8601 string to timestamp" {
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}

test "Convert whatever AWS is sending us to timestamp" {
    const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
    try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}

@@ -1,20 +0,0 @@
const std = @import("std");
const testing = std.testing;

const parsing = @import("parsing.zig");
pub const DateTime = parsing.DateTime;
pub const timestampToDateTime = parsing.timestampToDateTime;
pub const parseEnglishToTimestamp = parsing.parseEnglishToTimestamp;
pub const parseEnglishToDateTime = parsing.parseEnglishToDateTime;
pub const parseIso8601ToTimestamp = parsing.parseIso8601ToTimestamp;
pub const parseIso8601ToDateTime = parsing.parseIso8601ToDateTime;
pub const dateTimeToTimestamp = parsing.dateTimeToTimestamp;
pub const printNowUtc = parsing.printNowUtc;

const timestamp = @import("timestamp.zig");
pub const DateFormat = timestamp.DateFormat;
pub const Timestamp = timestamp.Timestamp;

test {
    testing.refAllDeclsRecursive(@This());
}

@@ -1,69 +0,0 @@
const std = @import("std");
const zeit = @import("zeit");

pub const DateFormat = enum {
    rfc1123,
    iso8601,
};

pub const Timestamp = enum(zeit.Nanoseconds) {
    _,

    pub fn jsonStringify(value: Timestamp, jw: anytype) !void {
        const instant = zeit.instant(.{
            .source = .{
                .unix_nano = @intFromEnum(value),
            },
        }) catch std.debug.panic("Failed to parse timestamp to instant: {d}", .{value});

        const fmt = "Mon, 02 Jan 2006 15:04:05 GMT";
        var buf = std.mem.zeroes([fmt.len]u8);

        var fbs = std.io.fixedBufferStream(&buf);
        instant.time().gofmt(fbs.writer(), fmt) catch std.debug.panic("Failed to format instant: {d}", .{instant.timestamp});

        try jw.write(&buf);
    }

    pub fn parse(val: []const u8) !Timestamp {
        const date_format = blk: {
            if (std.ascii.isDigit(val[0])) {
                break :blk DateFormat.iso8601;
            } else {
                break :blk DateFormat.rfc1123;
            }
        };

        const ins = try zeit.instant(.{
            .source = switch (date_format) {
                DateFormat.iso8601 => .{
                    .iso8601 = val,
                },
                DateFormat.rfc1123 => .{
                    .rfc1123 = val,
                },
            },
        });

        return @enumFromInt(ins.timestamp);
    }
};

test Timestamp {
    const in_date = "Wed, 23 Apr 2025 11:23:45 GMT";

    const expected_ts: Timestamp = @enumFromInt(1745407425000000000);
    const actual_ts = try Timestamp.parse(in_date);

    try std.testing.expectEqual(expected_ts, actual_ts);

    var buf: [100]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    var counting_writer = std.io.countingWriter(fbs.writer());
    try Timestamp.jsonStringify(expected_ts, .{}, counting_writer.writer());

    const expected_json = "\"" ++ in_date ++ "\"";
    const actual_json = buf[0..counting_writer.bytes_written];

    try std.testing.expectEqualStrings(expected_json, actual_json);
}

@@ -1,29 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const lib_mod = b.addModule("json", .{
        .root_source_file = b.path("src/json.zig"),
        .target = target,
        .optimize = optimize,
    });

    const lib = b.addLibrary(.{
        .linkage = .static,
        .name = "json",
        .root_module = lib_mod,
    });

    b.installArtifact(lib);

    const lib_unit_tests = b.addTest(.{
        .root_module = lib_mod,
    });

    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_lib_unit_tests.step);
}

@@ -1,12 +0,0 @@
.{
    .name = .json,
    .version = "0.0.0",
    .fingerprint = 0x6b0725452065211c, // Changing this has security and trust implications.
    .minimum_zig_version = "0.14.0",
    .dependencies = .{},
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}

@@ -1,4 +0,0 @@
[tools]
pre-commit = "latest"
"ubi:DonIsaac/zlint" = "latest"
zig = "0.14.1"

825 src/aws.zig (file diff suppressed because it is too large)

@@ -463,19 +463,41 @@ fn s3BucketFromPath(path: []const u8) []const u8 {
/// allocator: Will be used only to construct the EndPoint struct
/// uri: string constructed in such a way that deallocation is needed
fn endPointFromUri(allocator: std.mem.Allocator, uri: []const u8, path: []const u8) !EndPoint {
    const parsed_uri = try std.Uri.parse(uri);

    const scheme = parsed_uri.scheme;
    const host = try allocator.dupe(u8, parsed_uri.host.?.percent_encoded);
    const port: u16 = blk: {
        if (parsed_uri.port) |port| break :blk port;
        if (std.mem.eql(u8, scheme, "http")) break :blk 80;
        if (std.mem.eql(u8, scheme, "https")) break :blk 443;
        break :blk 0;
    };
    var scheme: []const u8 = "";
    var host: []const u8 = "";
    var port: u16 = 443;
    var host_start: usize = 0;
    var host_end: usize = 0;
    for (uri, 0..) |ch, i| {
        switch (ch) {
            ':' => {
                if (!std.mem.eql(u8, scheme, "")) {
                    // here to end is port - this is likely a bug if ipv6 address used
                    const rest_of_uri = uri[i + 1 ..];
                    port = try std.fmt.parseUnsigned(u16, rest_of_uri, 10);
                    host_end = i;
                }
            },
            '/' => {
                if (host_start == 0) {
                    host_start = i + 2;
                    scheme = uri[0 .. i - 1];
                    if (std.mem.eql(u8, scheme, "http")) {
                        port = 80;
                    } else {
                        port = 443;
                    }
                }
            },
            else => continue,
        }
    }
    if (host_end == 0) {
        host_end = uri.len;
    }
    host = try allocator.dupe(u8, uri[host_start..host_end]);

    log.debug("host: {s}, scheme: {s}, port: {}", .{ host, scheme, port });

    return EndPoint{
        .uri = uri,
        .host = host,

@@ -1,5 +1,6 @@
//! This module provides base data structures for aws http requests
const std = @import("std");
const log = std.log.scoped(.aws_base);
pub const Request = struct {
    path: []const u8 = "/",
    query: []const u8 = "",

@@ -1,7 +1,7 @@
const std = @import("std");
const base = @import("aws_http_base.zig");
const auth = @import("aws_authentication.zig");
const date = @import("date");
const date = @import("date.zig");

const scoped_log = std.log.scoped(.aws_signing);

@@ -8,7 +8,7 @@ pub fn snakeToCamel(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    var rc = try allocator.alloc(u8, name.len);
    while (utf8_name.nextCodepoint()) |cp| {
        if (cp > 0xff) return error.UnicodeNotSupported;
        const ascii_char: u8 = @truncate(cp);
        const ascii_char = @as(u8, @truncate(cp));
        if (ascii_char != '_') {
            if (previous_ascii == '_' and ascii_char >= 'a' and ascii_char <= 'z') {
                const uppercase_char = ascii_char - ('a' - 'A');

414 src/date.zig (new file)

@@ -0,0 +1,414 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.

const std = @import("std");

const log = std.log.scoped(.date);

pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };

const SECONDS_PER_DAY = 86400; //* 24 * 60 * 60 */
const DAYS_PER_YEAR = 365; //* Normal year (no leap year) */

pub fn timestampToDateTime(timestamp: i64) DateTime {

    // from https://de.wikipedia.org/wiki/Unixzeit
    const unixtime = @as(u64, @intCast(timestamp));
    const DAYS_IN_4_YEARS = 1461; //* 4*365 + 1 */
    const DAYS_IN_100_YEARS = 36524; //* 100*365 + 25 - 1 */
    const DAYS_IN_400_YEARS = 146097; //* 400*365 + 100 - 4 + 1 */
    const DAY_NUMBER_ADJUSTED_1970_01_01 = 719468; //* Day number relates to March 1st */

    var dayN: u64 = DAY_NUMBER_ADJUSTED_1970_01_01 + unixtime / SECONDS_PER_DAY;
    const seconds_since_midnight: u64 = unixtime % SECONDS_PER_DAY;
    var temp: u64 = 0;

    // Leap year rules for Gregorian Calendars
    // Any year divisible by 100 is not a leap year unless also divisible by 400
    temp = 4 * (dayN + DAYS_IN_100_YEARS + 1) / DAYS_IN_400_YEARS - 1;
    var year = @as(u16, @intCast(100 * temp));
    dayN -= DAYS_IN_100_YEARS * temp + temp / 4;

    // For Julian calendars, each year divisible by 4 is a leap year
    temp = 4 * (dayN + DAYS_PER_YEAR + 1) / DAYS_IN_4_YEARS - 1;
    year += @as(u16, @intCast(temp));
    dayN -= DAYS_PER_YEAR * temp + temp / 4;

    // dayN calculates the days of the year in relation to March 1
    var month = @as(u8, @intCast((5 * dayN + 2) / 153));
    const day = @as(u8, @intCast(dayN - (@as(u64, @intCast(month)) * 153 + 2) / 5 + 1));
    // 153 = 31+30+31+30+31 Days for the 5 months from March through July
    // 153 = 31+30+31+30+31 Days for the 5 months from August through December
    // 31+28 Days for January and February (see below)
    // +2: Rounding adjustment
    // +1: The first day in March is March 1st (not March 0)

    month += 3; // Convert from the day that starts on March 1st, to a human year
    if (month > 12) { // months 13 and 14 become 1 (January) and 2 (February) of the next year
        month -= 12;
        year += 1;
    }

    const hours = @as(u8, @intCast(seconds_since_midnight / 3600));
    const minutes = @as(u8, @intCast(seconds_since_midnight % 3600 / 60));
    const seconds = @as(u8, @intCast(seconds_since_midnight % 60));

    return DateTime{ .day = day, .month = month, .year = year, .hour = hours, .minute = minutes, .second = seconds };
}
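
// Worked example (not part of the diff): 1598607147 seconds after the Unix
// epoch is 2020-08-28T09:32:27Z, the same expectation the removed
// lib/date/src/parsing.zig tests above used:
//
//     try std.testing.expectEqual(
//         DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 },
//         timestampToDateTime(1598607147),
//     );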

pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}

const EnglishParsingState = enum { Start, Day, Month, Year, Hour, Minute, Second, End };
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    // Fri, 03 Jun 2022 18:12:36 GMT
    if (!std.mem.endsWith(u8, data, "GMT")) return error.InvalidFormat;

    var start: usize = 0;
    var state = EnglishParsingState.Start;
    // Anything not explicitly set by our string would be 0
    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    for (data, 0..) |ch, i| {
        switch (ch) {
            ',' => {},
            ' ', ':' => {
                // State transition

                // We're going to coerce and this might not go well, but we
                // want the compiler to create checks, so we'll turn on
                // runtime safety for this block, forcing checks in ReleaseSafe
                // and ReleaseFast modes.
                const next_state = try endEnglishState(state, &rc, data[start..i]);
                state = next_state;
                start = i + 1;
            },
            else => {}, // We need to be pretty trusting on this format...
        }
    }
    return rc;
}
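
// Usage sketch (not part of the diff), mirroring the "Convert whatever AWS is
// sending us to timestamp" test from the removed lib/date implementation:
//
//     const dt = try parseEnglishToDateTime("Fri, 03 Jun 2022 18:12:36 GMT");
//     // dt == DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }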

fn endEnglishState(current_state: EnglishParsingState, date: *DateTime, prev_data: []const u8) !EnglishParsingState {
    var next_state: EnglishParsingState = undefined;
    log.debug("endEnglishState. Current state '{}', data: {s}", .{ current_state, prev_data });

    // Using two switches is slightly less efficient, but more readable
    switch (current_state) {
        .End => return error.IllegalStateTransition,
        .Start => next_state = .Day,
        .Day => next_state = .Month,
        .Month => next_state = .Year,
        .Year => next_state = .Hour,
        .Hour => next_state = .Minute,
        .Minute => next_state = .Second,
        .Second => next_state = .End,
    }

    switch (current_state) {
        .Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
        .Month => date.month = try parseEnglishMonth(prev_data),
        .Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Start => {},
        .End => return error.InvalidState,
    }
    return next_state;
}

fn parseEnglishMonth(data: []const u8) !u8 {
    if (std.ascii.startsWithIgnoreCase(data, "Jan")) return 1;
    if (std.ascii.startsWithIgnoreCase(data, "Feb")) return 2;
    if (std.ascii.startsWithIgnoreCase(data, "Mar")) return 3;
    if (std.ascii.startsWithIgnoreCase(data, "Apr")) return 4;
    if (std.ascii.startsWithIgnoreCase(data, "May")) return 5;
    if (std.ascii.startsWithIgnoreCase(data, "Jun")) return 6;
    if (std.ascii.startsWithIgnoreCase(data, "Jul")) return 7;
    if (std.ascii.startsWithIgnoreCase(data, "Aug")) return 8;
    if (std.ascii.startsWithIgnoreCase(data, "Sep")) return 9;
    if (std.ascii.startsWithIgnoreCase(data, "Oct")) return 10;
    if (std.ascii.startsWithIgnoreCase(data, "Nov")) return 11;
    if (std.ascii.startsWithIgnoreCase(data, "Dec")) return 12;
    return error.InvalidMonth;
}
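// Illustrative sketch (not part of the original diff): matching is
// prefix-based and case-insensitive, so full month names parse too.
test "parseEnglishMonth matches three-letter prefixes" {
    try std.testing.expectEqual(@as(u8, 6), try parseEnglishMonth("Jun"));
    try std.testing.expectEqual(@as(u8, 6), try parseEnglishMonth("JUNE"));
    try std.testing.expectError(error.InvalidMonth, parseEnglishMonth("Foo"));
}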
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}

const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    // Basic format YYYYMMDDThhmmss
    if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
        return try parseIso8601BasicFormatToDateTime(data);
    if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
        return try parseIso8601BasicFormatToDateTime(data);

    var start: usize = 0;
    var state = IsoParsingState.Start;
    // Anything not explicitly set by our string would be 0
    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    var zulu_time = false;
    for (data, 0..) |ch, i| {
        switch (ch) {
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
                if (state == .Start) state = .Year;
            },
            '?', '~', '%' => {
                // These characters all specify the type of time (approximate, etc.)
                // and we will ignore them
            },
            '.', '-', ':', 'T' => {
                // State transition

                // We're going to coerce and this might not go well, but we
                // want the compiler to create checks, so we'll turn on
                // runtime safety for this block, forcing checks in ReleaseSafe
                // and ReleaseFast modes.
                const next_state = try endIsoState(state, &rc, data[start..i]);
                state = next_state;
                start = i + 1;
            },
            'Z' => zulu_time = true,
            else => {
                log.err("Invalid character: {c}", .{ch});
                return error.InvalidCharacter;
            },
        }
    }
    if (!zulu_time) return error.LocalTimeNotSupported;
    // We know we have a Z at the end of this, so let's grab the last bit
    // of the string, minus the 'Z', and fly, eagles, fly!
    _ = try endIsoState(state, &rc, data[start .. data.len - 1]);
    return rc;
}

fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
    return DateTime{
        .year = try std.fmt.parseUnsigned(u16, data[0..4], 10),
        .month = try std.fmt.parseUnsigned(u8, data[4..6], 10),
        .day = try std.fmt.parseUnsigned(u8, data[6..8], 10),
        .hour = try std.fmt.parseUnsigned(u8, data[9..11], 10),
        .minute = try std.fmt.parseUnsigned(u8, data[11..13], 10),
        .second = try std.fmt.parseUnsigned(u8, data[13..15], 10),
    };
}
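// Slicing sketch (not part of the original diff): in "20200828T093227" the
// fixed offsets above line up as year data[0..4], month data[4..6],
// day data[6..8], then skip the 'T' at data[8] and resume at hour data[9..11].
test "basic format slices around the T separator" {
    const dt = try parseIso8601BasicFormatToDateTime("20200828T093227");
    try std.testing.expectEqual(@as(u16, 2020), dt.year);
    try std.testing.expectEqual(@as(u8, 28), dt.day);
    try std.testing.expectEqual(@as(u8, 9), dt.hour);
}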

fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
    var next_state: IsoParsingState = undefined;
    log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });

    // Using two switches is slightly less efficient, but more readable
    switch (current_state) {
        .Start, .End => return error.IllegalStateTransition,
        .Year => next_state = .Month,
        .Month => next_state = .Day,
        .Day => next_state = .Hour,
        .Hour => next_state = .Minute,
        .Minute => next_state = .Second,
        .Second => next_state = .Millisecond,
        .Millisecond => next_state = .End,
    }

    // TODO: This won't handle signed years, which ISO 8601 supports. For now,
    // let's fail explicitly
    switch (current_state) {
        .Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
        .Month => date.month = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
        .Millisecond => {}, // We'll throw that away - our granularity is 1 second
        .Start, .End => return error.InvalidState,
    }
    return next_state;
}
pub fn dateTimeToTimestamp(datetime: DateTime) !i64 {
    const epoch = DateTime{
        .year = 1970,
        .month = 1,
        .day = 1,
        .hour = 0,
        .minute = 0,
        .second = 0,
    };
    return secondsBetween(epoch, datetime);
}

const DateTimeToTimestampError = error{
    DateTimeOutOfRange,
};

fn secondsBetween(start: DateTime, end: DateTime) DateTimeToTimestampError!i64 {
    try validateDatetime(start);
    try validateDatetime(end);
    if (end.year < start.year) return -1 * try secondsBetween(end, start);
    if (start.month != 1 or
        start.day != 1 or
        start.hour != 0 or
        start.minute != 0 or
        start.second != 0)
    {
        const seconds_into_start_year = secondsFromBeginningOfYear(
            start.year,
            start.month,
            start.day,
            start.hour,
            start.minute,
            start.second,
        );
        const new_start = DateTime{
            .year = start.year,
            .month = 1,
            .day = 1,
            .hour = 0,
            .minute = 0,
            .second = 0,
        };
        return (try secondsBetween(new_start, end)) - seconds_into_start_year;
    }
    const leap_years_between = leapYearsBetween(start.year, end.year);
    const add_days: u1 = 0;
    const years_diff = end.year - start.year;
    // log.debug("Years from epoch: {d}, Leap years: {d}", .{ years_diff, leap_years_between });
    const days_diff: i32 = (years_diff * DAYS_PER_YEAR) + leap_years_between + add_days;
    // log.debug("Days with leap year, without month: {d}", .{days_diff});

    const seconds_into_year = secondsFromBeginningOfYear(
        end.year,
        end.month,
        end.day,
        end.hour,
        end.minute,
        end.second,
    );
    return (days_diff * SECONDS_PER_DAY) + @as(i64, seconds_into_year);
}

fn validateDatetime(dt: DateTime) !void {
    if (dt.month > 12 or
        dt.day > 31 or
        dt.hour >= 24 or
        dt.minute >= 60 or
        dt.second >= 60) return error.DateTimeOutOfRange;
}

fn secondsFromBeginningOfYear(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) u32 {
    const current_year_is_leap_year = isLeapYear(year);
    const leap_year_days_per_month: [12]u5 = .{ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
    const normal_days_per_month: [12]u5 = .{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
    const days_per_month = if (current_year_is_leap_year) leap_year_days_per_month else normal_days_per_month;
    var current_month: usize = 1;
    const end_month = month;
    var days_diff: u32 = 0;
    while (current_month != end_month) {
        days_diff += days_per_month[current_month - 1]; // months are 1-based vs array is 0-based
        current_month += 1;
    }
    // log.debug("Days with month, without day: {d}. Day of month {d}, will add {d} days", .{
    //     days_diff,
    //     day,
    //     day - 1,
    // });
    // We need -1 because we're not actually including the ending day (that's up to hour/minute)
    // In other words, days in the month are 1-based, while hours/minutes are zero-based
    days_diff += day - 1;
    // log.debug("Total days diff: {d}", .{days_diff});
    var seconds_diff: u32 = days_diff * SECONDS_PER_DAY;

    // From here out, we want to get everything into seconds
    seconds_diff += @as(u32, hour) * 60 * 60;
    seconds_diff += @as(u32, minute) * 60;
    seconds_diff += @as(u32, second);

    return seconds_diff;
}
fn isLeapYear(year: u16) bool {
    if (year % 4 != 0) return false;
    if (year % 400 == 0) return true;
    if (year % 100 == 0) return false;
    return true;
}
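// Illustrative sketch (not part of the original diff): the Gregorian rules
// above in action - century years are leap years only when divisible by 400.
test "isLeapYear Gregorian rules" {
    try std.testing.expect(isLeapYear(2000));
    try std.testing.expect(!isLeapYear(1900));
    try std.testing.expect(isLeapYear(2024));
    try std.testing.expect(!isLeapYear(2023));
}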

fn leapYearsBetween(start_year_inclusive: u16, end_year_exclusive: u16) u16 {
    const start = @min(start_year_inclusive, end_year_exclusive);
    const end = @max(start_year_inclusive, end_year_exclusive);
    var current = start;
    // log.debug("Leap years starting from {d}, ending at {d}", .{ start, end });
    while (current % 4 != 0 and current < end) {
        current += 1;
    }
    if (current == end) return 0; // No leap years in this range, e.g. 1969-1971
    // We're on a potential leap year, and now we can step by 4
    var rc: u16 = 0;
    while (current < end) {
        if (current % 4 == 0) {
            if (current % 100 != 0) {
                // log.debug("Year {d} is leap year", .{current});
                rc += 1;
                current += 4;
                continue;
            }
            // We're on a century, which is normally not a leap year, unless
            // it's divisible by 400
            if (current % 400 == 0) {
                // log.debug("Year {d} is leap year", .{current});
                rc += 1;
            }
        }
        current += 4;
    }
    return rc;
}
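// Illustrative sketch (not part of the original diff): the range is
// half-open, so the end year itself is never counted.
test "leapYearsBetween counts a half-open range" {
    try std.testing.expectEqual(@as(u16, 0), leapYearsBetween(1969, 1971));
    try std.testing.expectEqual(@as(u16, 7), leapYearsBetween(1970, 2000));
    try std.testing.expectEqual(@as(u16, 0), leapYearsBetween(1900, 1904));
}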

fn printDateTime(dt: DateTime) void {
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0>2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}

pub fn printNowUtc() void {
    printDateTime(timestampToDateTime(std.time.timestamp()));
}

test "Convert timestamp to datetime" {
    printDateTime(timestampToDateTime(std.time.timestamp()));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));

    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
    // Get time for date: https://wtools.io/convert-date-time-to-unix-time
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}

test "Convert datetime to timestamp" {
    try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
    try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
    try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}

test "Convert ISO8601 string to timestamp" {
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}

test "Convert whatever AWS is sending us to timestamp" {
    const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
    try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}

@@ -14,117 +14,8 @@ const testing = std.testing;
const mem = std.mem;
const maxInt = std.math.maxInt;

pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !void {
    if (@typeInfo(@TypeOf(map)) == .optional) {
        if (map) |m| try serializeMapInternal(m, key, options, out_stream);
    } else {
        try serializeMapInternal(map, key, options, out_stream);
    }
}

fn serializeMapKey(key: []const u8, options: anytype, out_stream: anytype) !void {
    var child_options = options;
    if (child_options.whitespace) |*child_ws|
        child_ws.indent_level += 1;

    try out_stream.writeByte('"');
    try out_stream.writeAll(key);
    _ = try out_stream.write("\":");
    if (options.whitespace) |ws| {
        if (ws.separator) {
            try out_stream.writeByte(' ');
        }
    }
}

fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
    if (map.len == 0) {
        try out_stream.writeByte('{');
        try out_stream.writeByte('}');
        return;
    }

    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
    var child_options = options;
    if (child_options.whitespace) |*whitespace| {
        whitespace.indent_level += 1;
    }

    try out_stream.writeByte('{');
    if (options.whitespace) |_|
        try out_stream.writeByte('\n');

    for (map, 0..) |tag, i| {
        // TODO: Deal with escaping and general "json.stringify" the values...
        if (child_options.whitespace) |ws|
            try ws.outputIndent(out_stream);
        try out_stream.writeByte('"');
        try jsonEscape(tag.key, child_options, out_stream);
        _ = try out_stream.write("\":");
        if (child_options.whitespace) |ws| {
            if (ws.separator) {
                try out_stream.writeByte(' ');
            }
        }
        try out_stream.writeByte('"');
        try jsonEscape(tag.value, child_options, out_stream);
        try out_stream.writeByte('"');
        if (i < map.len - 1) {
            try out_stream.writeByte(',');
        }
        if (child_options.whitespace) |_|
            try out_stream.writeByte('\n');
    }

    if (options.whitespace) |ws|
        try ws.outputIndent(out_stream);
    try out_stream.writeByte('}');
}

fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !void {
    try serializeMapKey(key, options, out_stream);
    try serializeMapAsObject(map, options, out_stream);
}
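// Output-shape sketch (not part of the original diff; field names are
// illustrative): given a slice of key/value structs such as
//   &[_]struct { key: []const u8, value: []const u8 }{ .{ .key = "Name", .value = "Alice" } }
// and key "tags", the functions above emit "tags":{"Name":"Alice"} when minified.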

// code within jsonEscape lifted from json.zig in stdlib
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
    var i: usize = 0;
    while (i < value.len) : (i += 1) {
        switch (value[i]) {
            // normal ascii character
            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
            // only 2 characters that *must* be escaped
            '\\' => try out_stream.writeAll("\\\\"),
            '\"' => try out_stream.writeAll("\\\""),
            // solidus is optional to escape
            '/' => {
                if (options.string.String.escape_solidus) {
                    try out_stream.writeAll("\\/");
                } else {
                    try out_stream.writeByte('/');
                }
            },
            // control characters with short escapes
            // TODO: option to switch between unicode and 'short' forms?
            0x8 => try out_stream.writeAll("\\b"),
            0xC => try out_stream.writeAll("\\f"),
            '\n' => try out_stream.writeAll("\\n"),
            '\r' => try out_stream.writeAll("\\r"),
            '\t' => try out_stream.writeAll("\\t"),
            else => {
                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                if (ulen == 1 or options.string.String.escape_unicode) {
                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                    try outputUnicodeEscape(codepoint, out_stream);
                } else {
                    try out_stream.writeAll(value[i .. i + ulen]);
                }
                i += ulen - 1;
            },
        }
    }
}
// pub const WriteStream = @import("json/write_stream.zig").WriteStream;
// pub const writeStream = @import("json/write_stream.zig").writeStream;

const StringEscapes = union(enum) {
    None,
@@ -1371,8 +1262,137 @@ pub const Value = union(enum) {
    String: []const u8,
    Array: Array,
    Object: ObjectMap,

    pub fn jsonStringify(
        value: @This(),
        options: StringifyOptions,
        out_stream: anytype,
    ) @TypeOf(out_stream).Error!void {
        switch (value) {
            .Null => try stringify(null, options, out_stream),
            .Bool => |inner| try stringify(inner, options, out_stream),
            .Integer => |inner| try stringify(inner, options, out_stream),
            .Float => |inner| try stringify(inner, options, out_stream),
            .NumberString => |inner| try out_stream.writeAll(inner),
            .String => |inner| try stringify(inner, options, out_stream),
            .Array => |inner| try stringify(inner.items, options, out_stream),
            .Object => |inner| {
                try out_stream.writeByte('{');
                var field_output = false;
                var child_options = options;
                if (child_options.whitespace) |*child_whitespace| {
                    child_whitespace.indent_level += 1;
                }
                var it = inner.iterator();
                while (it.next()) |entry| {
                    if (!field_output) {
                        field_output = true;
                    } else {
                        try out_stream.writeByte(',');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        try out_stream.writeByte('\n');
                        try child_whitespace.outputIndent(out_stream);
                    }

                    try stringify(entry.key_ptr, options, out_stream);
                    try out_stream.writeByte(':');
                    if (child_options.whitespace) |child_whitespace| {
                        if (child_whitespace.separator) {
                            try out_stream.writeByte(' ');
                        }
                    }
                    try stringify(entry.value_ptr, child_options, out_stream);
                }
                if (field_output) {
                    if (options.whitespace) |whitespace| {
                        try out_stream.writeByte('\n');
                        try whitespace.outputIndent(out_stream);
                    }
                }
                try out_stream.writeByte('}');
            },
        }
    }

    pub fn dump(self: Value) void {
        var held = std.debug.getStderrMutex().acquire();
        defer held.release();

        const stderr = std.io.getStdErr().writer();
        stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
    }
};

pub fn dump(value: anytype) void {
    var held = std.debug.getStderrMutex().acquire();
    defer held.release();

    const stderr = std.io.getStdErr().writer();
    stringify(value, StringifyOptions{ .whitespace = null }, stderr) catch return;
}

test "Value.jsonStringify" {
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e1");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var vals = [_]Value{
            .{ .Integer = 1 },
            .{ .Integer = 2 },
            .{ .NumberString = "3" },
        };
        try (Value{
            .Array = Array.fromOwnedSlice(undefined, &vals),
        }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var obj = ObjectMap.init(testing.allocator);
        defer obj.deinit();
        try obj.putNoClobber("a", .{ .String = "b" });
        try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
    }
}

/// parse tokens from a stream, returning `false` if they do not decode to `value`
fn parsesTo(comptime T: type, value: T, tokens: *TokenStream, options: ParseOptions) !bool {
    // TODO: should be able to write this function to not require an allocator
|
|||
.@"enum" => |enumInfo| {
|
||||
switch (token) {
|
||||
.Number => |numberToken| {
|
||||
if (!numberToken.is_integer) {
|
||||
// probably is in scientific notation
|
||||
const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
|
||||
return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
|
||||
}
|
||||
|
||||
if (!numberToken.is_integer) return error.UnexpectedToken;
|
||||
const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
|
||||
return try std.meta.intToEnum(T, n);
|
||||
},
|
||||
.String => |stringToken| {
|
||||
const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
|
||||
|
||||
if (std.meta.hasFn(T, "parse")) {
|
||||
return try T.parse(source_slice);
|
||||
}
|
||||
|
||||
switch (stringToken.escapes) {
|
||||
.None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
|
||||
.Some => {
|
||||
|
@@ -2794,3 +2804,397 @@ fn outputUnicodeEscape(
        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    }
}

pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
) !void {
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .float, .comptime_float => {
            return std.fmt.format(out_stream, "{e}", .{value});
        },
        .int, .comptime_int => {
            return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
        },
        .bool => {
            return out_stream.writeAll(if (value) "true" else "false");
        },
        .null => {
            return out_stream.writeAll("null");
        },
        .optional => {
            if (value) |payload| {
                return try stringify(payload, options, out_stream);
            } else {
                return try stringify(null, options, out_stream);
            }
        },
        .@"enum" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
        },
        .@"union" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            const info = @typeInfo(T).@"union";
            if (info.tag_type) |UnionTagType| {
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
                        return try stringify(@field(value, u_field.name), options, out_stream);
                    }
                }
            } else {
                @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .@"struct" => |S| {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            try out_stream.writeByte('{');
            var field_output = false;
            var child_options = options;
            if (child_options.whitespace) |*child_whitespace| {
                child_whitespace.indent_level += 1;
            }
            inline for (S.fields) |Field| {
                // don't include void fields
                if (Field.type == void) continue;

                var output_this_field = true;
                if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;

                const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                    value.fieldNameFor(Field.name)
                else
                    Field.name;
                if (options.exclude_fields) |exclude_fields| {
                    for (exclude_fields) |exclude_field| {
                        if (std.mem.eql(u8, final_name, exclude_field)) {
                            output_this_field = false;
                        }
                    }
                }

                if (!field_output) {
                    field_output = output_this_field;
                } else {
                    if (output_this_field) try out_stream.writeByte(',');
                }
                if (child_options.whitespace) |child_whitespace| {
                    if (output_this_field) try out_stream.writeByte('\n');
                    if (output_this_field) try child_whitespace.outputIndent(out_stream);
                }
                var field_written = false;
                if (comptime std.meta.hasFn(T, "jsonStringifyField")) {
                    if (output_this_field) field_written = try value.jsonStringifyField(Field.name, child_options, out_stream);
                }

                if (!field_written) {
                    if (output_this_field) {
                        try stringify(final_name, options, out_stream);
                        try out_stream.writeByte(':');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        if (child_whitespace.separator) {
                            if (output_this_field) try out_stream.writeByte(' ');
                        }
                    }
                    if (output_this_field) try stringify(@field(value, Field.name), child_options, out_stream);
                }
            }
            if (field_output) {
                if (options.whitespace) |whitespace| {
                    try out_stream.writeByte('\n');
                    try whitespace.outputIndent(out_stream);
                }
            }
            try out_stream.writeByte('}');
            return;
        },
        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
        .pointer => |ptr_info| switch (ptr_info.size) {
            .one => switch (@typeInfo(ptr_info.child)) {
                .array => {
                    const Slice = []const std.meta.Elem(ptr_info.child);
                    return stringify(@as(Slice, value), options, out_stream);
                },
                else => {
                    // TODO: avoid loops?
                    return stringify(value.*, options, out_stream);
                },
            },
            // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
            .slice => {
                if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
                    try out_stream.writeByte('\"');
                    var i: usize = 0;
                    while (i < value.len) : (i += 1) {
                        switch (value[i]) {
                            // normal ascii character
                            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
                            // only 2 characters that *must* be escaped
                            '\\' => try out_stream.writeAll("\\\\"),
                            '\"' => try out_stream.writeAll("\\\""),
                            // solidus is optional to escape
                            '/' => {
                                if (options.string.String.escape_solidus) {
                                    try out_stream.writeAll("\\/");
                                } else {
                                    try out_stream.writeByte('/');
                                }
                            },
                            // control characters with short escapes
                            // TODO: option to switch between unicode and 'short' forms?
                            0x8 => try out_stream.writeAll("\\b"),
                            0xC => try out_stream.writeAll("\\f"),
                            '\n' => try out_stream.writeAll("\\n"),
                            '\r' => try out_stream.writeAll("\\r"),
                            '\t' => try out_stream.writeAll("\\t"),
                            else => {
                                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                                if (ulen == 1 or options.string.String.escape_unicode) {
                                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                                    try outputUnicodeEscape(codepoint, out_stream);
                                } else {
                                    try out_stream.writeAll(value[i .. i + ulen]);
                                }
                                i += ulen - 1;
                            },
                        }
                    }
                    try out_stream.writeByte('\"');
                    return;
                }

                try out_stream.writeByte('[');
                var child_options = options;
                if (child_options.whitespace) |*whitespace| {
                    whitespace.indent_level += 1;
                }
                for (value, 0..) |x, i| {
                    if (i != 0) {
                        try out_stream.writeByte(',');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        try out_stream.writeByte('\n');
                        try child_whitespace.outputIndent(out_stream);
                    }
                    try stringify(x, child_options, out_stream);
                }
                if (value.len != 0) {
                    if (options.whitespace) |whitespace| {
                        try out_stream.writeByte('\n');
                        try whitespace.outputIndent(out_stream);
                    }
                }
                try out_stream.writeByte(']');
                return;
            },
            else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
        },
        .array => return stringify(&value, options, out_stream),
        .vector => |info| {
            const array: [info.len]info.child = value;
            return stringify(&array, options, out_stream);
        },
        else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}
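// Illustrative sketch (not part of the original diff; assumes exclude_fields
// is an optional slice of serialized field names, as the loop above suggests):
// an excluded field is omitted from the output entirely, comma included.
test "stringify can exclude fields" {
    try teststringify("{\"foo\":42}", struct {
        foo: u32,
        secret: u32,
    }{ .foo = 42, .secret = 7 }, StringifyOptions{ .exclude_fields = &[_][]const u8{"secret"} });
}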

fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
    const ValidationWriter = struct {
        const Self = @This();
        pub const Writer = std.io.Writer(*Self, Error, write);
        pub const Error = error{
            TooMuchData,
            DifferentData,
        };

        expected_remaining: []const u8,

        fn init(exp: []const u8) Self {
            return .{ .expected_remaining = exp };
        }

        pub fn writer(self: *Self) Writer {
            return .{ .context = self };
        }

        fn write(self: *Self, bytes: []const u8) Error!usize {
            if (self.expected_remaining.len < bytes.len) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining,
                    bytes,
                });
                return error.TooMuchData;
            }
            if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining[0..bytes.len],
                    bytes,
                });
                return error.DifferentData;
            }
            self.expected_remaining = self.expected_remaining[bytes.len..];
            return bytes.len;
        }
    };

    var vos = ValidationWriter.init(expected);
    try stringify(value, options, vos.writer());
    if (vos.expected_remaining.len > 0) return error.NotEnoughData;
}

test "stringify basic types" {
    try teststringify("false", false, StringifyOptions{});
    try teststringify("true", true, StringifyOptions{});
    try teststringify("null", @as(?u8, null), StringifyOptions{});
    try teststringify("null", @as(?*u32, null), StringifyOptions{});
    try teststringify("42", 42, StringifyOptions{});
    try teststringify("4.2e1", 42.0, StringifyOptions{});
    try teststringify("42", @as(u8, 42), StringifyOptions{});
    try teststringify("42", @as(u128, 42), StringifyOptions{});
    try teststringify("4.2e1", @as(f32, 42), StringifyOptions{});
    try teststringify("4.2e1", @as(f64, 42), StringifyOptions{});
    try teststringify("\"ItBroke\"", @as(anyerror, error.ItBroke), StringifyOptions{});
}

test "stringify string" {
    try teststringify("\"hello\"", "hello", StringifyOptions{});
    try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{});
    try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{});
    try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{80}\"", "with unicode\u{80}", StringifyOptions{});
    try teststringify("\"with unicode\\u0080\"", "with unicode\u{80}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", StringifyOptions{});
    try teststringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{100}\"", "with unicode\u{100}", StringifyOptions{});
    try teststringify("\"with unicode\\u0100\"", "with unicode\u{100}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{800}\"", "with unicode\u{800}", StringifyOptions{});
    try teststringify("\"with unicode\\u0800\"", "with unicode\u{800}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", StringifyOptions{});
    try teststringify("\"with unicode\\u8000\"", "with unicode\u{8000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", StringifyOptions{});
    try teststringify("\"with unicode\\ud799\"", "with unicode\u{D799}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", StringifyOptions{});
    try teststringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", StringifyOptions{});
    try teststringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"/\"", "/", StringifyOptions{});
    try teststringify("\"\\/\"", "/", StringifyOptions{ .string = .{ .String = .{ .escape_solidus = true } } });
}

test "stringify tagged unions" {
    try teststringify("42", union(enum) {
        Foo: u32,
        Bar: bool,
    }{ .Foo = 42 }, StringifyOptions{});
}

test "stringify struct" {
    try teststringify("{\"foo\":42}", struct {
        foo: u32,
    }{ .foo = 42 }, StringifyOptions{});
}

test "stringify struct with indentation" {
    try teststringify(
        \\{
        \\ "foo": 42,
        \\ "bar": [
        \\  1,
        \\  2,
        \\  3
        \\ ]
        \\}
    ,
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        StringifyOptions{
            .whitespace = .{},
        },
    );
    try teststringify(
        "{\n\t\"foo\":42,\n\t\"bar\":[\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        StringifyOptions{
            .whitespace = .{
                .indent = .Tab,
                .separator = false,
            },
        },
    );
}

test "stringify struct with void field" {
    try teststringify("{\"foo\":42}", struct {
        foo: u32,
        bar: void = {},
    }{ .foo = 42 }, StringifyOptions{});
}

test "stringify array of structs" {
    const MyStruct = struct {
        foo: u32,
    };
    try teststringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
        MyStruct{ .foo = 42 },
        MyStruct{ .foo = 100 },
        MyStruct{ .foo = 1000 },
    }, StringifyOptions{});
}

test "stringify struct with custom stringifier" {
    try teststringify("[\"something special\",42]", struct {
        foo: u32,
        const Self = @This();
        pub fn jsonStringify(
            _: Self,
            options: StringifyOptions,
            out_stream: anytype,
        ) !void {
            try out_stream.writeAll("[\"something special\",");
            try stringify(42, options, out_stream);
            try out_stream.writeByte(']');
        }
    }{ .foo = 42 }, StringifyOptions{});
}

test "stringify vector" {
    try teststringify("[1,1]", @as(@Vector(2, u32), @splat(@as(u32, 1))), StringifyOptions{});
}
@@ -1,6 +1,6 @@
const std = @import("std");
const aws = @import("aws.zig");
const json = @import("json");
const json = @import("json.zig");

var verbose: u8 = 0;

@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
    const func = fns[0];
    const arn = func.function_arn.?;
    // This is a bit ugly. Maybe a helper function in the library would help?
    var tags = try std.ArrayList(aws.services.lambda.TagKeyValue).initCapacity(allocator, 1);
    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
    defer tags.deinit();
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };

@@ -39,7 +39,7 @@ fn serviceCount(desired_services: anytype) usize {
pub const services = service_list;

test "services includes sts" {
    try expectEqualStrings("2011-06-15", services.sts.version.?);
    try expectEqualStrings("2011-06-15", services.sts.version);
}
test "sts includes get_caller_identity" {
    try expectEqualStrings("GetCallerIdentity", services.sts.get_caller_identity.action_name);

@@ -47,9 +47,9 @@ test "sts includes get_caller_identity" {
test "can get service and action name from request" {
    // get request object. This call doesn't have parameters
    const metadata = services.sts.get_caller_identity.Request.metaInfo();
    try expectEqualStrings("2011-06-15", metadata.service_metadata.version.?);
    try expectEqualStrings("2011-06-15", metadata.service_metadata.version);
}
test "can filter services" {
    const filtered_services = Services(.{ .sts, .wafv2 }){};
    try expectEqualStrings("2011-06-15", filtered_services.sts.version.?);
    try expectEqualStrings("2011-06-15", filtered_services.sts.version);
}

src/xml.zig

@@ -25,7 +25,6 @@ pub const Element = struct {
    tag: []const u8,
    attributes: AttributeList,
    children: ContentList,
    next_sibling: ?*Element = null,

    fn init(tag: []const u8, alloc: Allocator) Element {
        return .{

@@ -348,7 +347,7 @@ fn parseDocument(ctx: *ParseContext, backing_allocator: Allocator) !Document {
    _ = ctx.eatWs();
    try trySkipComments(ctx, allocator);

    doc.root = (try tryParseElement(ctx, allocator, null)) orelse return error.InvalidDocument;
    doc.root = (try tryParseElement(ctx, allocator)) orelse return error.InvalidDocument;
    _ = ctx.eatWs();
    try trySkipComments(ctx, allocator);

@@ -416,12 +415,12 @@ fn tryParseCharData(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
    return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}

fn parseContent(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) ParseError!Content {
fn parseContent(ctx: *ParseContext, alloc: Allocator) ParseError!Content {
    if (try tryParseCharData(ctx, alloc)) |cd| {
        return Content{ .CharData = cd };
    } else if (try tryParseComment(ctx, alloc)) |comment| {
        return Content{ .Comment = comment };
    } else if (try tryParseElement(ctx, alloc, parent)) |elem| {
    } else if (try tryParseElement(ctx, alloc)) |elem| {
        return Content{ .Element = elem };
    } else {
        return error.UnexpectedCharacter;

@@ -441,7 +440,7 @@ fn tryParseAttr(ctx: *ParseContext, alloc: Allocator) !?*Attribute {
    return attr;
}

fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
fn tryParseElement(ctx: *ParseContext, alloc: Allocator) !?*Element {
    const start = ctx.offset;
    if (!ctx.eat('<')) return null;
    const tag = parseNameNoDupe(ctx) catch {

@@ -470,7 +469,7 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E
            break;
        }

        const content = try parseContent(ctx, alloc, element);
        const content = try parseContent(ctx, alloc);
        try element.children.append(content);
    }

@@ -481,23 +480,6 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E

    _ = ctx.eatWs();
    try ctx.expect('>');

    if (parent) |p| {
        var last_element: ?*Element = null;

        for (0..p.children.items.len) |i| {
            const child = p.children.items[p.children.items.len - i - 1];
            if (child == .Element) {
                last_element = child.Element;
                break;
            }
        }

        if (last_element) |lc| {
            lc.next_sibling = element;
        }
    }

    return element;
}

@@ -508,13 +490,13 @@ test "tryParseElement" {

    {
        var ctx = ParseContext.init("<= a='b'/>");
        try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc, null));
        try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
        try testing.expectEqual(@as(?u8, '<'), ctx.peek());
    }

    {
        var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
        const elem = try tryParseElement(&ctx, alloc, null);
        const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "python");

        const size_attr = elem.?.attributes.items[0];

@@ -528,14 +510,14 @@ test "tryParseElement" {

    {
        var ctx = ParseContext.init("<python>test</python>");
        const elem = try tryParseElement(&ctx, alloc, null);
        const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "python");
        try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
    }

    {
        var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
        const elem = try tryParseElement(&ctx, alloc, null);
        const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "a");
        try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
        try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
@@ -1,793 +0,0 @@
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;

/// Options for controlling XML serialization behavior
pub const StringifyOptions = struct {
    /// Controls whitespace insertion for easier human readability
    whitespace: Whitespace = .minified,

    /// Should optional fields with null value be written?
    emit_null_optional_fields: bool = true,

    // TODO: Implement
    /// Arrays/slices of u8 are typically encoded as strings. This option emits them as arrays of numbers instead. Does not affect calls to objectField*().
    emit_strings_as_arrays: bool = false,

    /// Controls whether to include the XML declaration at the beginning
    include_declaration: bool = true,

    /// Root element name to use when serializing a value that doesn't have a natural name
    root_name: ?[]const u8 = "root",

    /// Root attributes (e.g. xmlns="...") that will be added to the root element node only
    root_attributes: []const u8 = "",

    /// Function to determine the element name for an array item based on the element
    /// name of the array containing the elements. See arrayElementPluralToSingluarTransformation
    /// and arrayElementNoopTransformation functions for examples
    arrayElementNameConversion: *const fn (allocator: std.mem.Allocator, name: ?[]const u8) error{OutOfMemory}!?[]const u8 = arrayElementPluralToSingluarTransformation,

    pub const Whitespace = enum {
        minified,
        indent_1,
        indent_2,
        indent_3,
        indent_4,
        indent_8,
        indent_tab,
    };
};

/// Error set for XML serialization
pub const XmlSerializeError = error{
    /// Unsupported type for XML serialization
    UnsupportedType,
    /// Out of memory
    OutOfMemory,
    /// Write error
    WriteError,
};

/// Serializes a value to XML and writes it to the provided writer
pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    writer: anytype,
) !void {
    // Write the XML declaration if requested
    if (options.include_declaration)
        try writer.writeAll("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");

    // Start serialization with the root element
    const root_name = options.root_name;
    if (@typeInfo(@TypeOf(value)) != .optional or value == null)
        try serializeValue(value, root_name, options, writer.any(), 0)
    else
        try serializeValue(value.?, root_name, options, writer.any(), 0);
}

/// Serializes a value to XML and returns an allocated string
pub fn stringifyAlloc(
    allocator: Allocator,
    value: anytype,
    options: StringifyOptions,
) ![]u8 {
    var list = std.ArrayList(u8).init(allocator);
    errdefer list.deinit();

    try stringify(value, options, list.writer());
    return list.toOwnedSlice();
}

/// Internal function to serialize a value with proper indentation
fn serializeValue(
    value: anytype,
    element_name: ?[]const u8,
    options: StringifyOptions,
    writer: anytype,
    depth: usize,
) !void {
    const T = @TypeOf(value);

    // const output_indent = !(!options.emit_null_optional_fields and @typeInfo(@TypeOf(value)) == .optional and value == null);
    const output_indent = options.emit_null_optional_fields or @typeInfo(@TypeOf(value)) != .optional or value != null;

    if (output_indent and element_name != null)
        try writeIndent(writer, depth, options.whitespace);

    // Start element tag
    if (@typeInfo(T) != .optional and @typeInfo(T) != .array) {
        if (element_name) |n| {
            try writer.writeAll("<");
            try writer.writeAll(n);
            if (depth == 0 and options.root_attributes.len > 0) {
                try writer.writeByte(' ');
                try writer.writeAll(options.root_attributes);
            }
            try writer.writeAll(">");
        }
    }

    // Handle different types
    switch (@typeInfo(T)) {
        .bool => try writer.writeAll(if (value) "true" else "false"),
        .int, .comptime_int, .float, .comptime_float => try writer.print("{}", .{value}),
        .pointer => |ptr_info| {
            switch (ptr_info.size) {
                .one => {
                    // We don't want to write the opening tag a second time, so
                    // we will pass null, then come back and close before returning
                    //
                    // ...but...in the event of a *[]const u8, we do want to pass that in,
                    // but only if emit_strings_as_arrays is true
                    const child_ti = @typeInfo(ptr_info.child);
                    const el_name = if (options.emit_strings_as_arrays and child_ti == .array and child_ti.array.child == u8)
                        element_name
                    else
                        null;
                    try serializeValue(value.*, el_name, options, writer, depth);
                    try writeClose(writer, element_name);
                    return;
                },
                .slice => {
                    if (ptr_info.child == u8) {
                        // String type
                        try serializeString(writer, element_name, value, options, depth);
                    } else {
                        // Array of values
                        if (options.whitespace != .minified) {
                            try writer.writeByte('\n');
                        }

                        var buf: [256]u8 = undefined;
                        var fba = std.heap.FixedBufferAllocator.init(&buf);
                        const alloc = fba.allocator();
                        const item_name = try options.arrayElementNameConversion(alloc, element_name);

                        for (value) |item| {
                            try serializeValue(item, item_name, options, writer, depth + 1);
                            if (options.whitespace != .minified) {
                                try writer.writeByte('\n');
                            }
                        }

                        try writeIndent(writer, depth, options.whitespace);
                    }
                },
                else => return error.UnsupportedType,
            }
        },
        .array => |array_info| {
            if (!options.emit_strings_as_arrays or array_info.child != u8) {
                if (element_name) |n| {
                    try writer.writeAll("<");
                    try writer.writeAll(n);
                    try writer.writeAll(">");
                }
            }
            if (array_info.child == u8) {
                // Fixed-size string
                const slice = &value;
                try serializeString(writer, element_name, slice, options, depth);
            } else {
                // Fixed-size array
                if (options.whitespace != .minified) {
                    try writer.writeByte('\n');
                }

                var buf: [256]u8 = undefined;
                var fba = std.heap.FixedBufferAllocator.init(&buf);
                const alloc = fba.allocator();
                const item_name = try options.arrayElementNameConversion(alloc, element_name);

                for (value) |item| {
                    try serializeValue(item, item_name, options, writer, depth + 1);
                    if (options.whitespace != .minified) {
                        try writer.writeByte('\n');
                    }
                }

                try writeIndent(writer, depth, options.whitespace);
            }
            if (!options.emit_strings_as_arrays or array_info.child != u8)
                try writeClose(writer, element_name);
            return;
        },
        .@"struct" => |struct_info| {
            if (options.whitespace != .minified) {
                try writer.writeByte('\n');
            }

            inline for (struct_info.fields) |field| {
                const field_name =
                    if (std.meta.hasFn(T, "fieldNameFor"))
                        value.fieldNameFor(field.name)
                    else
                        field.name; // TODO: field mapping

                const field_value = @field(value, field.name);
                try serializeValue(
                    field_value,
                    field_name,
                    options,
                    writer,
                    depth + 1,
                );

                if (options.whitespace != .minified) {
                    if (!options.emit_null_optional_fields and @typeInfo(@TypeOf(field_value)) == .optional and field_value == null) {
                        // Skip writing anything
                    } else {
                        try writer.writeByte('\n');
                    }
                }
            }

            try writeIndent(writer, depth, options.whitespace);
        },
        .optional => {
            if (options.emit_null_optional_fields or value != null) {
                if (element_name) |n| {
                    try writer.writeAll("<");
                    try writer.writeAll(n);
                    try writer.writeAll(">");
                }
            }
            if (value) |payload| {
                try serializeValue(payload, null, options, writer, depth);
            } else {
                // For null values, we'll write an empty element
                // We've already written the opening tag, so just close it immediately
                if (options.emit_null_optional_fields)
                    try writeClose(writer, element_name);
                return;
            }
        },
        .null => {
            // Empty element
        },
        .@"enum" => {
            try std.fmt.format(writer, "{s}", .{@tagName(value)});
        },
        .@"union" => |union_info| {
            if (union_info.tag_type) |_| {
                inline for (union_info.fields) |field| {
                    if (@field(std.meta.Tag(T), field.name) == std.meta.activeTag(value)) {
                        try serializeValue(
                            @field(value, field.name),
                            field.name,
                            options,
                            writer,
                            depth,
                        );
                        break;
                    }
                }
            } else {
                return error.UnsupportedType;
            }
        },
        else => return error.UnsupportedType,
    }

    try writeClose(writer, element_name);
}

fn writeClose(writer: anytype, element_name: ?[]const u8) !void {
    // Close element tag
    if (element_name) |n| {
        try writer.writeAll("</");
        try writer.writeAll(n);
        try writer.writeAll(">");
    }
}

/// Writes indentation based on depth and indent level
fn writeIndent(writer: anytype, depth: usize, whitespace: StringifyOptions.Whitespace) @TypeOf(writer).Error!void {
    var char: u8 = ' ';
    const n_chars = switch (whitespace) {
        .minified => return,
        .indent_1 => 1 * depth,
        .indent_2 => 2 * depth,
        .indent_3 => 3 * depth,
        .indent_4 => 4 * depth,
        .indent_8 => 8 * depth,
        .indent_tab => blk: {
            char = '\t';
            break :blk depth;
        },
    };
    try writer.writeByteNTimes(char, n_chars);
}
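// Illustrative note (not part of the original diff): at depth 2, .indent_2
// writes four spaces, .indent_8 writes sixteen, and .indent_tab writes two
// tab characters; .minified writes nothing at any depth.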

fn serializeString(
    writer: anytype,
    element_name: ?[]const u8,
    value: []const u8,
    options: StringifyOptions,
    depth: usize,
) @TypeOf(writer).Error!void {
    if (options.emit_strings_as_arrays) {
        // if (true) return error.seestackrun;
        for (value) |c| {
            try writeIndent(writer, depth + 1, options.whitespace);

            var buf: [256]u8 = undefined;
            var fba = std.heap.FixedBufferAllocator.init(&buf);
            const alloc = fba.allocator();
            const item_name = try options.arrayElementNameConversion(alloc, element_name);
            if (item_name) |n| {
                try writer.writeAll("<");
                try writer.writeAll(n);
                try writer.writeAll(">");
            }
            try writer.print("{d}", .{c});
            try writeClose(writer, item_name);
            if (options.whitespace != .minified) {
                try writer.writeByte('\n');
            }
        }
        return;
    }
    try escapeString(writer, value);
}
/// Escapes special characters in XML strings
fn escapeString(writer: anytype, value: []const u8) @TypeOf(writer).Error!void {
    for (value) |c| {
        switch (c) {
            '&' => try writer.writeAll("&amp;"),
            '<' => try writer.writeAll("&lt;"),
            '>' => try writer.writeAll("&gt;"),
            '"' => try writer.writeAll("&quot;"),
            '\'' => try writer.writeAll("&apos;"),
            else => try writer.writeByte(c),
        }
    }
}
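
For reference, a short sketch (not part of the diff) of what the escaping above produces:

test "escapeString sketch (hypothetical)" {
    var buf: [64]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try escapeString(fbs.writer(), "a < b & \"c\"");
    // Each reserved XML character becomes its entity; everything else passes through.
    try std.testing.expectEqualStrings("a &lt; b &amp; &quot;c&quot;", fbs.getWritten());
}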

/// Does no transformation on the input array
pub fn arrayElementNoopTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
    _ = allocator;
    return name;
}

/// Attempts to convert a plural name to singular for array items
pub fn arrayElementPluralToSingluarTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
    if (name == null or name.?.len < 3) return name;

    const n = name.?;
    // There are a ton of these words, I'm just adding two for now
    // https://wordmom.com/nouns/end-e
    const es_exceptions = &[_][]const u8{
        "types",
        "bytes",
    };
    for (es_exceptions) |exception| {
        if (std.mem.eql(u8, exception, n)) {
            return n[0 .. n.len - 1];
        }
    }
    // Very basic English pluralization rules
    if (std.mem.endsWith(u8, n, "s")) {
        if (std.mem.endsWith(u8, n, "ies")) {
            // e.g., "entries" -> "entry"
            return try std.mem.concat(allocator, u8, &[_][]const u8{ n[0 .. n.len - 3], "y" });
        } else if (std.mem.endsWith(u8, n, "es")) {
            return n[0 .. n.len - 2]; // e.g., "boxes" -> "box"
        } else {
            return n[0 .. n.len - 1]; // e.g., "items" -> "item"
        }
    }

    return name; // Not recognized as plural
}
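
The rules above check the listed exceptions first, then the "-ies", "-es", and bare "-s" suffixes. A hedged sketch (not part of the diff) of the resulting conversions:

test "plural-to-singular sketch (hypothetical)" {
    const a = std.testing.allocator;
    // Only the "ies" -> "y" branch allocates; the other branches return slices of the input.
    const entry = (try arrayElementPluralToSingluarTransformation(a, "entries")).?;
    defer a.free(entry);
    try std.testing.expectEqualStrings("entry", entry);
    try std.testing.expectEqualStrings("box", (try arrayElementPluralToSingluarTransformation(a, "boxes")).?);
    try std.testing.expectEqualStrings("item", (try arrayElementPluralToSingluarTransformation(a, "items")).?);
    try std.testing.expectEqualStrings("type", (try arrayElementPluralToSingluarTransformation(a, "types")).?);
}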

// Tests
test "stringify basic types" {
    const testing = std.testing;
    const allocator = testing.allocator;

    // Test boolean
    {
        const result = try stringifyAlloc(allocator, true, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>true</root>", result);
    }

    // Test comptime integer
    {
        const result = try stringifyAlloc(allocator, 42, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
    }

    // Test integer
    {
        const result = try stringifyAlloc(allocator, @as(usize, 42), .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
    }

    // Test float
    {
        const result = try stringifyAlloc(allocator, 3.14, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>3.14e0</root>", result);
    }

    // Test string
    {
        const result = try stringifyAlloc(allocator, "hello", .{});
        // @compileLog(@typeInfo(@TypeOf("hello")).pointer.size);
        // @compileLog(@typeName(@typeInfo(@TypeOf("hello")).pointer.child));
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello</root>", result);
    }

    // Test string with special characters
    {
        const result = try stringifyAlloc(allocator, "hello & world < > \" '", .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello &amp; world &lt; &gt; &quot; &apos;</root>", result);
    }
}

test "stringify arrays" {
    const testing = std.testing;
    const allocator = testing.allocator;

    // Test array of integers
    {
        const arr = [_]i32{ 1, 2, 3 };
        const result = try stringifyAlloc(allocator, arr, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>1</root><root>2</root><root>3</root></root>", result);
    }

    // Test array of strings
    {
        const arr = [_][]const u8{ "one", "two", "three" };
        const result = try stringifyAlloc(allocator, arr, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>one</root><root>two</root><root>three</root></root>", result);
    }

    // Test array with custom root name
    {
        const arr = [_]i32{ 1, 2, 3 };
        const result = try stringifyAlloc(allocator, arr, .{ .root_name = "items" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<items><item>1</item><item>2</item><item>3</item></items>", result);
    }
}

test "stringify structs" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        age: u32,
        is_active: bool,
    };

    // Test basic struct
    {
        const person = Person{
            .name = "John",
            .age = 30,
            .is_active = true,
        };

        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><age>30</age><is_active>true</is_active></root>", result);
    }

    // Test struct with pretty printing
    {
        const person = Person{
            .name = "John",
            .age = 30,
            .is_active = true,
        };

        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <age>30</age>\n    <is_active>true</is_active>\n</root>", result);
    }

    // Test nested struct
    {
        const Address = struct {
            street: []const u8,
            city: []const u8,
        };

        const PersonWithAddress = struct {
            name: []const u8,
            address: Address,
        };

        const person = PersonWithAddress{
            .name = "John",
            .address = Address{
                .street = "123 Main St",
                .city = "Anytown",
            },
        };

        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <address>\n        <street>123 Main St</street>\n        <city>Anytown</city>\n    </address>\n</root>", result);
    }
}

test "stringify optional values" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        middle_name: ?[]const u8,
    };

    // Test with present optional
    {
        const person = Person{
            .name = "John",
            .middle_name = "Robert",
        };

        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
    }

    // Test with null optional
    {
        const person = Person{
            .name = "John",
            .middle_name = null,
        };

        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name></middle_name></root>", result);
    }
}

test "stringify optional values with emit_null_optional_fields == false" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        middle_name: ?[]const u8,
    };

    // Test with present optional
    {
        const person = Person{
            .name = "John",
            .middle_name = "Robert",
        };

        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
    }

    // Test with null optional
    {
        const person = Person{
            .name = "John",
            .middle_name = null,
        };

        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name></root>", result);
    }
}

test "stringify with custom options" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        last_name: []const u8,
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    // Test without XML declaration
    {
        const result = try stringifyAlloc(allocator, person, .{ .include_declaration = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<root><first_name>John</first_name><last_name>Doe</last_name></root>", result);
    }

    // Test with custom root name
    {
        const result = try stringifyAlloc(allocator, person, .{ .root_name = "person" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<person><first_name>John</first_name><last_name>Doe</last_name></person>", result);
    }

    // Test with custom indent level
    {
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root>
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }

    // Test with output []u8 as array
    {
        // pointer, size 1, child == .array, child.array.child == u8
        // @compileLog(@typeInfo(@typeInfo(@TypeOf("foo")).pointer.child));
        const result = try stringifyAlloc(allocator, "foo", .{ .emit_strings_as_arrays = true, .root_name = "bytes" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<bytes><byte>102</byte><byte>111</byte><byte>111</byte></bytes>", result);
    }
}

test "structs with custom field names" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        last_name: []const u8,

        pub fn fieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
            if (std.mem.eql(u8, field_name, "first_name")) return "GivenName";
            if (std.mem.eql(u8, field_name, "last_name")) return "FamilyName";
            unreachable;
        }
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root>
            \\  <GivenName>John</GivenName>
            \\  <FamilyName>Doe</FamilyName>
            \\</root>
        , result);
    }
}

test "structs with optional values" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }
}

test "optional structs with value" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const person: ?Person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }
}

test "nested optional structs with value" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Name = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const Person = struct {
        name: ?Name,
    };

    const person: ?Person = Person{
        .name = .{
            .first_name = "John",
            .last_name = "Doe",
        },
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <name>
            \\    <first_name>John</first_name>
            \\    <last_name>Doe</last_name>
            \\  </name>
            \\</root>
        , result);
    }
}

@ -1,7 +1,6 @@
const std = @import("std");
const xml = @import("xml.zig");
const date = @import("date");
const sm = @import("service_manifest");
const date = @import("date.zig");

const log = std.log.scoped(.xml_shaper);

@ -95,52 +94,6 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse
    return Parsed(T).init(arena_allocator, try parseInternal(T, root, opts), parsed);
}

pub const XmlArrayStyle = enum {
    collection, // Has a container element and list of child elements
    repeated_root, // Repeats the same element without a container, e.g. S3 ListBucketResult
};

fn detectArrayStyle(comptime T: type, element: *xml.Element, options: ParseOptions) !XmlArrayStyle {
    _ = options;

    if (@typeInfo(T) != .@"struct") {
        return .collection;
    }

    // does the element have child elements that match our expected struct?
    const field_names = comptime blk: {
        var result: [std.meta.fieldNames(T).len]struct {
            []const u8,
        } = undefined;

        for (std.meta.fieldNames(T), 0..) |field_name, i| {
            const key = if (@hasDecl(T, "fieldNameFor"))
                T.fieldNameFor(undefined, field_name)
            else
                field_name;

            result[i] = .{key};
        }

        break :blk std.StaticStringMap(void).initComptime(result);
    };

    var matching_fields: usize = 0;
    var element_iterator = element.elements();

    while (element_iterator.next()) |el| {
        if (field_names.has(el.tag)) {
            matching_fields += 1;
        }
    }

    if (matching_fields > 0) {
        return .repeated_root;
    }

    return .collection;
}
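
For context, the two shapes the removed detector distinguished, as hypothetical payloads (element names are illustrative only, not from the diff):

// .collection: a container element wrapping one child element per item.
const collection_style =
    \\<Items>
    \\  <Item><Key>a</Key></Item>
    \\  <Item><Key>b</Key></Item>
    \\</Items>
;

// .repeated_root: the same element repeated as siblings with no container,
// as in S3 ListBucketResult <Contents> entries.
const repeated_root_style =
    \\<Contents><Key>a</Key></Contents>
    \\<Contents><Key>b</Key></Contents>
;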

fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
    switch (@typeInfo(T)) {
        .bool => {

@ -209,10 +162,8 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                return try parseInternal(optional_info.child, element, options);
            }
        },
        .@"enum" => {
            if (T == date.Timestamp) {
                return try date.Timestamp.parse(element.children.items[0].CharData);
            }
        .@"enum" => |enum_info| {
            _ = enum_info;
            // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
            // if (numeric) |num| {
            //     return std.meta.intToEnum(T, num);

@ -377,31 +328,23 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
            // <Item>bar</Item>
            // <Items>
            if (ptr_info.child != u8) {
                const array_style = try detectArrayStyle(ptr_info.child, element, options);

                log.debug("type = {s}, style = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @tagName(array_style), @typeName(ptr_info.child), element.tag });

                log.debug("type = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @typeName(ptr_info.child), element.tag });
                var iterator = element.elements();
                var children = std.ArrayList(ptr_info.child).init(allocator);
                defer children.deinit();

                switch (array_style) {
                    .collection => {
                        var iterator = element.elements();
                        while (iterator.next()) |child_element| {
                            try children.append(try parseInternal(ptr_info.child, child_element, options));
                        }
                    },
                    .repeated_root => {
                        var current: ?*Element = element;
                        while (current) |el| : (current = el.next_sibling) {
                            if (!std.mem.eql(u8, el.tag, element.tag)) continue;

                            try children.append(try parseInternal(ptr_info.child, el, options));
                        }
                    },
                while (iterator.next()) |child_element| {
                    try children.append(try parseInternal(ptr_info.child, child_element, options));
                }

                return children.toOwnedSlice();
                // var inx: usize = 0;
                // while (inx < children.len) {
                //     switch (element.children.items[inx]) {
                //         .Element => children[inx] = try parseInternal(ptr_info.child, element.children.items[inx].Element, options),
                //         .CharData => children[inx] = try allocator.dupe(u8, element.children.items[inx].CharData),
                //         .Comment => children[inx] = try allocator.dupe(u8, element.children.items[inx].Comment), // This might be an error...
                //     }
                //     inx += 1;
                // }
            }
            return try allocator.dupe(u8, element.children.items[0].CharData);
        },

@ -793,33 +736,3 @@ test "compiler assertion failure 2"
    defer parsed_data.deinit();
    try testing.expect(parsed_data.parsed_value.key_group_list.?.quantity == 42);
}

test "can parse list objects" {
    const data =
        \\<?xml version="1.0" encoding="UTF-8"?>
        \\<ListBucketResult>
        \\  <Contents>
        \\    <Key>file1.txt</Key>
        \\    <Size>1024</Size>
        \\  </Contents>
        \\  <Contents>
        \\    <Key>file2.jpg</Key>
        \\    <Size>2048</Size>
        \\  </Contents>
        \\</ListBucketResult>
    ;

    const Response = sm.s3.list_objects_v2.Response;

    const parsed_data = try parse(Response, data, .{ .allocator = testing.allocator });
    defer parsed_data.deinit();

    const response: Response = parsed_data.parsed_value;
    const s3_objects: []sm.s3.Object = response.contents.?;

    try testing.expectEqual(2, s3_objects.len);
    try testing.expectEqualStrings(s3_objects[0].key.?, "file1.txt");
    try testing.expectEqualStrings(s3_objects[1].key.?, "file2.jpg");
    try testing.expectEqual(s3_objects[0].size.?, 1024);
    try testing.expectEqual(s3_objects[1].size.?, 2048);
}