Compare commits


5 commits

SHA1        Message                                                Date
ab47cb9deb  better test web server management                      2025-04-17 17:42:26 -07:00
            (checks failed: aws-zig mach nominated build / build-zig-nominated-mach-latest (push), failing after 1m13s)
ae8298b18c  update CI based on master                              2025-04-16 19:38:49 -07:00
            (checks failed: aws-zig mach nominated build / build-zig-nominated-mach-latest (push), failing after 3h10m4s)
5cb0c3cc88  add test server timeout                                2025-04-16 19:36:03 -07:00
            (checks failed: aws-zig mach nominated build / build-zig-nominated-mach-latest (push), failing after 7s)
3e146f143c  sync workflows from master branch to zig-mach branch   2025-03-21 12:48:55 -07:00
838f0ffb96  fix json serialization for null/empty maps             2025-03-21 12:43:07 -07:00
31 changed files with 1118 additions and 2068 deletions

.envrc (6 lines changed)

@@ -1,8 +1,8 @@
# vi: ft=sh
# shellcheck shell=bash
if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then
source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ="
fi
use zig 0.14.0
use zig 2024.11.0-mach


@@ -1,6 +1,5 @@
name: AWS-Zig Build
on:
workflow_dispatch:
push:
branches:
- 'master'
@@ -18,19 +17,11 @@ jobs:
- name: Check out repository code
uses: actions/checkout@v4
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v1.2.1
uses: mlugg/setup-zig@v1.2.1
with:
version: 0.14.0
- name: Restore Zig caches
uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
- name: Ulimit
run: ulimit -a
- name: Run smoke test
run: zig build smoke-test --verbose
- name: Run tests
run: zig build test --verbose --summary all
- name: Run tests (release mode)
run: zig build test -Doptimize=ReleaseSafe --verbose
run: zig build test --verbose
# Zig build scripts don't have the ability to import dependencies directly
# (https://github.com/ziglang/zig/issues/18164). We can allow downstream
# build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@@ -75,7 +66,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
- name: Build example


@@ -26,17 +26,11 @@ jobs:
with:
ref: zig-develop
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v1.2.1
uses: mlugg/setup-zig@v1.2.1
with:
version: master
- name: Restore Zig caches
uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
- name: Run smoke test
run: zig build smoke-test --verbose
- name: Run full tests
run: zig build test --verbose --summary all
- name: Run tests (release mode)
run: zig build test -Doptimize=ReleaseSafe --verbose
- name: Run tests
run: zig build test --verbose
# Zig package manager expects everything to be inside a directory in the archive,
# which it then strips out on download. So we need to shove everything inside a directory
# the way GitHub/Gitea does for repo archives
@@ -69,7 +63,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
- name: Build example


@@ -20,18 +20,11 @@ jobs:
with:
ref: zig-0.13
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v1.2.1
uses: mlugg/setup-zig@v1.2.1
with:
version: 0.13.0
- name: Restore Zig caches
uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
- name: Run smoke test
run: zig build smoke-test --verbose
- name: Run full tests
run: zig build test --verbose --summary all
# Release mode fix not backported to 0.13.0 code
#- name: Run tests (release mode)
# run: zig build test -Doptimize=ReleaseSafe --verbose
- name: Run tests
run: zig build test --verbose
# Zig build scripts don't have the ability to import dependencies directly
# (https://github.com/ziglang/zig/issues/18164). We can allow downstream
# build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@@ -76,7 +69,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
- name: Build example

.github/workflows/build.yaml (new vendored file, 31 lines)

@@ -0,0 +1,31 @@
name: AWS-Zig Build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- '*'
- '!zig-develop*'
jobs:
build-zig-0-12-0-amd64:
runs-on: ubuntu-latest
env:
ZIG_VERSION: 0.13.0
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
sudo tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
sudo ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
- name: Run tests
run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
- name: Build example
run: ( cd example && zig build ) # Make sure example builds

.github/workflows/zig-mach.yaml (new vendored file, 36 lines)

@@ -0,0 +1,36 @@
name: aws-zig mach nominated build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- 'zig-develop*'
jobs:
build-zig-mach-latest:
runs-on: ubuntu-latest
# Need to use the default container with node and all that, so we can
# use JS-based actions like actions/checkout@v3...
# container:
# image: alpine:3.15.0
env:
ZIG_VERSION: mach-latest
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
apt-get update && apt-get install -y jq
file="$(curl -Osw '%{filename_effective}' "$(curl -s https://machengine.org/zig/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
sudo tar x -C /usr/local -f "${file}"
sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
zig version
- name: Run tests
run: zig build test -Dbroken-windows --verbose
- name: Build example
run: ( cd example && zig build ) # Make sure example builds

.github/workflows/zig-nightly.yaml (new vendored file, 36 lines)

@@ -0,0 +1,36 @@
name: aws-zig nightly build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- 'zig-develop*'
jobs:
build-zig-nightly:
runs-on: ubuntu-latest
# Need to use the default container with node and all that, so we can
# use JS-based actions like actions/checkout@v3...
# container:
# image: alpine:3.15.0
env:
ZIG_VERSION: master
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
apt-get update && apt-get install -y jq
file="$(curl -Osw '%{filename_effective}' "$(curl -s https://ziglang.org/download/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
sudo tar x -C /usr/local -f "${file}"
sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
zig version
- name: Run tests
run: zig build test -Dbroken-windows --verbose
- name: Build example
run: ( cd example && zig build ) # Make sure example builds


@@ -1,11 +1,11 @@
AWS SDK for Zig
===============
[Zig 0.14](https://ziglang.org/download/#release-0.14.0):
[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
[![Build Status: Zig 0.14.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
[![Build Status: Zig 0.13.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
[Last Mach Nominated Zig Version](https://machengine.org/docs/nominated-zig/):
[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
[![Build Status: Mach nominated](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-mach.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)
@@ -13,13 +13,12 @@ AWS SDK for Zig
[![Build Status: Zig Nightly](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-nightly.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)
[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
[![Build Status: Zig 0.13.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-previous.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
panics under CI, but I have not yet reproduced this panic. Running manually on
multiple machines appears to be working properly.
Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
in x86_64-linux, and will vary based on services used. Tested targets:
in x86_linux, and will vary based on services used. Tested targets:
* x86_64-linux
* riscv64-linux
@@ -31,38 +30,22 @@ in x86_64-linux, and will vary based on services used. Tested targets:
Tested targets are built, but not continuously tested, by CI.
Branches
--------
Zig-Develop Branch
------------------
* **master**: This branch tracks the latest released zig version
* **zig-0.13**: This branch tracks the previous released zig version (0.13 currently).
Support for the previous version is best effort, generally
degrading over time. Fixes will generally appear in master, then
backported into the previous version.
* **zig-mach**: This branch tracks the latest mach nominated version. A separate
branch is necessary as mach nominated is usually, but not always,
more recent than the latest production zig. Support for the mach
version is best effort.
* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
for breaking changes that will need to be dealt with when
a new mach nominated version or new zig release appear.
Expect significant delays in any build failures.
Other branches/tags exist but are unsupported
This branch is intended for use with the in-development version of Zig. This
starts with 0.12.0-dev.3180+83e578a18. This is aligned with [Mach Engine's Nominated
Zig Versions](https://machengine.org/about/nominated-zig/). Nightly zig versions
are difficult to keep up with and there is no special effort made there, build
status is FYI (and used as a canary for nominated zig versions).
Building
--------
`zig build` should work. It will build the code generation project, fetch model
files from upstream AWS Go SDK v2, run the code generation, then build the main
project with the generated code. Testing can be done with `zig build test`. Note that
this command tests on all supported architectures, so for a faster testing
process, use `zig build smoke-test` instead.
project with the generated code. Testing can be done with `zig test`.
To make development even faster, a build option is provided to avoid the use of
LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
can reduce build/test time 300%. Note, however, native code generation in zig
is not yet complete, so you may see errors.
Using
-----
@@ -70,8 +53,7 @@ Using
This is designed for use with the Zig package manager, and exposes a module
called "aws". Set up `build.zig.zon` and add the dependency/module to your project
as normal and the package manager should do its thing. A full example can be found
in [/example](example/build.zig.zon). This can also be used at build time in
a downstream project's `build.zig`.
in [/example](example/README.md).
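
As a sketch, the wiring inside a downstream project's `build.zig` might look like
the following (a minimal example assuming the dependency is named "aws" in
build.zig.zon and that `b`, `exe`, `target`, and `optimize` are already in scope;
this is not code taken from the example project):

const aws_dep = b.dependency("aws", .{
    .target = target,
    .optimize = optimize,
});
exe.root_module.addImport("aws", aws_dep.module("aws"));
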
Configuring the module and/or Running the demo
----------------------------------------------
@@ -79,8 +61,8 @@ Configuring the module and/or Running the demo
This library mimics the aws c libraries for its work, so it operates like most
other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
for working with services. For local testing or alternative endpoints, there's
no real standard, so there is code to look for an environment variable
`AWS_ENDPOINT_URL` variable that will supersede all other configuration.
no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
variable that will supersede all other configuration.
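
A minimal sketch of that precedence check (a hypothetical helper for illustration,
not the SDK's actual lookup code):

const std = @import("std");

// Returns the AWS_ENDPOINT_URL override if set, null otherwise
// (hypothetical; the SDK's real lookup may differ).
fn endpointOverride(allocator: std.mem.Allocator) !?[]u8 {
    return std.process.getEnvVarOwned(allocator, "AWS_ENDPOINT_URL") catch |err| switch (err) {
        error.EnvironmentVariableNotFound => null,
        else => return err,
    };
}
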
Limitations
-----------
@@ -101,6 +83,13 @@ TODO List:
* Implement timeouts and other TODO's in the code
* Add option to cache signature keys
Services without TLS 1.3 support
--------------------------------
All AWS services should support TLS 1.3 at this point, but there are many regions
and several partitions, and not all of them have been tested, so your mileage
may vary. If something doesn't work, please submit an issue to let others know.
Dependency tree
---------------

build.zig (259 lines changed)

@@ -19,7 +19,14 @@ const test_targets = [_]std.Target.Query{
};
pub fn build(b: *Builder) !void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const optimize = b.standardOptimizeOption(.{});
const no_llvm = b.option(
@@ -27,36 +34,62 @@ pub fn build(b: *Builder) !void {
"no-llvm",
"Disable LLVM",
) orelse false;
const broken_windows = b.option(
bool,
"broken-windows",
"Windows is broken in this environment (do not run Windows tests)",
) orelse false;
const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
const test_filters: []const []const u8 = b.option(
[]const []const u8,
"test-filter",
"Skip tests that do not match any of the specified filters",
) orelse &.{};
const dep_mods = try getDependencyModules(b, .{
.target = target,
.optimize = optimize,
});
const mod_exe = b.createModule(.{
// TODO: Embed the current git version in the code. We can do this
// by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
// grab that commit, and use b.addOptions/exe.addOptions to generate the
// Options file. See https://github.com/ziglang/zig/issues/14979 for usage
// example.
//
// From there, I'm not sure what the generated file looks like or quite how
// to use, but that should be easy. It may also give some ideas on the
// code gen piece itself, though it might be nice to leave as a separate
// executable
// TODO: This executable should not be built when importing as a package.
// It relies on code gen and is all fouled up when getting imported
const exe = b.addExecutable(.{
.name = "demo",
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
configure(mod_exe, dep_mods, true);
const exe = b.addExecutable(.{
.name = "demo",
.root_module = mod_exe,
.use_llvm = !no_llvm,
exe.use_llvm = !no_llvm;
const smithy_dep = b.dependency("smithy", .{
// These are the arguments to the dependency. It expects a target and optimization level.
.target = target,
.optimize = optimize,
});
const smithy_module = smithy_dep.module("smithy");
exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
// Expose module to others
_ = b.addModule("aws", .{
.root_source_file = b.path("src/aws.zig"),
.imports = &.{.{ .name = "smithy", .module = smithy_module }},
});
// Expose module to others
_ = b.addModule("aws-signing", .{
.root_source_file = b.path("src/aws_signing.zig"),
.imports = &.{.{ .name = "smithy", .module = smithy_module }},
});
// TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
//
// We are working here with kind of a weird dependency though. So we can do this
// another way
//
// TODO: These target/optimize are not correct, as we need to run the thing
// const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
// .target = target,
// .optimize = optimize,
// });
// const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
// exe.step.dependOn(&codegen_cmd.step);
const run_cmd = b.addRunArtifact(exe);
run_cmd.step.dependOn(b.getInstallStep());
@@ -67,77 +100,62 @@ pub fn build(b: *Builder) !void {
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
const cg = b.step("gen", "Generate zig service code from smithy models");
const gen_step = blk: {
const cg = b.step("gen", "Generate zig service code from smithy models");
const cg_mod = b.createModule(.{
.root_source_file = b.path("codegen/src/main.zig"),
// We need this generated for the host, not the real target
.target = b.graph.host,
.optimize = if (b.verbose) .Debug else .ReleaseSafe,
});
configure(cg_mod, dep_mods, false);
const cg_exe = b.addExecutable(.{
.name = "codegen",
.root_source_file = b.path("codegen/src/main.zig"),
// We need this generated for the host, not the real target
.target = b.graph.host,
.optimize = if (b.verbose) .Debug else .ReleaseSafe,
});
cg_exe.use_llvm = !no_llvm;
cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
var cg_cmd = b.addRunArtifact(cg_exe);
cg_cmd.addArg("--models");
const hash = hash_blk: {
for (b.available_deps) |dep| {
const dep_name = dep.@"0";
const dep_hash = dep.@"1";
if (std.mem.eql(u8, dep_name, "models"))
break :hash_blk dep_hash;
}
return error.DependencyNamedModelsNotFoundInBuildZigZon;
};
cg_cmd.addArg(try std.fs.path.join(
b.allocator,
&[_][]const u8{
b.graph.global_cache_root.path.?,
"p",
hash,
models_subdir,
},
));
cg_cmd.addArg("--output");
cg_cmd.addDirectoryArg(b.path("src/models"));
if (b.verbose)
cg_cmd.addArg("--verbose");
// cg_cmd.step.dependOn(&fetch_step.step);
// TODO: this should use zig_exe from std.Build
// codegen should store a hash in a comment
// this would be hash of the exe that created the file
// concatenated with hash of input json. this would
// allow skipping generated files. May not include hash
// of contents of output file as maybe we want to tweak
// manually??
//
// All the hashes can be in service_manifest.zig, which
// could be fun to just parse and go nuts. Top of
// file, generator exe hash. Each import has comment
// with both input and output hash and we can decide
// later about warning on manual changes...
const cg_exe = b.addExecutable(.{
.name = "codegen",
.root_module = cg_mod,
});
var cg_cmd = b.addRunArtifact(cg_exe);
cg_cmd.addArg("--models");
cg_cmd.addArg(try std.fs.path.join(
b.allocator,
&[_][]const u8{
try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
models_subdir,
},
));
cg_cmd.addArg("--output");
const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
if (b.verbose)
cg_cmd.addArg("--verbose");
// cg_cmd.step.dependOn(&fetch_step.step);
// TODO: this should use zig_exe from std.Build
// codegen should store a hash in a comment
// this would be hash of the exe that created the file
// concatenated with hash of input json. this would
// allow skipping generated files. May not include hash
// of contents of output file as maybe we want to tweak
// manually??
//
// All the hashes can be in service_manifest.zig, which
// could be fun to just parse and go nuts. Top of
// file, generator exe hash. Each import has comment
// with both input and output hash and we can decide
// later about warning on manual changes...
cg.dependOn(&cg_cmd.step);
break :blk cg;
};
cg.dependOn(&cg_cmd.step);
exe.step.dependOn(cg);
// This allows us to have each module depend on the
// generated service manifest.
const service_manifest_module = b.createModule(.{
.root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
.target = target,
.optimize = optimize,
});
configure(service_manifest_module, dep_mods, true);
mod_exe.addImport("service_manifest", service_manifest_module);
// Expose module to others
const mod_aws = b.addModule("aws", .{
.root_source_file = b.path("src/aws.zig"),
.target = target,
.optimize = optimize,
});
mod_aws.addImport("service_manifest", service_manifest_module);
configure(mod_aws, dep_mods, true);
// Expose module to others
const mod_aws_signing = b.addModule("aws-signing", .{
.root_source_file = b.path("src/aws_signing.zig"),
});
configure(mod_aws_signing, dep_mods, false);
exe.step.dependOn(gen_step);
// Similar to creating the run step earlier, this exposes a `test` step to
// the `zig build --help` menu, providing a way for the user to request
@@ -160,23 +178,15 @@ pub fn build(b: *Builder) !void {
// test_step.dependOn(&run_unit_tests.step);
for (test_targets) |t| {
if (broken_windows and t.os_tag == .windows) continue;
const mod_unit_tests = b.createModule(.{
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_source_file = b.path("src/aws.zig"),
.target = b.resolveTargetQuery(t),
.optimize = optimize,
});
mod_unit_tests.addImport("service_manifest", service_manifest_module);
configure(mod_unit_tests, dep_mods, true);
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_module = mod_unit_tests,
.filters = test_filters,
});
unit_tests.step.dependOn(cg);
unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
unit_tests.step.dependOn(gen_step);
unit_tests.use_llvm = !no_llvm;
const run_unit_tests = b.addRunArtifact(unit_tests);
@@ -195,51 +205,16 @@ pub fn build(b: *Builder) !void {
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const smoke_test = b.addTest(.{
.root_module = mod_aws,
.filters = test_filters,
.root_source_file = b.path("src/aws.zig"),
.target = target,
.optimize = optimize,
});
smoke_test.use_llvm = !no_llvm;
smoke_test.step.dependOn(cg);
smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
smoke_test.step.dependOn(gen_step);
const run_smoke_test = b.addRunArtifact(smoke_test);
smoke_test_step.dependOn(&run_smoke_test.step);
if (no_bin) {
b.getInstallStep().dependOn(&exe.step);
} else {
b.installArtifact(exe);
}
}
fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.Module), include_time: bool) void {
compile.addImport("smithy", modules.get("smithy").?);
compile.addImport("date", modules.get("date").?);
compile.addImport("json", modules.get("json").?);
if (include_time) compile.addImport("zeit", modules.get("zeit").?);
}
fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Build.Module) {
var result = std.StringHashMap(*std.Build.Module).init(b.allocator);
// External dependencies
const dep_smithy = b.dependency("smithy", args);
const mod_smithy = dep_smithy.module("smithy");
try result.putNoClobber("smithy", mod_smithy);
const dep_zeit = b.dependency("zeit", args);
const mod_zeit = dep_zeit.module("zeit");
try result.putNoClobber("zeit", mod_zeit);
// End External dependencies
// Private modules/dependencies
const dep_json = b.dependency("json", args);
const mod_json = dep_json.module("json");
try result.putNoClobber("json", mod_json);
const dep_date = b.dependency("date", args);
const mod_date = dep_date.module("date");
try result.putNoClobber("date", mod_date);
// End private modules/dependencies
return result;
b.installArtifact(exe);
}
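
One side of this diff routes every import through the configure/getDependencyModules
helpers shown above; as a sketch, registering one more module under that scheme
might look like this (the module name and path are hypothetical):

const mod_extra = b.createModule(.{
    .root_source_file = b.path("src/extra.zig"),
    .target = target,
    .optimize = optimize,
});
configure(mod_extra, dep_mods, true); // wires in smithy/date/json, plus zeit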


@@ -1,35 +1,22 @@
.{
.name = .aws,
.name = "aws",
.version = "0.0.1",
.fingerprint = 0x1f26b7b27005bb49,
.paths = .{
"build.zig",
"build.zig.zon",
"src",
"codegen",
"lib",
"README.md",
"LICENSE",
},
.dependencies = .{
.smithy = .{
.url = "https://git.lerch.org/lobo/smithy/archive/fd9be1afbfcc60d52896c077d8e9c963bb667bf1.tar.gz",
.hash = "smithy-1.0.0-uAyBgZPSAgBHStx7nrj0u3sN66g8Ppnn3XFUEJhn00rP",
.url = "https://git.lerch.org/lobo/smithy/archive/3ed98751bc414e005af6ad185feb213d4366c0db.tar.gz",
.hash = "12204a784751a4ad5ed6c8955ba91fcbc4a3cad6c5a7da38f39abf074ef801d13172",
},
.models = .{
.url = "https://github.com/aws/aws-sdk-go-v2/archive/refs/tags/release-2025-05-05.tar.gz",
.hash = "N-V-__8AAKWdeiawujEcrfukQbb8lLAiQIRT0uG5gCcm4b7W",
},
.zeit = .{
.url = "git+https://github.com/rockorager/zeit#fb6557ad4bd0cd0f0f728ae978061d7fe992c528",
.hash = "zeit-0.6.0-5I6bk29nAgDhK6AVMtXMWhkKTYgUncrWjnlI_8X9DPSd",
},
.date = .{
.path = "lib/date",
},
.json = .{
.path = "lib/json",
.url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
.hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
},
},
}

codegen/src/json.zig (new file, 150 lines)

@@ -0,0 +1,150 @@
const std = @import("std");
// options is a json.Options, but since we're using our hacked json.zig we don't want to
// specifically call this out
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (@typeInfo(@TypeOf(map)) == .optional) {
if (map == null)
return false
else
return serializeMapInternal(map.?, key, options, out_stream);
}
return serializeMapInternal(map, key, options, out_stream);
}
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (map.len == 0) {
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
try out_stream.writeByte('}');
return true;
}
// TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
if (options.whitespace) |_|
try out_stream.writeByte('\n');
for (map, 0..) |tag, i| {
if (tag.key == null or tag.value == null) continue;
// TODO: Deal with escaping and general "json.stringify" the values...
if (child_options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('"');
try jsonEscape(tag.key.?, child_options, out_stream);
_ = try out_stream.write("\":");
if (child_options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('"');
try jsonEscape(tag.value.?, child_options, out_stream);
try out_stream.writeByte('"');
if (i < map.len - 1) {
try out_stream.writeByte(',');
}
if (child_options.whitespace) |_|
try out_stream.writeByte('\n');
}
if (options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('}');
return true;
}
// code within jsonEscape lifted from json.zig in stdlib
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
var i: usize = 0;
while (i < value.len) : (i += 1) {
switch (value[i]) {
// normal ascii character
0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
// only 2 characters that *must* be escaped
'\\' => try out_stream.writeAll("\\\\"),
'\"' => try out_stream.writeAll("\\\""),
// solidus is optional to escape
'/' => {
if (options.string.String.escape_solidus) {
try out_stream.writeAll("\\/");
} else {
try out_stream.writeByte('/');
}
},
// control characters with short escapes
// TODO: option to switch between unicode and 'short' forms?
0x8 => try out_stream.writeAll("\\b"),
0xC => try out_stream.writeAll("\\f"),
'\n' => try out_stream.writeAll("\\n"),
'\r' => try out_stream.writeAll("\\r"),
'\t' => try out_stream.writeAll("\\t"),
else => {
const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
// control characters (only things left with 1 byte length) should always be printed as unicode escapes
if (ulen == 1 or options.string.String.escape_unicode) {
const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
try outputUnicodeEscape(codepoint, out_stream);
} else {
try out_stream.writeAll(value[i .. i + ulen]);
}
i += ulen - 1;
},
}
}
}
// outputUnicodeEscape and assert lifted from json.zig in stdlib
fn outputUnicodeEscape(
codepoint: u21,
out_stream: anytype,
) !void {
if (codepoint <= 0xFFFF) {
// If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
// then it may be represented as a six-character sequence: a reverse solidus, followed
// by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
} else {
assert(codepoint <= 0x10FFFF);
// To escape an extended character that is not in the Basic Multilingual Plane,
// the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
}
}
/// This function invokes undefined behavior when `ok` is `false`.
/// In Debug and ReleaseSafe modes, calls to this function are always
/// generated, and the `unreachable` statement triggers a panic.
/// In ReleaseFast and ReleaseSmall modes, calls to this function are
/// optimized away, and in fact the optimizer is able to use the assertion
/// in its heuristics.
/// Inside a test block, it is best to use the `std.testing` module rather
/// than this function, because this function may not detect a test failure
/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
/// function is the correct function to use.
pub fn assert(ok: bool) void {
if (!ok) unreachable; // assertion failure
}
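
As a sanity sketch of serializeMap's output shape, a test like the following could
be appended to this file (StubOptions is a hypothetical stand-in for the hacked
json.zig options type, not the real thing):

const KV = struct { key: ?[]const u8, value: ?[]const u8 };

// Hypothetical minimal stand-in for the hacked json.zig options type.
const StubOptions = struct {
    whitespace: ?struct {
        indent_level: usize = 0,
        separator: bool = true,
        pub fn outputIndent(self: @This(), out: anytype) !void {
            for (0..self.indent_level) |_| try out.writeAll("  ");
        }
    } = null,
    string: struct {
        String: struct { escape_solidus: bool = false, escape_unicode: bool = false } = .{},
    } = .{},
};

test "serializeMap writes the map as a JSON object under the given key" {
    var buf: [64]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    const tags = [_]KV{.{ .key = "Env", .value = "prod" }};
    _ = try serializeMap(tags[0..], "Tags", StubOptions{}, fbs.writer());
    try std.testing.expectEqualStrings("\"Tags\":{\"Env\":\"prod\"}", fbs.getWritten());
}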


@@ -2,6 +2,7 @@ const std = @import("std");
const smithy = @import("smithy");
const snake = @import("snake.zig");
const Hasher = @import("Hasher.zig");
const json_zig = @embedFile("json.zig");
var verbose = false;
@@ -32,6 +33,8 @@ pub fn main() anyerror!void {
if (std.mem.eql(u8, "--models", arg))
models_dir = try std.fs.cwd().openDir(args[i + 1], .{ .iterate = true });
}
// TODO: Seems like we should remove this in favor of a package
try output_dir.writeFile(.{ .sub_path = "json.zig", .data = json_zig });
// TODO: We need a different way to handle this file...
const manifest_file_started = false;
@@ -183,13 +186,8 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
defer arena.deinit();
const allocator = arena.allocator();
_ = try writer.write("const std = @import(\"std\");\n");
_ = try writer.write("const smithy = @import(\"smithy\");\n");
_ = try writer.write("const json = @import(\"json\");\n");
_ = try writer.write("const date = @import(\"date\");\n");
_ = try writer.write("const zeit = @import(\"zeit\");\n");
_ = try writer.write("\n");
_ = try writer.write("const serializeMap = json.serializeMap;\n");
_ = try writer.write("\n");
_ = try writer.write("const serializeMap = @import(\"json.zig\").serializeMap;\n");
_ = try writer.write("const smithy = @import(\"smithy\");\n\n");
if (verbose) std.log.info("Processing file: {s}", .{file_name});
const service_names = generateServicesForFilePath(allocator, ";", file_name, writer) catch |err| {
std.log.err("Error processing file: {s}", .{file_name});
@@ -373,7 +371,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
};
for (services.items) |service| {
var sdk_id: []const u8 = undefined;
const version: ?[]const u8 = service.shape.service.version;
const version: []const u8 = service.shape.service.version;
const name: []const u8 = service.name;
var arn_namespace: ?[]const u8 = undefined;
var sigv4_name: ?[]const u8 = null;
@@ -404,10 +402,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
const constant_name = try constantName(allocator, sdk_id);
try constant_names.append(constant_name);
try writer.print("const Self = @This();\n", .{});
if (version) |v|
try writer.print("pub const version: ?[]const u8 = \"{s}\";\n", .{v})
else
try writer.print("pub const version: ?[]const u8 = null;\n", .{});
try writer.print("pub const version: []const u8 = \"{s}\";\n", .{version});
try writer.print("pub const sdk_id: []const u8 = \"{s}\";\n", .{sdk_id});
if (arn_namespace) |a| {
try writer.print("pub const arn_namespace: ?[]const u8 = \"{s}\";\n", .{a});
@@ -418,10 +413,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
// TODO: This really should just be ".whatevs". We're fully qualifying here, which isn't typical
try writer.print("pub const aws_protocol: smithy.AwsProtocol = {};\n\n", .{aws_protocol});
_ = try writer.write("pub const service_metadata: struct {\n");
if (version) |v|
try writer.print(" version: ?[]const u8 = \"{s}\",\n", .{v})
else
try writer.print(" version: ?[]const u8 = null,\n", .{});
try writer.print(" version: []const u8 = \"{s}\",\n", .{version});
try writer.print(" sdk_id: []const u8 = \"{s}\",\n", .{sdk_id});
if (arn_namespace) |a| {
try writer.print(" arn_namespace: ?[]const u8 = \"{s}\",\n", .{a});
@@ -443,7 +435,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerationState, writer: anytype) !void {
// More types may be added during processing
while (file_state.additional_types_to_generate.pop()) |t| {
while (file_state.additional_types_to_generate.popOrNull()) |t| {
if (file_state.additional_types_generated.getEntry(t.name) != null) continue;
// std.log.info("\t\t{s}", .{t.name});
var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
@ -724,7 +716,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
// The serializer will have to deal with the idea we might be an array
return try generateTypeFor(shape.set.member_target, writer, state, true);
},
.timestamp => |s| try generateSimpleTypeFor(s, "date.Timestamp", writer),
.timestamp => |s| try generateSimpleTypeFor(s, "f128", writer),
.blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
.boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
.double => |s| try generateSimpleTypeFor(s, "f64", writer),
@@ -970,6 +962,5 @@ fn avoidReserved(snake_name: []const u8) []const u8 {
if (std.mem.eql(u8, snake_name, "export")) return "@\"export\"";
if (std.mem.eql(u8, snake_name, "union")) return "@\"union\"";
if (std.mem.eql(u8, snake_name, "enum")) return "@\"enum\"";
if (std.mem.eql(u8, snake_name, "inline")) return "@\"inline\"";
return snake_name;
}


@@ -1,13 +1,12 @@
.{
.name = .myapp,
.name = "myapp",
.version = "0.0.1",
.fingerprint = 0x8798022a511224c5,
.paths = .{""},
.dependencies = .{
.aws = .{
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/7a6086447c1249b0e5b5b5f3873d2f7932bea56d/7a6086447c1249b0e5b5b5f3873d2f7932bea56d-with-models.tar.gz",
.hash = "aws-0.0.1-SbsFcGN_CQCBjurpc2GEMw4c_qAkGu6KpuVnLBLY4L4q",
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz",
.hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968",
},
},
}


@@ -1,41 +0,0 @@
const std = @import("std");
pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const lib_mod = b.addModule("date", .{
.root_source_file = b.path("src/root.zig"),
.target = target,
.optimize = optimize,
});
const lib = b.addLibrary(.{
.linkage = .static,
.name = "date",
.root_module = lib_mod,
});
b.installArtifact(lib);
const lib_unit_tests = b.addTest(.{
.root_module = lib_mod,
});
const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);
const test_step = b.step("test", "Run unit tests");
test_step.dependOn(&run_lib_unit_tests.step);
const dep_zeit = b.dependency("zeit", .{
.target = target,
.optimize = optimize,
});
lib_mod.addImport("zeit", dep_zeit.module("zeit"));
const dep_json = b.dependency("json", .{
.target = target,
.optimize = optimize,
});
lib_mod.addImport("json", dep_json.module("json"));
}


@@ -1,20 +0,0 @@
.{
.name = .date,
.version = "0.0.0",
.fingerprint = 0xaa9e377a226d739e, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{
.zeit = .{
.url = "git+https://github.com/rockorager/zeit#fb6557ad4bd0cd0f0f728ae978061d7fe992c528",
.hash = "zeit-0.6.0-5I6bk29nAgDhK6AVMtXMWhkKTYgUncrWjnlI_8X9DPSd",
},
.json = .{
.path = "../json",
},
},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}


@@ -1,206 +0,0 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.
const std = @import("std");
const log = std.log.scoped(.date);
const zeit = @import("zeit");
pub const DateTime = struct {
day: u8,
month: u8,
year: u16,
hour: u8,
minute: u8,
second: u8,
pub fn fromInstant(val: zeit.Instant) DateTime {
return fromTime(val.time());
}
pub fn fromTime(val: zeit.Time) DateTime {
return DateTime{
.day = val.day,
.month = @intFromEnum(val.month),
.year = @intCast(val.year),
.hour = val.hour,
.minute = val.minute,
.second = val.second,
};
}
pub fn time(self: DateTime) zeit.Time {
return zeit.Time{
.day = @intCast(self.day),
.month = @enumFromInt(self.month),
.year = self.year,
.hour = @intCast(self.hour),
.minute = @intCast(self.minute),
.second = @intCast(self.second),
};
}
pub fn instant(self: DateTime) !zeit.Instant {
return try zeit.instant(.{ .source = .{ .time = self.time() } });
}
};
pub fn timestampToDateTime(timestamp: zeit.Seconds) DateTime {
const ins = zeit.instant(.{ .source = .{ .unix_timestamp = timestamp } }) catch @panic("Failed to create instant from timestamp");
return DateTime.fromInstant(ins);
}
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
const ins = try zeit.instant(.{ .source = .{ .rfc1123 = data } });
return DateTime.fromInstant(ins);
}
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}
const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
// Basic format YYYYMMDDThhmmss
if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
var start: usize = 0;
var state = IsoParsingState.Start;
// Anything not explicitly set by our string would be 0
var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
var zulu_time = false;
for (data, 0..) |ch, i| {
switch (ch) {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
if (state == .Start) state = .Year;
},
'?', '~', '%' => {
// These characters all specify the type of time (approximate, etc)
// and we will ignore
},
'.', '-', ':', 'T' => {
// State transition
// We're going to coerce and this might not go well, but we
// want the compiler to create checks, so we'll turn on
// runtime safety for this block, forcing checks in ReleaseSafe
// ReleaseFast modes.
const next_state = try endIsoState(state, &rc, data[start..i]);
state = next_state;
start = i + 1;
},
'Z' => zulu_time = true,
else => {
log.err("Invalid character: {c}", .{ch});
return error.InvalidCharacter;
},
}
}
if (!zulu_time) return error.LocalTimeNotSupported;
// We know we have a Z at the end of this, so let's grab the last bit
// of the string, minus the 'Z', and fly, eagles, fly!
_ = try endIsoState(state, &rc, data[start .. data.len - 1]);
return rc;
}
fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
return DateTime{
.year = try std.fmt.parseUnsigned(u16, data[0..4], 10),
.month = try std.fmt.parseUnsigned(u8, data[4..6], 10),
.day = try std.fmt.parseUnsigned(u8, data[6..8], 10),
.hour = try std.fmt.parseUnsigned(u8, data[9..11], 10),
.minute = try std.fmt.parseUnsigned(u8, data[11..13], 10),
.second = try std.fmt.parseUnsigned(u8, data[13..15], 10),
};
}
fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
var next_state: IsoParsingState = undefined;
log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
// Using two switches is slightly less efficient, but more readable
switch (current_state) {
.Start, .End => return error.IllegalStateTransition,
.Year => next_state = .Month,
.Month => next_state = .Day,
.Day => next_state = .Hour,
.Hour => next_state = .Minute,
.Minute => next_state = .Second,
.Second => next_state = .Millisecond,
.Millisecond => next_state = .End,
}
// TODO: This won't handle signed, which Iso supports. For now, let's fail
// explicitly
switch (current_state) {
.Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
.Month => date.month = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Millisecond => {}, // We'll throw that away - our granularity is 1 second
.Start, .End => return error.InvalidState,
}
return next_state;
}
pub fn dateTimeToTimestamp(datetime: DateTime) !zeit.Seconds {
return (try datetime.instant()).unixTimestamp();
}
fn printDateTime(dt: DateTime) void {
log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
dt.year,
dt.month,
dt.day,
dt.hour,
dt.minute,
dt.second,
});
}
pub fn printNowUtc() void {
printDateTime(timestampToDateTime(std.time.timestamp()));
}
test "Convert timestamp to datetime" {
printDateTime(timestampToDateTime(std.time.timestamp()));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
// Get time for date: https://wtools.io/convert-date-time-to-unix-time
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}
test "Convert datetime to timestamp" {
try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}
test "Convert ISO8601 string to timestamp" {
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}
test "Convert whatever AWS is sending us to timestamp" {
const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}


@@ -1,20 +0,0 @@
const std = @import("std");
const testing = std.testing;
const parsing = @import("parsing.zig");
pub const DateTime = parsing.DateTime;
pub const timestampToDateTime = parsing.timestampToDateTime;
pub const parseEnglishToTimestamp = parsing.parseEnglishToTimestamp;
pub const parseEnglishToDateTime = parsing.parseEnglishToDateTime;
pub const parseIso8601ToTimestamp = parsing.parseIso8601ToTimestamp;
pub const parseIso8601ToDateTime = parsing.parseIso8601ToDateTime;
pub const dateTimeToTimestamp = parsing.dateTimeToTimestamp;
pub const printNowUtc = parsing.printNowUtc;
const timestamp = @import("timestamp.zig");
pub const DateFormat = timestamp.DateFormat;
pub const Timestamp = timestamp.Timestamp;
test {
testing.refAllDeclsRecursive(@This());
}


@@ -1,68 +0,0 @@
const std = @import("std");
const zeit = @import("zeit");
const json = @import("json");
pub const DateFormat = enum {
rfc1123,
iso8601,
};
pub const Timestamp = enum(zeit.Nanoseconds) {
_,
pub fn jsonStringify(value: Timestamp, options: json.StringifyOptions, out_stream: anytype) !void {
_ = options;
const instant = try zeit.instant(.{
.source = .{
.unix_nano = @intFromEnum(value),
},
});
try out_stream.writeAll("\"");
try instant.time().gofmt(out_stream, "Mon, 02 Jan 2006 15:04:05 GMT");
try out_stream.writeAll("\"");
}
pub fn parse(val: []const u8) !Timestamp {
const date_format = blk: {
if (std.ascii.isDigit(val[0])) {
break :blk DateFormat.iso8601;
} else {
break :blk DateFormat.rfc1123;
}
};
const ins = try zeit.instant(.{
.source = switch (date_format) {
DateFormat.iso8601 => .{
.iso8601 = val,
},
DateFormat.rfc1123 => .{
.rfc1123 = val,
},
},
});
return @enumFromInt(ins.timestamp);
}
};
test Timestamp {
const in_date = "Wed, 23 Apr 2025 11:23:45 GMT";
const expected_ts: Timestamp = @enumFromInt(1745407425000000000);
const actual_ts = try Timestamp.parse(in_date);
try std.testing.expectEqual(expected_ts, actual_ts);
var buf: [100]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buf);
var counting_writer = std.io.countingWriter(fbs.writer());
try Timestamp.jsonStringify(expected_ts, .{}, counting_writer.writer());
const expected_json = "\"" ++ in_date ++ "\"";
const actual_json = buf[0..counting_writer.bytes_written];
try std.testing.expectEqualStrings(expected_json, actual_json);
}


@@ -1,29 +0,0 @@
const std = @import("std");
pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const lib_mod = b.addModule("json", .{
.root_source_file = b.path("src/json.zig"),
.target = target,
.optimize = optimize,
});
const lib = b.addLibrary(.{
.linkage = .static,
.name = "json",
.root_module = lib_mod,
});
b.installArtifact(lib);
const lib_unit_tests = b.addTest(.{
.root_module = lib_mod,
});
const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);
const test_step = b.step("test", "Run unit tests");
test_step.dependOn(&run_lib_unit_tests.step);
}


@@ -1,12 +0,0 @@
.{
.name = .json,
.version = "0.0.0",
.fingerprint = 0x6b0725452065211c, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}


@@ -1,21 +1,16 @@
const builtin = @import("builtin");
const std = @import("std");
const zeit = @import("zeit");
const awshttp = @import("aws_http.zig");
const json = @import("json");
const json = @import("json.zig");
const url = @import("url.zig");
const case = @import("case.zig");
const date = @import("date");
const date = @import("date.zig");
const servicemodel = @import("servicemodel.zig");
const xml_shaper = @import("xml_shaper.zig");
const xml_serializer = @import("xml_serializer.zig");
const scoped_log = std.log.scoped(.aws);
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
/// control all logs directly/indirectly used by aws sdk. Not recommended for
/// use under normal circumstances, but helpful for times when the zig logging
/// controls are insufficient (e.g. use in build script)
@@ -95,7 +90,7 @@ pub const Options = struct {
pub const Diagnostics = struct {
http_code: i64,
response_body: []const u8,
allocator: Allocator,
allocator: std.mem.Allocator,
pub fn deinit(self: *Diagnostics) void {
self.allocator.free(self.response_body);
@@ -117,12 +112,12 @@ pub const ClientOptions = struct {
proxy: ?std.http.Client.Proxy = null,
};
pub const Client = struct {
allocator: Allocator,
allocator: std.mem.Allocator,
aws_http: awshttp.AwsHttp,
const Self = @This();
pub fn init(allocator: Allocator, options: ClientOptions) Self {
pub fn init(allocator: std.mem.Allocator, options: ClientOptions) Self {
return Self{
.allocator = allocator,
.aws_http = awshttp.AwsHttp.init(allocator, options.proxy),
@@ -157,7 +152,7 @@ pub fn Request(comptime request_action: anytype) type {
// every codegenned request object includes a metaInfo function to get
// pointers to service and action
log.debug("call: prefix {s}, sigv4 {s}, version {?s}, action {s}", .{
log.debug("call: prefix {s}, sigv4 {s}, version {s}, action {s}", .{
Self.service_meta.endpoint_prefix,
Self.service_meta.sigv4_name,
Self.service_meta.version,
@@ -232,7 +227,7 @@ pub fn Request(comptime request_action: anytype) type {
// We don't know if we need a body...guessing here, this should cover most
var buffer = std.ArrayList(u8).init(options.client.allocator);
defer buffer.deinit();
var nameAllocator = ArenaAllocator.init(options.client.allocator);
var nameAllocator = std.heap.ArenaAllocator.init(options.client.allocator);
defer nameAllocator.deinit();
if (Self.service_meta.aws_protocol == .rest_json_1) {
if (std.mem.eql(u8, "PUT", aws_request.method) or std.mem.eql(u8, "POST", aws_request.method)) {
@@ -240,8 +235,6 @@ pub fn Request(comptime request_action: anytype) type {
}
}
aws_request.body = buffer.items;
var rest_xml_body: ?[]const u8 = null;
defer if (rest_xml_body) |b| options.client.allocator.free(b);
if (Self.service_meta.aws_protocol == .rest_xml) {
if (std.mem.eql(u8, "PUT", aws_request.method) or std.mem.eql(u8, "POST", aws_request.method)) {
if (@hasDecl(ActionRequest, "http_payload")) {
@@ -249,49 +242,7 @@ pub fn Request(comptime request_action: anytype) type {
// the http_payload declaration on the request type.
// Hopefully these will always be ?[]const u8, otherwise
// we should see a compile error on this line
const payload = @field(request, ActionRequest.http_payload);
const T = @TypeOf(payload);
var body_assigned = false;
if (T == ?[]const u8) {
aws_request.body = payload.?;
body_assigned = true;
}
if (T == []const u8) {
aws_request.body = payload;
body_assigned = true;
}
if (!body_assigned) {
const sm = ActionRequest.metaInfo().service_metadata;
if (!std.mem.eql(u8, sm.endpoint_prefix, "s3"))
// Because the attributes below are most likely only
// applicable to s3, we are better off to fail
// early. This portion of the code base should
// only be executed for s3 as no other known
// service uses this protocol
return error.NotImplemented;
const attrs = try std.fmt.allocPrint(
options.client.allocator,
"xmlns=\"http://{s}.amazonaws.com/doc/{s}/\"",
.{ sm.endpoint_prefix, sm.version.? },
); // Version required for the protocol, we should panic if it is not present
defer options.client.allocator.free(attrs); // once serialized, the value should be copied over
// Need to serialize this
rest_xml_body = try xml_serializer.stringifyAlloc(
options.client.allocator,
payload,
.{
.whitespace = .indent_2,
.root_name = request.fieldNameFor(ActionRequest.http_payload),
.root_attributes = attrs,
.emit_null_optional_fields = false,
.include_declaration = false,
},
);
aws_request.body = rest_xml_body.?;
}
aws_request.body = @field(request, ActionRequest.http_payload).?;
} else {
return error.NotImplemented;
}
@@ -312,9 +263,9 @@ pub fn Request(comptime request_action: anytype) type {
fn callJson(request: ActionRequest, options: Options) !FullResponseType {
const target =
try std.fmt.allocPrint(options.client.allocator, "{s}.{s}", .{
Self.service_meta.name,
action.action_name,
});
Self.service_meta.name,
action.action_name,
});
defer options.client.allocator.free(target);
var buffer = std.ArrayList(u8).init(options.client.allocator);
@@ -329,7 +280,7 @@ pub fn Request(comptime request_action: anytype) type {
// for a boxed member with no observable difference." But we're
// seeing a lot of differences here between spec and reality
//
var nameAllocator = ArenaAllocator.init(options.client.allocator);
var nameAllocator = std.heap.ArenaAllocator.init(options.client.allocator);
defer nameAllocator.deinit();
try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
@@ -362,29 +313,25 @@ pub fn Request(comptime request_action: anytype) type {
const continuation = if (buffer.items.len > 0) "&" else "";
const query = if (Self.service_meta.aws_protocol == .query)
""
try std.fmt.allocPrint(options.client.allocator, "", .{})
else // EC2
try std.fmt.allocPrint(options.client.allocator, "?Action={s}&Version={s}", .{
action.action_name,
Self.service_meta.version.?, // Version required for the protocol, we should panic if it is not present
Self.service_meta.version,
});
defer if (Self.service_meta.aws_protocol != .query) {
options.client.allocator.free(query);
};
defer options.client.allocator.free(query);
// Note: EC2 avoided the Action={s}&Version={s} in the body, but it's
// required, so I'm not sure why that code was put in originally?
const body =
try std.fmt.allocPrint(options.client.allocator, "Action={s}&Version={s}{s}{s}", .{
action.action_name,
Self.service_meta.version.?, // Version required for the protocol, we should panic if it is not present
continuation,
buffer.items,
});
action.action_name,
Self.service_meta.version,
continuation,
buffer.items,
});
defer options.client.allocator.free(body);
return try Self.callAws(.{
.query = query,
.body = body,
@@ -404,8 +351,7 @@
},
);
defer response.deinit();
if (response.response_code != options.success_http_code and response.response_code != 404) {
if (response.response_code != options.success_http_code) {
try reportTraffic(options.client.allocator, "Call Failed", aws_request, response, log.err);
if (options.diagnostics) |d| {
d.http_code = response.response_code;
@ -434,7 +380,6 @@ pub fn Request(comptime request_action: anytype) type {
.header_name = @field(action.Response.http_header, f.name),
};
}
inline for (fields) |f| {
for (response.headers) |header| {
if (std.mem.eql(u8, header.name, f.?.header_name)) {
@ -451,7 +396,7 @@ pub fn Request(comptime request_action: anytype) type {
//
// Note: issues found on zig 0.9.0
setHeaderValue(
full_response.arena.allocator(),
options.client.allocator,
&full_response.response,
f.?.name,
f.?.T,
@ -473,7 +418,7 @@ pub fn Request(comptime request_action: anytype) type {
}
fn setHeaderValue(
allocator: Allocator,
allocator: std.mem.Allocator,
response: anytype,
comptime field_name: []const u8,
comptime field_type: type,
@ -493,76 +438,51 @@ pub fn Request(comptime request_action: anytype) type {
// First, we need to determine if we care about a response at all
// If the expected result has no fields, there's no sense in
// doing any more work. Let's bail early
const fields = @typeInfo(action.Response).@"struct".fields;
var expected_body_field_len = fields.len;
if (@hasDecl(action.Response, "http_header")) {
comptime var expected_body_field_len = std.meta.fields(action.Response).len;
if (@hasDecl(action.Response, "http_header"))
expected_body_field_len -= std.meta.fields(@TypeOf(action.Response.http_header)).len;
}
var buf_request_id: [256]u8 = undefined;
const request_id = try requestIdFromHeaders(&buf_request_id, options.client.allocator, aws_request, response);
const arena = ArenaAllocator.init(options.client.allocator);
if (@hasDecl(action.Response, "http_payload")) {
var rc = try FullResponseType.init(.{
.arena = arena,
var rc = FullResponseType{
.response = .{},
.request_id = request_id,
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = .{} },
.raw_parsed = .{ .raw = .{} },
});
.allocator = options.client.allocator,
};
const body_field = @field(rc.response, action.Response.http_payload);
const BodyField = @TypeOf(body_field);
if (BodyField == []const u8 or BodyField == ?[]const u8) {
expected_body_field_len = 0;
// We can't use body_field for this set - only @field will work
@field(rc.response, action.Response.http_payload) = try rc.arena.allocator().dupe(u8, response.body);
@field(rc.response, action.Response.http_payload) = try options.client.allocator.dupe(u8, response.body);
return rc;
}
rc.deinit();
}
// We don't care about the body if there are no fields we expect there...
if (fields.len == 0 or expected_body_field_len == 0 or response.body.len == 0) {
// Makes sure we can't get here with an `action.Response` that has required fields
// Without this block there is a compilation error when running tests
// Perhaps there is a better way to handle this
{
comptime var required_fields = 0;
inline for (fields) |field| {
const field_type_info = @typeInfo(field.type);
if (field_type_info != .optional and field.defaultValue() == null) {
required_fields += 1;
}
}
if (required_fields > 0) unreachable;
}
if (std.meta.fields(action.Response).len == 0 or expected_body_field_len == 0) {
// Do we care if an unexpected body comes in?
return try FullResponseType.init(.{
.arena = arena,
.request_id = request_id,
return FullResponseType{
.response = .{},
});
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = .{} },
.raw_parsed = .{ .raw = .{} },
.allocator = options.client.allocator,
};
}
const content_type = try getContentType(response.headers);
return switch (content_type) {
.json => try jsonReturn(aws_request, options, response),
.xml => try xmlReturn(aws_request, options, response),
};
const isJson = try isJsonResponse(response.headers);
if (!isJson) return try xmlReturn(aws_request, options, response);
return try jsonReturn(aws_request, options, response);
}
fn jsonReturn(aws_request: awshttp.HttpRequest, options: Options, response: awshttp.HttpResult) !FullResponseType {
var arena = ArenaAllocator.init(options.client.allocator);
const parser_options = json.ParseOptions{
.allocator = arena.allocator(),
.allocator = options.client.allocator,
.allow_camel_case_conversion = true, // new option
.allow_snake_case_conversion = true, // new option
.allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
@ -598,24 +518,26 @@ pub fn Request(comptime request_action: anytype) type {
// We can grab index [0] as structs are guaranteed by zig to be returned in the order
// declared, and we're declaring in that order in ServerResponse().
const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
return try FullResponseType.init(.{
.arena = arena,
return FullResponseType{
.response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
.request_id = real_response.ResponseMetadata.RequestId,
.response_metadata = .{
.request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
},
.parser_options = .{ .json = parser_options },
.raw_parsed = .{ .server = parsed_response },
});
.allocator = options.client.allocator,
};
} else {
// Conditions 2 or 3 (no wrapping)
var buf_request_id: [256]u8 = undefined;
const request_id = try requestIdFromHeaders(&buf_request_id, options.client.allocator, aws_request, response);
return try FullResponseType.init(.{
.arena = arena,
return FullResponseType{
.response = parsed_response,
.request_id = request_id,
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = parser_options },
.raw_parsed = .{ .raw = parsed_response },
});
.allocator = options.client.allocator,
};
}
}
@ -667,13 +589,7 @@ pub fn Request(comptime request_action: anytype) type {
// }
//
// Big thing is that requestid, which we'll need to fetch "manually"
var arena = ArenaAllocator.init(options.client.allocator);
const xml_options = xml_shaper.ParseOptions{
.allocator = arena.allocator(),
.elementToParse = findResult,
};
const xml_options = xml_shaper.ParseOptions{ .allocator = options.client.allocator, .elementToParse = findResult };
var body: []const u8 = result.body;
var free_body = false;
if (result.body.len < 20) {
@ -694,23 +610,24 @@ pub fn Request(comptime request_action: anytype) type {
defer if (free_body) options.client.allocator.free(body);
const parsed = try xml_shaper.parse(action.Response, body, xml_options);
errdefer parsed.deinit();
var buf_request_id: [256]u8 = undefined;
// This needs to get into FullResponseType somehow: defer parsed.deinit();
const request_id = blk: {
if (parsed.document.root.getCharData("requestId")) |elem| {
break :blk elem;
}
break :blk try requestIdFromHeaders(&buf_request_id, options.client.allocator, request, result);
if (parsed.document.root.getCharData("requestId")) |elem|
break :blk try options.client.allocator.dupe(u8, elem);
break :blk try requestIdFromHeaders(request, result, options);
};
defer options.client.allocator.free(request_id);
return try FullResponseType.init(.{
.arena = arena,
return FullResponseType{
.response = parsed.parsed_value,
.request_id = request_id,
.response_metadata = .{
.request_id = try options.client.allocator.dupe(u8, request_id),
},
.parser_options = .{ .xml = xml_options },
.raw_parsed = .{ .xml = parsed },
});
.allocator = options.client.allocator,
};
}
const ServerResponseTypes = struct {
NormalResponse: type,
RawResponse: type,
@ -771,13 +688,17 @@ pub fn Request(comptime request_action: anytype) type {
fn ParsedJsonData(comptime T: type) type {
return struct {
raw_response_parsed: bool,
parsed_response_ptr: *T,
allocator: Allocator,
allocator: std.mem.Allocator,
const MySelf = @This();
pub fn deinit(self: MySelf) void {
self.allocator.destroy(self.parsed_response_ptr);
// This feels like it should result in a use after free, but it
// seems to be working?
if (self.raw_response_parsed)
self.allocator.destroy(self.parsed_response_ptr);
}
};
}
@ -785,7 +706,6 @@ pub fn Request(comptime request_action: anytype) type {
fn parseJsonData(comptime response_types: ServerResponseTypes, data: []const u8, options: Options, parser_options: json.ParseOptions) !ParsedJsonData(response_types.NormalResponse) {
// Now it's time to start looking at the actual data. Job 1 will
// be to figure out if this is a raw response or wrapped
const allocator = options.client.allocator;
// Extract the first json key
const key = firstJsonKey(data);
@ -793,13 +713,11 @@ pub fn Request(comptime request_action: anytype) type {
std.mem.eql(u8, key, action.action_name ++ "Response") or
std.mem.eql(u8, key, action.action_name ++ "Result") or
isOtherNormalResponse(response_types.NormalResponse, key);
var raw_response_parsed = false;
var stream = json.TokenStream.init(data);
const parsed_response_ptr = blk: {
const ptr = try allocator.create(response_types.NormalResponse);
errdefer allocator.destroy(ptr);
if (!response_types.isRawPossible or found_normal_json_response) {
ptr.* = (json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
if (!response_types.isRawPossible or found_normal_json_response)
break :blk &(json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
log.err(
\\Call successful, but unexpected response from service.
\\This could be the result of a bug or a stale set of code generated
@ -815,31 +733,32 @@ pub fn Request(comptime request_action: anytype) type {
return e;
});
break :blk ptr;
}
log.debug("Appears server has provided a raw response", .{});
raw_response_parsed = true;
const ptr = try options.client.allocator.create(response_types.NormalResponse);
errdefer options.client.allocator.destroy(ptr);
@field(ptr.*, std.meta.fields(action.Response)[0].name) =
json.parse(response_types.RawResponse, &stream, parser_options) catch |e| {
log.err(
\\Call successful, but unexpected response from service.
\\This could be the result of a bug or a stale set of code generated
\\service models.
\\
\\Model Type: {}
\\
\\Response from server:
\\
\\{s}
\\
, .{ action.Response, data });
return e;
};
log.err(
\\Call successful, but unexpected response from service.
\\This could be the result of a bug or a stale set of code generated
\\service models.
\\
\\Model Type: {}
\\
\\Response from server:
\\
\\{s}
\\
, .{ action.Response, data });
return e;
};
break :blk ptr;
};
return ParsedJsonData(response_types.NormalResponse){
.parsed_response_ptr = parsed_response_ptr,
.allocator = allocator,
.raw_response_parsed = raw_response_parsed,
.parsed_response_ptr = @constCast(parsed_response_ptr), //TODO: why doesn't changing const->var above fix this?
.allocator = options.client.allocator,
};
}
};
@ -858,16 +777,8 @@ fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
// TODO: This is terrible...fix it
switch (T) {
bool => return std.ascii.eqlIgnoreCase(val, "true"),
i64, i128 => return parseInt(T, val) catch |e| {
log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
return e;
},
f64, f128 => return std.fmt.parseFloat(T, val) catch |e| {
log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
return e;
},
date.Timestamp => return date.Timestamp.parse(val) catch |e| {
log.debug("Failed to parse timestamp from string '{s}': {}", .{ val, e });
i64 => return parseInt(T, val) catch |e| {
log.err("Invalid string representing i64: {s}", .{val});
return e;
},
else => return val,
@ -893,14 +804,14 @@ fn parseInt(comptime T: type, val: []const u8) !T {
return rc;
}
fn generalAllocPrint(allocator: Allocator, val: anytype) !?[]const u8 {
fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
switch (@typeInfo(@TypeOf(val))) {
.optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
.array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
}
}
fn headersFor(allocator: Allocator, request: anytype) ![]awshttp.Header {
fn headersFor(allocator: std.mem.Allocator, request: anytype) ![]awshttp.Header {
log.debug("Checking for headers to include for type {}", .{@TypeOf(request)});
if (!@hasDecl(@TypeOf(request), "http_header")) return &[_]awshttp.Header{};
const http_header = @TypeOf(request).http_header;
@ -924,7 +835,7 @@ fn headersFor(allocator: Allocator, request: anytype) ![]awshttp.Header {
return headers.toOwnedSlice();
}
fn freeHeadersFor(allocator: Allocator, request: anytype, headers: []const awshttp.Header) void {
fn freeHeadersFor(allocator: std.mem.Allocator, request: anytype, headers: []const awshttp.Header) void {
if (!@hasDecl(@TypeOf(request), "http_header")) return;
const http_header = @TypeOf(request).http_header;
const fields = std.meta.fields(@TypeOf(http_header));
@ -951,28 +862,23 @@ fn firstJsonKey(data: []const u8) []const u8 {
log.debug("First json key: {s}", .{key});
return key;
}
pub const ContentType = enum {
json,
xml,
};
fn getContentType(headers: []const awshttp.Header) !ContentType {
fn isJsonResponse(headers: []const awshttp.Header) !bool {
// EC2 ignores our accept type, but technically query protocol only
// returns XML as well. So, we'll ignore the protocol here and just
// look at the return type
var isJson: ?bool = null;
for (headers) |h| {
if (std.ascii.eqlIgnoreCase("Content-Type", h.name)) {
if (std.mem.startsWith(u8, h.value, "application/json")) {
return .json;
isJson = true;
} else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.0")) {
return .json;
isJson = true;
} else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.1")) {
return .json;
isJson = true;
} else if (std.mem.startsWith(u8, h.value, "text/xml")) {
return .xml;
isJson = false;
} else if (std.mem.startsWith(u8, h.value, "application/xml")) {
return .xml;
isJson = false;
} else {
log.err("Unexpected content type: {s}", .{h.value});
return error.UnexpectedContentType;
@ -980,12 +886,11 @@ fn getContentType(headers: []const awshttp.Header) !ContentType {
break;
}
}
return error.ContentTypeNotFound;
if (isJson == null) return error.ContentTypeNotFound;
return isJson.?;
}
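// A hedged sketch of the enum-returning variant above; the header shape
// (.name/.value) matches the awshttp.Header usage elsewhere in this file.
test "getContentType maps Content-Type to json/xml (sketch)" {
    const headers = [_]awshttp.Header{
        .{ .name = "Content-Type", .value = "application/x-amz-json-1.1" },
    };
    try std.testing.expectEqual(ContentType.json, try getContentType(&headers));
}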
/// Get request ID from headers.
/// Allocation is only used in case of an error. Caller does not need to free the returned buffer.
fn requestIdFromHeaders(buf: []u8, allocator: Allocator, request: awshttp.HttpRequest, response: awshttp.HttpResult) ![]u8 {
/// Get request ID from headers. Caller responsible for freeing memory
fn requestIdFromHeaders(request: awshttp.HttpRequest, response: awshttp.HttpResult, options: Options) ![]u8 {
var rid: ?[]const u8 = null;
// This "thing" is called:
// * Host ID
@ -1005,14 +910,11 @@ fn requestIdFromHeaders(buf: []u8, allocator: Allocator, request: awshttp.HttpRe
host_id = header.value;
}
if (rid) |r| {
if (host_id) |h| {
return try std.fmt.bufPrint(buf, "{s}, host_id: {s}", .{ r, h });
}
@memcpy(buf[0..r.len], r);
return buf[0..r.len];
if (host_id) |h|
return try std.fmt.allocPrint(options.client.allocator, "{s}, host_id: {s}", .{ r, h });
return try options.client.allocator.dupe(u8, r);
}
try reportTraffic(allocator, "Request ID not found", request, response, log.err);
try reportTraffic(options.client.allocator, "Request ID not found", request, response, log.err);
return error.RequestIdNotFound;
}
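// Caller-side sketch of the buffer-based variant above (it mirrors the call
// sites earlier in this file): the id is formatted into the caller's stack
// buffer, so there is nothing to free on the happy path.
//
//     var buf_request_id: [256]u8 = undefined;
//     const request_id = try requestIdFromHeaders(&buf_request_id, options.client.allocator, aws_request, response);
//     log.debug("request id: {s}", .{request_id});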
fn ServerResponse(comptime action: anytype) type {
@ -1030,14 +932,14 @@ fn ServerResponse(comptime action: anytype) type {
.{
.name = action.action_name ++ "Result",
.type = T,
.default_value_ptr = null,
.default_value = null,
.is_comptime = false,
.alignment = 0,
},
.{
.name = "ResponseMetadata",
.type = ResponseMetadata,
.default_value_ptr = null,
.default_value = null,
.is_comptime = false,
.alignment = 0,
},
@ -1053,7 +955,7 @@ fn ServerResponse(comptime action: anytype) type {
.{
.name = action.action_name ++ "Response",
.type = Result,
.default_value_ptr = null,
.default_value = null,
.is_comptime = false,
.alignment = 0,
},
@ -1065,62 +967,65 @@ fn ServerResponse(comptime action: anytype) type {
}
fn FullResponse(comptime action: anytype) type {
return struct {
pub const ResponseMetadata = struct {
request_id: []const u8,
};
pub const RawParsed = union(enum) {
response: action.Response,
response_metadata: struct {
request_id: []u8,
},
parser_options: union(enum) {
json: json.ParseOptions,
xml: xml_shaper.ParseOptions,
},
raw_parsed: union(enum) {
server: ServerResponse(action),
raw: action.Response,
xml: xml_shaper.Parsed(action.Response),
};
pub const FullResponseOptions = struct {
response: action.Response = undefined,
request_id: []const u8,
raw_parsed: RawParsed = .{ .raw = undefined },
arena: ArenaAllocator,
};
response: action.Response = undefined,
raw_parsed: RawParsed = .{ .raw = undefined },
response_metadata: ResponseMetadata,
arena: ArenaAllocator,
},
allocator: std.mem.Allocator,
const Self = @This();
pub fn init(options: FullResponseOptions) !Self {
var arena = options.arena;
const request_id = try arena.allocator().dupe(u8, options.request_id);
return Self{
.arena = arena,
.response = options.response,
.raw_parsed = options.raw_parsed,
.response_metadata = .{
.request_id = request_id,
},
};
}
pub fn deinit(self: Self) void {
self.arena.deinit();
switch (self.raw_parsed) {
// Server is json only (so far)
.server => json.parseFree(ServerResponse(action), self.raw_parsed.server, self.parser_options.json),
// Raw is json only (so far)
.raw => json.parseFree(action.Response, self.raw_parsed.raw, self.parser_options.json),
.xml => |xml| xml.deinit(),
}
self.allocator.free(self.response_metadata.request_id);
const Response = @TypeOf(self.response);
if (@hasDecl(Response, "http_header")) {
inline for (std.meta.fields(@TypeOf(Response.http_header))) |f| {
safeFree(self.allocator, @field(self.response, f.name));
}
}
if (@hasDecl(Response, "http_payload")) {
const body_field = @field(self.response, Response.http_payload);
const BodyField = @TypeOf(body_field);
if (BodyField == []const u8) {
self.allocator.free(body_field);
}
if (BodyField == ?[]const u8) {
if (body_field) |f|
self.allocator.free(f);
}
}
}
};
}
fn safeFree(allocator: Allocator, obj: anytype) void {
fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
switch (@typeInfo(@TypeOf(obj))) {
.pointer => allocator.free(obj),
.optional => if (obj) |o| safeFree(allocator, o),
else => {},
}
}
fn queryFieldTransformer(allocator: Allocator, field_name: []const u8) anyerror![]const u8 {
fn queryFieldTransformer(allocator: std.mem.Allocator, field_name: []const u8) anyerror![]const u8 {
return try case.snakeToPascal(allocator, field_name);
}
fn buildPath(
allocator: Allocator,
allocator: std.mem.Allocator,
raw_uri: []const u8,
comptime ActionRequest: type,
request: anytype,
@ -1207,7 +1112,7 @@ fn uriEncodeByte(char: u8, writer: anytype, encode_slash: bool) !void {
}
}
fn buildQuery(allocator: Allocator, request: anytype) ![]const u8 {
fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
// query should look something like this:
// pub const http_query = .{
// .master_region = "MasterRegion",
@ -1238,7 +1143,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
},
// if this is a pointer, we want to make sure it is more than just a string
.pointer => |ptr| {
if (ptr.child == u8 or ptr.size != .slice) {
if (ptr.child == u8 or ptr.size != .Slice) {
// This is just a string
return try addBasicQueryArg(prefix, key, value, writer);
}
@ -1329,7 +1234,7 @@ pub fn IgnoringWriter(comptime WriterType: type) type {
}
fn reportTraffic(
allocator: Allocator,
allocator: std.mem.Allocator,
info: []const u8,
request: awshttp.HttpRequest,
response: awshttp.HttpResult,
@ -1348,8 +1253,7 @@ fn reportTraffic(
}
try writer.print("\tContent-Type: {s}\n\n", .{request.content_type});
try writer.print("Request URL: {s}\n", .{request.path});
try writer.writeAll("Request Body:\n");
_ = try writer.write("Request Body:\n");
try writer.print("-------------\n{s}\n", .{request.body});
_ = try writer.write("-------------\n");
_ = try writer.write("Response Headers:\n");
@ -1532,7 +1436,7 @@ test "basic json request serialization" {
// for a boxed member with no observable difference." But we're
// seeing a lot of differences here between spec and reality
//
var nameAllocator = ArenaAllocator.init(allocator);
var nameAllocator = std.heap.ArenaAllocator.init(allocator);
defer nameAllocator.deinit();
try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
try std.testing.expectEqualStrings(
@ -1616,8 +1520,8 @@ test {
std.testing.refAllDecls(xml_shaper);
}
const TestOptions = struct {
allocator: Allocator,
arena: ?*ArenaAllocator = null,
allocator: std.mem.Allocator,
arena: ?*std.heap.ArenaAllocator = null,
server_port: ?u16 = null,
server_remaining_requests: usize = 1,
server_response: []const u8 = "unset",
@ -1706,8 +1610,8 @@ const TestOptions = struct {
fn threadMain(options: *TestOptions) !void {
// https://github.com/ziglang/zig/blob/d2be725e4b14c33dbd39054e33d926913eee3cd4/lib/compiler/std-docs.zig#L22-L54
options.arena = try options.allocator.create(ArenaAllocator);
options.arena.?.* = ArenaAllocator.init(options.allocator);
options.arena = try options.allocator.create(std.heap.ArenaAllocator);
options.arena.?.* = std.heap.ArenaAllocator.init(options.allocator);
const allocator = options.arena.?.allocator();
options.allocator = allocator;
@ -1718,7 +1622,7 @@ fn threadMain(options: *TestOptions) !void {
options.test_server_runtime_uri = try std.fmt.allocPrint(options.allocator, "http://127.0.0.1:{d}", .{options.server_port.?});
log.debug("server listening at {s}", .{options.test_server_runtime_uri.?});
log.info("starting server thread, tid {d}", .{std.Thread.getCurrentId()});
// var arena = ArenaAllocator.init(options.allocator);
// var arena = std.heap.ArenaAllocator.init(options.allocator);
// defer arena.deinit();
// var aa = arena.allocator();
// We're in control of all requests/responses, so this flag will tell us
@ -1798,11 +1702,11 @@ fn serveRequest(options: *TestOptions, request: *std.http.Server.Request) !void
////////////////////////////////////////////////////////////////////////
const TestSetup = struct {
allocator: Allocator,
allocator: std.mem.Allocator,
request_options: TestOptions,
server_thread: std.Thread = undefined,
creds: aws_auth.Credentials = undefined,
client: Client = undefined,
client: *Client = undefined,
started: bool = false,
const Self = @This();
@ -1839,8 +1743,8 @@ const TestSetup = struct {
null,
);
aws_creds.static_credentials = self.creds;
const client = Client.init(self.allocator, .{});
self.client = client;
var client = Client.init(self.allocator, .{});
self.client = &client;
return .{
.region = "us-west-2",
.client = client,
@ -2351,44 +2255,6 @@ test "ec2_query_with_input: EC2 describe instances" {
try std.testing.expectEqualStrings("i-0212d7d1f62b96676", call.response.reservations.?[1].instances.?[0].instance_id.?);
try std.testing.expectEqualStrings("123456789012:found-me", call.response.reservations.?[1].instances.?[0].tags.?[0].value.?);
}
test "rest_xml_with_input_s3: S3 create bucket" {
const allocator = std.testing.allocator;
var test_harness = TestSetup.init(.{
.allocator = allocator,
.server_response =
\\
,
.server_response_headers = &.{ // I don't see content type coming back in actual S3 requests
.{ .name = "x-amzn-RequestId", .value = "9PEYBAZ9J7TPRX43" },
.{ .name = "x-amz-id-2", .value = "u7lzgW0tIyRP15vSUsVOXxJ37OfVCO8lZmLIVuqeq5EE4tNp9qebb5fy+/kendlZpR4YQE+y4Xg=" },
},
});
defer test_harness.deinit();
errdefer test_harness.creds.deinit();
const options = try test_harness.start();
const s3 = (Services(.{.s3}){}).s3;
const call = try test_harness.client.call(s3.create_bucket.Request{
.bucket = "",
.create_bucket_configuration = .{
.location_constraint = "us-west-2",
},
}, options);
defer call.deinit();
test_harness.stop();
// Request expectations
try std.testing.expectEqual(std.http.Method.PUT, test_harness.request_options.request_method);
try std.testing.expectEqualStrings("/", test_harness.request_options.request_target);
try std.testing.expectEqualStrings(
\\<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
\\ <LocationConstraint>us-west-2</LocationConstraint>
\\</CreateBucketConfiguration>
, test_harness.request_options.request_body);
// Response expectations
try std.testing.expectEqualStrings(
"9PEYBAZ9J7TPRX43, host_id: u7lzgW0tIyRP15vSUsVOXxJ37OfVCO8lZmLIVuqeq5EE4tNp9qebb5fy+/kendlZpR4YQE+y4Xg=",
call.response_metadata.request_id,
);
}
test "rest_xml_no_input: S3 list buckets" {
const allocator = std.testing.allocator;
var test_harness = TestSetup.init(.{
@ -2523,11 +2389,10 @@ test "json_1_1: ECR timestamps" {
// defer std.testing.log_level = old;
// std.testing.log_level = .debug;
const allocator = std.testing.allocator;
var test_harness = TestSetup.init(.{
.allocator = allocator,
.server_response =
\\{"authorizationData":[{"authorizationToken":"***","expiresAt":"2022-05-17T06:56:13.652000+00:00","proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
\\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.7385984915E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
// \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.738598491557E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
,
.server_response_headers = &.{
@ -2552,13 +2417,7 @@ test "json_1_1: ECR timestamps" {
try std.testing.expectEqualStrings("***", call.response.authorization_data.?[0].authorization_token.?);
try std.testing.expectEqualStrings("https://146325435496.dkr.ecr.us-west-2.amazonaws.com", call.response.authorization_data.?[0].proxy_endpoint.?);
// try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?);
const expected_ins = try zeit.instant(.{
.source = .{ .iso8601 = "2022-05-17T06:56:13.652000+00:00" },
});
const expected_ts: date.Timestamp = @enumFromInt(expected_ins.timestamp);
try std.testing.expectEqual(expected_ts, call.response.authorization_data.?[0].expires_at.?);
try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?);
}
var test_error_log_enabled = true;
test "test server timeout works" {


@ -463,19 +463,41 @@ fn s3BucketFromPath(path: []const u8) []const u8 {
/// allocator: Will be used only to construct the EndPoint struct
/// uri: string constructed in such a way that deallocation is needed
fn endPointFromUri(allocator: std.mem.Allocator, uri: []const u8, path: []const u8) !EndPoint {
const parsed_uri = try std.Uri.parse(uri);
const scheme = parsed_uri.scheme;
const host = try allocator.dupe(u8, parsed_uri.host.?.percent_encoded);
const port: u16 = blk: {
if (parsed_uri.port) |port| break :blk port;
if (std.mem.eql(u8, scheme, "http")) break :blk 80;
if (std.mem.eql(u8, scheme, "https")) break :blk 443;
break :blk 0;
};
var scheme: []const u8 = "";
var host: []const u8 = "";
var port: u16 = 443;
var host_start: usize = 0;
var host_end: usize = 0;
for (uri, 0..) |ch, i| {
switch (ch) {
':' => {
if (!std.mem.eql(u8, scheme, "")) {
// from here to the end is the port - this is likely a bug if an IPv6 address is used
const rest_of_uri = uri[i + 1 ..];
port = try std.fmt.parseUnsigned(u16, rest_of_uri, 10);
host_end = i;
}
},
'/' => {
if (host_start == 0) {
host_start = i + 2;
scheme = uri[0 .. i - 1];
if (std.mem.eql(u8, scheme, "http")) {
port = 80;
} else {
port = 443;
}
}
},
else => continue,
}
}
if (host_end == 0) {
host_end = uri.len;
}
host = try allocator.dupe(u8, uri[host_start..host_end]);
log.debug("host: {s}, scheme: {s}, port: {}", .{ host, scheme, port });
return EndPoint{
.uri = uri,
.host = host,
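// A hedged sketch of what the loop above computes (scheme/host/port names
// assumed from the struct literal above); note the parser keeps any trailing
// path in the host, and the port branch would misfire on an IPv6 literal:
//
//     "http://127.0.0.1:8080" -> scheme "http", host "127.0.0.1", port 8080
//     "https://example.com"   -> scheme "https", host "example.com", port 443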


@ -1,7 +1,7 @@
const std = @import("std");
const base = @import("aws_http_base.zig");
const auth = @import("aws_authentication.zig");
const date = @import("date");
const date = @import("date.zig");
const scoped_log = std.log.scoped(.aws_signing);
@ -662,12 +662,12 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
}
defer allocator.free(encoded_once);
var encoded_twice = try encodeUri(allocator, encoded_once);
defer allocator.free(encoded_twice);
log.debug("encoded path (2): {s}", .{encoded_twice});
if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
return try allocator.dupe(u8, encoded_twice[0..i]);
_ = allocator.resize(encoded_twice, i);
return encoded_twice[0..i];
}
return try allocator.dupe(u8, encoded_twice);
return encoded_twice;
}
fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
@ -936,7 +936,6 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
const in_quote = false;
var start: usize = 0;
const rc = try allocator.alloc(u8, value.len);
defer allocator.free(rc);
var rc_inx: usize = 0;
for (value, 0..) |c, i| {
if (!started and !std.ascii.isWhitespace(c)) {
@ -954,7 +953,8 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
// Trim end
while (std.ascii.isWhitespace(rc[rc_inx - 1]))
rc_inx -= 1;
return try allocator.dupe(u8, rc[0..rc_inx]);
_ = allocator.resize(rc, rc_inx);
return rc[0..rc_inx];
}
fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
_ = context;
@ -986,7 +986,6 @@ test "canonical uri" {
const path = "/documents and settings/?foo=bar";
const expected = "/documents%2520and%2520settings/";
const actual = try canonicalUri(allocator, path, true);
defer allocator.free(actual);
try std.testing.expectEqualStrings(expected, actual);

src/date.zig Normal file

@ -0,0 +1,414 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.
const std = @import("std");
const log = std.log.scoped(.date);
pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };
const SECONDS_PER_DAY = 86400; //* 24* 60 * 60 */
const DAYS_PER_YEAR = 365; //* Normal year (no leap year) */
pub fn timestampToDateTime(timestamp: i64) DateTime {
// from https://de.wikipedia.org/wiki/Unixzeit
const unixtime = @as(u64, @intCast(timestamp));
const DAYS_IN_4_YEARS = 1461; //* 4*365 + 1 */
const DAYS_IN_100_YEARS = 36524; //* 100*365 + 25 - 1 */
const DAYS_IN_400_YEARS = 146097; //* 400*365 + 100 - 4 + 1 */
const DAY_NUMBER_ADJUSTED_1970_01_01 = 719468; //* Day number relates to March 1st */
var dayN: u64 = DAY_NUMBER_ADJUSTED_1970_01_01 + unixtime / SECONDS_PER_DAY;
const seconds_since_midnight: u64 = unixtime % SECONDS_PER_DAY;
var temp: u64 = 0;
// Leap year rules for Gregorian Calendars
// Any year divisible by 100 is not a leap year unless also divisible by 400
temp = 4 * (dayN + DAYS_IN_100_YEARS + 1) / DAYS_IN_400_YEARS - 1;
var year = @as(u16, @intCast(100 * temp));
dayN -= DAYS_IN_100_YEARS * temp + temp / 4;
// For Julian calendars, each year divisible by 4 is a leap year
temp = 4 * (dayN + DAYS_PER_YEAR + 1) / DAYS_IN_4_YEARS - 1;
year += @as(u16, @intCast(temp));
dayN -= DAYS_PER_YEAR * temp + temp / 4;
// dayN calculates the days of the year in relation to March 1
var month = @as(u8, @intCast((5 * dayN + 2) / 153));
const day = @as(u8, @intCast(dayN - (@as(u64, @intCast(month)) * 153 + 2) / 5 + 1));
// 153 = 31+30+31+30+31 Days for the 5 months from March through July
// 153 = 31+30+31+30+31 Days for the 5 months from August through December
// 31+28 Days for January and February (see below)
// +2: Rounding adjustment
// +1: The first day in March is March 1st (not March 0)
month += 3; // Convert from the day that starts on March 1st, to a human year */
if (month > 12) { // months 13 and 14 become 1 (January) and 2 (February) of the next year
month -= 12;
year += 1;
}
const hours = @as(u8, @intCast(seconds_since_midnight / 3600));
const minutes = @as(u8, @intCast(seconds_since_midnight % 3600 / 60));
const seconds = @as(u8, @intCast(seconds_since_midnight % 60));
return DateTime{ .day = day, .month = month, .year = year, .hour = hours, .minute = minutes, .second = seconds };
}
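// A worked example of the conversion above; the value matches the
// "Convert timestamp to datetime" test at the bottom of this file.
test "timestampToDateTime worked example" {
    // 1598607147 seconds after the epoch is 2020-08-28T09:32:27Z
    try std.testing.expectEqual(
        DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 },
        timestampToDateTime(1598607147),
    );
}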
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}
const EnglishParsingState = enum { Start, Day, Month, Year, Hour, Minute, Second, End };
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
// Fri, 03 Jun 2022 18:12:36 GMT
if (!std.mem.endsWith(u8, data, "GMT")) return error.InvalidFormat;
var start: usize = 0;
var state = EnglishParsingState.Start;
// Anything not explicitly set by our string would be 0
var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
for (data, 0..) |ch, i| {
switch (ch) {
',' => {},
' ', ':' => {
// State transition
// We're going to coerce and this might not go well, but we
// want the compiler to create checks, so we'll turn on
// runtime safety for this block, forcing checks in ReleaseSafe and
// ReleaseFast modes.
const next_state = try endEnglishState(state, &rc, data[start..i]);
state = next_state;
start = i + 1;
},
else => {}, // We need to be pretty trusting on this format...
}
}
return rc;
}
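// A worked example of the state machine above; the input matches the
// "Convert whatever AWS is sending us to timestamp" test at the bottom of this file.
test "parseEnglishToDateTime worked example" {
    try std.testing.expectEqual(
        DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 },
        try parseEnglishToDateTime("Fri, 03 Jun 2022 18:12:36 GMT"),
    );
}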
fn endEnglishState(current_state: EnglishParsingState, date: *DateTime, prev_data: []const u8) !EnglishParsingState {
var next_state: EnglishParsingState = undefined;
log.debug("endEnglishState. Current state '{}', data: {s}", .{ current_state, prev_data });
// Using two switches is slightly less efficient, but more readable
switch (current_state) {
.End => return error.IllegalStateTransition,
.Start => next_state = .Day,
.Day => next_state = .Month,
.Month => next_state = .Year,
.Year => next_state = .Hour,
.Hour => next_state = .Minute,
.Minute => next_state = .Second,
.Second => next_state = .End,
}
switch (current_state) {
.Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
.Month => date.month = try parseEnglishMonth(prev_data),
.Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Start => {},
.End => return error.InvalidState,
}
return next_state;
}
fn parseEnglishMonth(data: []const u8) !u8 {
if (std.ascii.startsWithIgnoreCase(data, "Jan")) return 1;
if (std.ascii.startsWithIgnoreCase(data, "Feb")) return 2;
if (std.ascii.startsWithIgnoreCase(data, "Mar")) return 3;
if (std.ascii.startsWithIgnoreCase(data, "Apr")) return 4;
if (std.ascii.startsWithIgnoreCase(data, "May")) return 5;
if (std.ascii.startsWithIgnoreCase(data, "Jun")) return 6;
if (std.ascii.startsWithIgnoreCase(data, "Jul")) return 7;
if (std.ascii.startsWithIgnoreCase(data, "Aug")) return 8;
if (std.ascii.startsWithIgnoreCase(data, "Sep")) return 9;
if (std.ascii.startsWithIgnoreCase(data, "Oct")) return 10;
if (std.ascii.startsWithIgnoreCase(data, "Nov")) return 11;
if (std.ascii.startsWithIgnoreCase(data, "Dec")) return 12;
return error.InvalidMonth;
}
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}
const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
// Basic format YYYYMMDDThhmmss
if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
var start: usize = 0;
var state = IsoParsingState.Start;
// Anything not explicitly set by our string would be 0
var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
var zulu_time = false;
for (data, 0..) |ch, i| {
switch (ch) {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
if (state == .Start) state = .Year;
},
'?', '~', '%' => {
// These characters all specify the type of time (approximate, etc)
// and we will ignore them
},
'.', '-', ':', 'T' => {
// State transition
// We're going to coerce and this might not go well, but we
// want the compiler to create checks, so we'll turn on
// runtime safety for this block, forcing checks in ReleaseSafe and
// ReleaseFast modes.
const next_state = try endIsoState(state, &rc, data[start..i]);
state = next_state;
start = i + 1;
},
'Z' => zulu_time = true,
else => {
log.err("Invalid character: {c}", .{ch});
return error.InvalidCharacter;
},
}
}
if (!zulu_time) return error.LocalTimeNotSupported;
// We know we have a Z at the end of this, so let's grab the last bit
// of the string, minus the 'Z', and fly, eagles, fly!
_ = try endIsoState(state, &rc, data[start .. data.len - 1]);
return rc;
}
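// A worked example: the basic and extended ISO8601 forms parse to the same
// DateTime (both forms also appear in the tests at the bottom of this file).
test "parseIso8601ToDateTime worked example" {
    const basic = try parseIso8601ToDateTime("20200828T093227");
    const extended = try parseIso8601ToDateTime("2020-08-28T9:32:27Z");
    try std.testing.expectEqual(basic, extended);
}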
fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
return DateTime{
.year = try std.fmt.parseUnsigned(u16, data[0..4], 10),
.month = try std.fmt.parseUnsigned(u8, data[4..6], 10),
.day = try std.fmt.parseUnsigned(u8, data[6..8], 10),
.hour = try std.fmt.parseUnsigned(u8, data[9..11], 10),
.minute = try std.fmt.parseUnsigned(u8, data[11..13], 10),
.second = try std.fmt.parseUnsigned(u8, data[13..15], 10),
};
}
fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
var next_state: IsoParsingState = undefined;
log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
// Using two switches is slightly less efficient, but more readable
switch (current_state) {
.Start, .End => return error.IllegalStateTransition,
.Year => next_state = .Month,
.Month => next_state = .Day,
.Day => next_state = .Hour,
.Hour => next_state = .Minute,
.Minute => next_state = .Second,
.Second => next_state = .Millisecond,
.Millisecond => next_state = .End,
}
// TODO: This won't handle signed, which Iso supports. For now, let's fail
// explicitly
switch (current_state) {
.Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
.Month => date.month = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
.Millisecond => {}, // We'll throw that away - our granularity is 1 second
.Start, .End => return error.InvalidState,
}
return next_state;
}
pub fn dateTimeToTimestamp(datetime: DateTime) !i64 {
const epoch = DateTime{
.year = 1970,
.month = 1,
.day = 1,
.hour = 0,
.minute = 0,
.second = 0,
};
return secondsBetween(epoch, datetime);
}
const DateTimeToTimestampError = error{
DateTimeOutOfRange,
};
fn secondsBetween(start: DateTime, end: DateTime) DateTimeToTimestampError!i64 {
try validateDatetime(start);
try validateDatetime(end);
if (end.year < start.year) return -1 * try secondsBetween(end, start);
if (start.month != 1 or
start.day != 1 or
start.hour != 0 or
start.minute != 0 or
start.second != 0)
{
const seconds_into_start_year = secondsFromBeginningOfYear(
start.year,
start.month,
start.day,
start.hour,
start.minute,
start.second,
);
const new_start = DateTime{
.year = start.year,
.month = 1,
.day = 1,
.hour = 0,
.minute = 0,
.second = 0,
};
return (try secondsBetween(new_start, end)) - seconds_into_start_year;
}
const leap_years_between = leapYearsBetween(start.year, end.year);
const add_days: u1 = 0;
const years_diff = end.year - start.year;
// log.debug("Years from epoch: {d}, Leap years: {d}", .{ years_diff, leap_years_between });
const days_diff: i32 = (years_diff * DAYS_PER_YEAR) + leap_years_between + add_days;
// log.debug("Days with leap year, without month: {d}", .{days_diff});
const seconds_into_year = secondsFromBeginningOfYear(
end.year,
end.month,
end.day,
end.hour,
end.minute,
end.second,
);
return (days_diff * SECONDS_PER_DAY) + @as(i64, seconds_into_year);
}
fn validateDatetime(dt: DateTime) !void {
if (dt.month > 12 or
dt.day > 31 or
dt.hour >= 24 or
dt.minute >= 60 or
dt.second >= 60) return error.DateTimeOutOfRange;
}
fn secondsFromBeginningOfYear(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) u32 {
const current_year_is_leap_year = isLeapYear(year);
const leap_year_days_per_month: [12]u5 = .{ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
const normal_days_per_month: [12]u5 = .{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
const days_per_month = if (current_year_is_leap_year) leap_year_days_per_month else normal_days_per_month;
var current_month: usize = 1;
const end_month = month;
var days_diff: u32 = 0;
while (current_month != end_month) {
days_diff += days_per_month[current_month - 1]; // months are 1-based vs array is 0-based
current_month += 1;
}
// log.debug("Days with month, without day: {d}. Day of month {d}, will add {d} days", .{
// days_diff,
// day,
// day - 1,
// });
// We need -1 because we're not actually including the ending day (that's up to hour/minute)
// In other words, days in the month are 1-based, while hours/minutes are zero based
days_diff += day - 1;
// log.debug("Total days diff: {d}", .{days_diff});
var seconds_diff: u32 = days_diff * SECONDS_PER_DAY;
// From here out, we want to get everything into seconds
seconds_diff += @as(u32, hour) * 60 * 60;
seconds_diff += @as(u32, minute) * 60;
seconds_diff += @as(u32, second);
return seconds_diff;
}
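// A small worked example of the accumulation above: 2020 is a leap year, so
// March 1st falls 31 + 29 = 60 full days into the year.
test "secondsFromBeginningOfYear worked example" {
    try std.testing.expectEqual(@as(u32, 60 * SECONDS_PER_DAY), secondsFromBeginningOfYear(2020, 3, 1, 0, 0, 0));
}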
fn isLeapYear(year: u16) bool {
if (year % 4 != 0) return false;
if (year % 400 == 0) return true;
if (year % 100 == 0) return false;
return true;
}
fn leapYearsBetween(start_year_inclusive: u16, end_year_exclusive: u16) u16 {
const start = @min(start_year_inclusive, end_year_exclusive);
const end = @max(start_year_inclusive, end_year_exclusive);
var current = start;
// log.debug("Leap years starting from {d}, ending at {d}", .{ start, end });
while (current % 4 != 0 and current < end) {
current += 1;
}
if (current == end) return 0; // No leap years here. E.g. 1971-1972
// We're on a potential leap year, and now we can step by 4
var rc: u16 = 0;
while (current < end) {
if (current % 4 == 0) {
if (current % 100 != 0) {
// log.debug("Year {d} is leap year", .{current});
rc += 1;
current += 4;
continue;
}
// We're on a century, which is normally not a leap year, unless
// it's divisible by 400
if (current % 400 == 0) {
// log.debug("Year {d} is leap year", .{current});
rc += 1;
}
}
current += 4;
}
return rc;
}
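// Worked examples of the stepping logic above: the range is start-inclusive
// and end-exclusive, and century years only count when divisible by 400.
test "leapYearsBetween worked examples" {
    try std.testing.expectEqual(@as(u16, 12), leapYearsBetween(1970, 2020)); // 1972..2016, including 2000
    try std.testing.expectEqual(@as(u16, 1), leapYearsBetween(2000, 2001)); // 2000 is divisible by 400
}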
fn printDateTime(dt: DateTime) void {
log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
dt.year,
dt.month,
dt.day,
dt.hour,
dt.minute,
dt.second,
});
}
pub fn printNowUtc() void {
printDateTime(timestampToDateTime(std.time.timestamp()));
}
test "Convert timestamp to datetime" {
printDateTime(timestampToDateTime(std.time.timestamp()));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
// Get time for date: https://wtools.io/convert-date-time-to-unix-time
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}
test "Convert datetime to timestamp" {
try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}
test "Convert ISO8601 string to timestamp" {
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}
test "Convert whatever AWS is sending us to timestamp" {
const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}


@ -14,116 +14,8 @@ const testing = std.testing;
const mem = std.mem;
const maxInt = std.math.maxInt;
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (@typeInfo(@TypeOf(map)) == .optional) {
if (map == null)
return false
else
return serializeMapInternal(map.?, key, options, out_stream);
}
return serializeMapInternal(map, key, options, out_stream);
}
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (map.len == 0) {
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
try out_stream.writeByte('}');
return true;
}
// TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
if (options.whitespace) |_|
try out_stream.writeByte('\n');
for (map, 0..) |tag, i| {
if (tag.key == null or tag.value == null) continue;
// TODO: Deal with escaping and general "json.stringify" the values...
if (child_options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('"');
try jsonEscape(tag.key.?, child_options, out_stream);
_ = try out_stream.write("\":");
if (child_options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('"');
try jsonEscape(tag.value.?, child_options, out_stream);
try out_stream.writeByte('"');
if (i < map.len - 1) {
try out_stream.writeByte(',');
}
if (child_options.whitespace) |_|
try out_stream.writeByte('\n');
}
if (options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('}');
return true;
}
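// A hedged sketch of the output shape (the Tag struct is hypothetical, and
// the StringifyOptions defaults are assumed from this file): an empty map
// still serializes as "key":{} rather than being skipped.
test "serializeMap output shape (sketch)" {
    const Tag = struct { key: ?[]const u8, value: ?[]const u8 };
    const tags = [_]Tag{.{ .key = "env", .value = "prod" }};
    var buf = std.ArrayList(u8).init(testing.allocator);
    defer buf.deinit();
    _ = try serializeMap(tags[0..], "tags", StringifyOptions{ .whitespace = null }, buf.writer());
    try testing.expectEqualStrings("\"tags\":{\"env\":\"prod\"}", buf.items);
}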
// code within jsonEscape lifted from json.zig in stdlib
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
var i: usize = 0;
while (i < value.len) : (i += 1) {
switch (value[i]) {
// normal ascii character
0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
// only 2 characters that *must* be escaped
'\\' => try out_stream.writeAll("\\\\"),
'\"' => try out_stream.writeAll("\\\""),
// solidus is optional to escape
'/' => {
if (options.string.String.escape_solidus) {
try out_stream.writeAll("\\/");
} else {
try out_stream.writeByte('/');
}
},
// control characters with short escapes
// TODO: option to switch between unicode and 'short' forms?
0x8 => try out_stream.writeAll("\\b"),
0xC => try out_stream.writeAll("\\f"),
'\n' => try out_stream.writeAll("\\n"),
'\r' => try out_stream.writeAll("\\r"),
'\t' => try out_stream.writeAll("\\t"),
else => {
const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
// control characters (only things left with 1 byte length) should always be printed as unicode escapes
if (ulen == 1 or options.string.String.escape_unicode) {
const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
try outputUnicodeEscape(codepoint, out_stream);
} else {
try out_stream.writeAll(value[i .. i + ulen]);
}
i += ulen - 1;
},
}
}
}
// pub const WriteStream = @import("json/write_stream.zig").WriteStream;
// pub const writeStream = @import("json/write_stream.zig").writeStream;
const StringEscapes = union(enum) {
None,
@ -1424,8 +1316,8 @@ pub const Value = union(enum) {
}
pub fn dump(self: Value) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
var held = std.debug.getStderrMutex().acquire();
defer held.release();
const stderr = std.io.getStdErr().writer();
stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
@ -1705,22 +1597,12 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.@"enum" => |enumInfo| {
switch (token) {
.Number => |numberToken| {
if (!numberToken.is_integer) {
// probably is in scientific notation
const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
}
if (!numberToken.is_integer) return error.UnexpectedToken;
const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
return try std.meta.intToEnum(T, n);
},
.String => |stringToken| {
const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
if (std.meta.hasFn(T, "parse")) {
return try T.parse(source_slice);
}
switch (stringToken.escapes) {
.None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
.Some => {
@ -1841,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
}
inline for (structInfo.fields, 0..) |field, i| {
if (!fields_seen[i]) {
if (field.default_value_ptr) |default_value_ptr| {
if (field.default_value) |default_value_ptr| {
if (!field.is_comptime) {
const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
@field(r, field.name) = default_value;
@ -1891,18 +1773,18 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.pointer => |ptrInfo| {
const allocator = options.allocator orelse return error.AllocatorRequired;
switch (ptrInfo.size) {
.one => {
.One => {
const r: T = try allocator.create(ptrInfo.child);
errdefer allocator.destroy(r);
r.* = try parseInternal(ptrInfo.child, token, tokens, options);
return r;
},
.slice => {
.Slice => {
switch (token) {
.ArrayBegin => {
var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
errdefer {
while (arraylist.pop()) |v| {
while (arraylist.popOrNull()) |v| {
parseFree(ptrInfo.child, v, options);
}
arraylist.deinit();
@ -1947,7 +1829,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
if (value_type == null) return error.UnexpectedToken;
var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
errdefer {
while (arraylist.pop()) |v| {
while (arraylist.popOrNull()) |v| {
parseFree(ptrInfo.child, v, options);
}
arraylist.deinit();
@ -1997,7 +1879,7 @@ fn isMapPattern(comptime T: type) bool {
// Let's just double check before proceeding
const ti = @typeInfo(T);
if (ti != .pointer) return false;
if (ti.pointer.size != .slice) return false;
if (ti.pointer.size != .Slice) return false;
const ti_child = @typeInfo(ti.pointer.child);
if (ti_child != .@"struct") return false;
if (ti_child.@"struct".fields.len != 2) return false;
@ -2053,11 +1935,11 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
.pointer => |ptrInfo| {
const allocator = options.allocator orelse unreachable;
switch (ptrInfo.size) {
.one => {
.One => {
parseFree(ptrInfo.child, value.*, options);
allocator.destroy(value);
},
.slice => {
.Slice => {
for (value) |v| {
parseFree(ptrInfo.child, v, options);
}
@ -2402,7 +2284,7 @@ pub const Parser = struct {
return;
}
var value = p.stack.pop().?;
var value = p.stack.pop();
try p.pushToParent(&value);
},
.String => |s| {
@ -2468,7 +2350,7 @@ pub const Parser = struct {
return;
}
var value = p.stack.pop().?;
var value = p.stack.pop();
try p.pushToParent(&value);
},
.ObjectBegin => {
@ -3040,7 +2922,7 @@ pub fn stringify(
},
.error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
.pointer => |ptr_info| switch (ptr_info.size) {
.one => switch (@typeInfo(ptr_info.child)) {
.One => switch (@typeInfo(ptr_info.child)) {
.array => {
const Slice = []const std.meta.Elem(ptr_info.child);
return stringify(@as(Slice, value), options, out_stream);
@ -3051,7 +2933,7 @@ pub fn stringify(
},
},
// TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
.slice => {
.Slice => {
if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
try out_stream.writeByte('\"');
var i: usize = 0;


@ -1,6 +1,6 @@
const std = @import("std");
const aws = @import("aws.zig");
const json = @import("json");
const json = @import("json.zig");
var verbose: u8 = 0;


@ -1,5 +1,5 @@
const std = @import("std");
const service_list = @import("service_manifest");
const service_list = @import("models/service_manifest.zig");
const expectEqualStrings = std.testing.expectEqualStrings;
pub fn Services(comptime service_imports: anytype) type {
@ -12,7 +12,7 @@ pub fn Services(comptime service_imports: anytype) type {
item.* = .{
.name = @tagName(service_imports[i]),
.type = @TypeOf(import_field),
.default_value_ptr = &import_field,
.default_value = &import_field,
.is_comptime = false,
.alignment = 0,
};
@ -39,7 +39,7 @@ fn serviceCount(desired_services: anytype) usize {
pub const services = service_list;
test "services includes sts" {
try expectEqualStrings("2011-06-15", services.sts.version.?);
try expectEqualStrings("2011-06-15", services.sts.version);
}
test "sts includes get_caller_identity" {
try expectEqualStrings("GetCallerIdentity", services.sts.get_caller_identity.action_name);
@ -47,9 +47,9 @@ test "sts includes get_caller_identity" {
test "can get service and action name from request" {
// get request object. This call doesn't have parameters
const metadata = services.sts.get_caller_identity.Request.metaInfo();
try expectEqualStrings("2011-06-15", metadata.service_metadata.version.?);
try expectEqualStrings("2011-06-15", metadata.service_metadata.version);
}
test "can filter services" {
const filtered_services = Services(.{ .sts, .wafv2 }){};
try expectEqualStrings("2011-06-15", filtered_services.sts.version.?);
try expectEqualStrings("2011-06-15", filtered_services.sts.version);
}


@ -24,11 +24,10 @@ fn encodeStruct(
comptime options: EncodingOptions,
) !bool {
var rc = first;
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
const arena_alloc = arena.allocator();
inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
const field_name = try options.field_name_transformer(arena_alloc, field.name);
const field_name = try options.field_name_transformer(allocator, field.name);
defer if (options.field_name_transformer.* != defaultTransformer)
allocator.free(field_name);
// @compileLog(@typeInfo(field.field_type).Pointer);
rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
}
@ -51,7 +50,7 @@ pub fn encodeInternal(
.optional => if (obj) |o| {
rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
},
.pointer => |ti| if (ti.size == .one) {
.pointer => |ti| if (ti.size == .One) {
rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
} else {
if (!first) _ = try writer.write("&");


@ -25,7 +25,6 @@ pub const Element = struct {
tag: []const u8,
attributes: AttributeList,
children: ContentList,
next_sibling: ?*Element = null,
fn init(tag: []const u8, alloc: Allocator) Element {
return .{
@ -348,7 +347,7 @@ fn parseDocument(ctx: *ParseContext, backing_allocator: Allocator) !Document {
_ = ctx.eatWs();
try trySkipComments(ctx, allocator);
doc.root = (try tryParseElement(ctx, allocator, null)) orelse return error.InvalidDocument;
doc.root = (try tryParseElement(ctx, allocator)) orelse return error.InvalidDocument;
_ = ctx.eatWs();
try trySkipComments(ctx, allocator);
@@ -416,12 +415,12 @@ fn tryParseCharData(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}
fn parseContent(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) ParseError!Content {
fn parseContent(ctx: *ParseContext, alloc: Allocator) ParseError!Content {
if (try tryParseCharData(ctx, alloc)) |cd| {
return Content{ .CharData = cd };
} else if (try tryParseComment(ctx, alloc)) |comment| {
return Content{ .Comment = comment };
} else if (try tryParseElement(ctx, alloc, parent)) |elem| {
} else if (try tryParseElement(ctx, alloc)) |elem| {
return Content{ .Element = elem };
} else {
return error.UnexpectedCharacter;
@@ -441,7 +440,7 @@ fn tryParseAttr(ctx: *ParseContext, alloc: Allocator) !?*Attribute {
return attr;
}
fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
fn tryParseElement(ctx: *ParseContext, alloc: Allocator) !?*Element {
const start = ctx.offset;
if (!ctx.eat('<')) return null;
const tag = parseNameNoDupe(ctx) catch {
@@ -470,7 +469,7 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
break;
}
const content = try parseContent(ctx, alloc, element);
const content = try parseContent(ctx, alloc);
try element.children.append(content);
}
@@ -481,23 +480,6 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
_ = ctx.eatWs();
try ctx.expect('>');
if (parent) |p| {
var last_element: ?*Element = null;
for (0..p.children.items.len) |i| {
const child = p.children.items[p.children.items.len - i - 1];
if (child == .Element) {
last_element = child.Element;
break;
}
}
if (last_element) |lc| {
lc.next_sibling = element;
}
}
return element;
}
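Editor's note: the `parent` parameter and the block above exist only on the master side. After an element is closed, the parser walks the parent's children backwards to the most recent child element and points that element's `next_sibling` at the one just parsed, producing a sibling chain. A hypothetical helper (not in the repo) showing the walk this enables, which is the same loop xml_shaper's `repeated_root` path uses further down:

fn countSiblingsNamed(first: *Element, tag: []const u8) usize {
    var count: usize = 0;
    var current: ?*Element = first;
    while (current) |el| : (current = el.next_sibling) {
        if (std.mem.eql(u8, el.tag, tag)) count += 1;
    }
    return count;
}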
@@ -508,13 +490,13 @@ test "tryParseElement" {
{
var ctx = ParseContext.init("<= a='b'/>");
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc, null));
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
}
{
var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");
const size_attr = elem.?.attributes.items[0];
@@ -528,14 +510,14 @@ test "tryParseElement" {
{
var ctx = ParseContext.init("<python>test</python>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
}
{
var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "a");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
@@ -671,10 +653,7 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {
// This error is not strictly true, but we need to match one of the items
// from the error set provided by the other stdlib calls at the calling site
if (!alloc.resize(str, j)) {
defer alloc.free(str);
return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
}
if (!alloc.resize(str, j)) return error.OutOfMemory;
return str[0..j];
}
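Editor's note: the dupeAndUnescape hunk is a robustness fix. `std.mem.Allocator.resize` returns `false` when a buffer cannot be shrunk in place, and that is a legitimate answer rather than an allocation failure; master now falls back to copying the valid prefix instead of reporting `error.OutOfMemory` while holding a usable buffer. The pattern in isolation (a sketch, not the repo's function):

fn shrinkOrCopy(alloc: std.mem.Allocator, buf: []u8, new_len: usize) ![]u8 {
    if (alloc.resize(buf, new_len)) return buf[0..new_len];
    // In-place shrink refused: hand back a copy and release the original.
    defer alloc.free(buf);
    return alloc.dupe(u8, buf[0..new_len]);
}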

View file

@@ -1,793 +0,0 @@
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;
/// Options for controlling XML serialization behavior
pub const StringifyOptions = struct {
/// Controls whitespace insertion for easier human readability
whitespace: Whitespace = .minified,
/// Should optional fields with null value be written?
emit_null_optional_fields: bool = true,
// TODO: Implement
/// Arrays/slices of u8 are typically encoded as strings. This option emits them as arrays of numbers instead. Does not affect calls to objectField*().
emit_strings_as_arrays: bool = false,
/// Controls whether to include XML declaration at the beginning
include_declaration: bool = true,
/// Root element name to use when serializing a value that doesn't have a natural name
root_name: ?[]const u8 = "root",
/// Root attributes (e.g. xmlns="...") that will be added to the root element node only
root_attributes: []const u8 = "",
/// Function to determine the element name for an array item based on the element
/// name of the array containing the elements. See arrayElementPluralToSingluarTransformation
/// and arrayElementNoopTransformation functions for examples
arrayElementNameConversion: *const fn (allocator: std.mem.Allocator, name: ?[]const u8) error{OutOfMemory}!?[]const u8 = arrayElementPluralToSingluarTransformation,
pub const Whitespace = enum {
minified,
indent_1,
indent_2,
indent_3,
indent_4,
indent_8,
indent_tab,
};
};
/// Error set for XML serialization
pub const XmlSerializeError = error{
/// Unsupported type for XML serialization
UnsupportedType,
/// Out of memory
OutOfMemory,
/// Write error
WriteError,
};
/// Serializes a value to XML and writes it to the provided writer
pub fn stringify(
value: anytype,
options: StringifyOptions,
writer: anytype,
) !void {
// Write XML declaration if requested
if (options.include_declaration)
try writer.writeAll("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
// Start serialization with the root element
const root_name = options.root_name;
if (@typeInfo(@TypeOf(value)) != .optional or value == null)
try serializeValue(value, root_name, options, writer.any(), 0)
else
try serializeValue(value.?, root_name, options, writer.any(), 0);
}
/// Serializes a value to XML and returns an allocated string
pub fn stringifyAlloc(
allocator: Allocator,
value: anytype,
options: StringifyOptions,
) ![]u8 {
var list = std.ArrayList(u8).init(allocator);
errdefer list.deinit();
try stringify(value, options, list.writer());
return list.toOwnedSlice();
}
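// Editor's sketch of typical usage (any allocator works; caller owns the result):
//   const xml_text = try stringifyAlloc(allocator, person, .{ .root_name = "person" });
//   defer allocator.free(xml_text);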
/// Internal function to serialize a value with proper indentation
fn serializeValue(
value: anytype,
element_name: ?[]const u8,
options: StringifyOptions,
writer: anytype,
depth: usize,
) !void {
const T = @TypeOf(value);
// const output_indent = !(!options.emit_null_optional_fields and @typeInfo(@TypeOf(value)) == .optional and value == null);
const output_indent = options.emit_null_optional_fields or @typeInfo(@TypeOf(value)) != .optional or value != null;
if (output_indent and element_name != null)
try writeIndent(writer, depth, options.whitespace);
// Start element tag
if (@typeInfo(T) != .optional and @typeInfo(T) != .array) {
if (element_name) |n| {
try writer.writeAll("<");
try writer.writeAll(n);
if (depth == 0 and options.root_attributes.len > 0) {
try writer.writeByte(' ');
try writer.writeAll(options.root_attributes);
}
try writer.writeAll(">");
}
}
// Handle different types
switch (@typeInfo(T)) {
.bool => try writer.writeAll(if (value) "true" else "false"),
.int, .comptime_int, .float, .comptime_float => try writer.print("{}", .{value}),
.pointer => |ptr_info| {
switch (ptr_info.size) {
.one => {
// We don't want to write the opening tag a second time, so
// we will pass null, then come back and close before returning
//
// ...but...in the event of a *[]const u8, we do want to pass that in,
// but only if emit_strings_as_arrays is true
const child_ti = @typeInfo(ptr_info.child);
const el_name = if (options.emit_strings_as_arrays and child_ti == .array and child_ti.array.child == u8)
element_name
else
null;
try serializeValue(value.*, el_name, options, writer, depth);
try writeClose(writer, element_name);
return;
},
.slice => {
if (ptr_info.child == u8) {
// String type
try serializeString(writer, element_name, value, options, depth);
} else {
// Array of values
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
var buf: [256]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&buf);
const alloc = fba.allocator();
const item_name = try options.arrayElementNameConversion(alloc, element_name);
for (value) |item| {
try serializeValue(item, item_name, options, writer, depth + 1);
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
}
try writeIndent(writer, depth, options.whitespace);
}
},
else => return error.UnsupportedType,
}
},
.array => |array_info| {
if (!options.emit_strings_as_arrays or array_info.child != u8) {
if (element_name) |n| {
try writer.writeAll("<");
try writer.writeAll(n);
try writer.writeAll(">");
}
}
if (array_info.child == u8) {
// Fixed-size string
const slice = &value;
try serializeString(writer, element_name, slice, options, depth);
} else {
// Fixed-size array
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
var buf: [256]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&buf);
const alloc = fba.allocator();
const item_name = try options.arrayElementNameConversion(alloc, element_name);
for (value) |item| {
try serializeValue(item, item_name, options, writer, depth + 1);
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
}
try writeIndent(writer, depth, options.whitespace);
}
if (!options.emit_strings_as_arrays or array_info.child != u8)
try writeClose(writer, element_name);
return;
},
.@"struct" => |struct_info| {
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
inline for (struct_info.fields) |field| {
const field_name =
if (std.meta.hasFn(T, "fieldNameFor"))
value.fieldNameFor(field.name)
else
field.name; // TODO: field mapping
const field_value = @field(value, field.name);
try serializeValue(
field_value,
field_name,
options,
writer,
depth + 1,
);
if (options.whitespace != .minified) {
if (!options.emit_null_optional_fields and @typeInfo(@TypeOf(field_value)) == .optional and field_value == null) {
// Skip writing anything
} else {
try writer.writeByte('\n');
}
}
}
try writeIndent(writer, depth, options.whitespace);
},
.optional => {
if (options.emit_null_optional_fields or value != null) {
if (element_name) |n| {
try writer.writeAll("<");
try writer.writeAll(n);
try writer.writeAll(">");
}
}
if (value) |payload| {
try serializeValue(payload, null, options, writer, depth);
} else {
// For null values, we'll write an empty element
// We've already written the opening tag, so just close it immediately
if (options.emit_null_optional_fields)
try writeClose(writer, element_name);
return;
}
},
.null => {
// Empty element
},
.@"enum" => {
try std.fmt.format(writer, "{s}", .{@tagName(value)});
},
.@"union" => |union_info| {
if (union_info.tag_type) |_| {
inline for (union_info.fields) |field| {
if (@field(std.meta.Tag(T), field.name) == std.meta.activeTag(value)) {
try serializeValue(
@field(value, field.name),
field.name,
options,
writer,
depth,
);
break;
}
}
} else {
return error.UnsupportedType;
}
},
else => return error.UnsupportedType,
}
try writeClose(writer, element_name);
}
fn writeClose(writer: anytype, element_name: ?[]const u8) !void {
// Close element tag
if (element_name) |n| {
try writer.writeAll("</");
try writer.writeAll(n);
try writer.writeAll(">");
}
}
/// Writes indentation based on depth and indent level
fn writeIndent(writer: anytype, depth: usize, whitespace: StringifyOptions.Whitespace) @TypeOf(writer).Error!void {
var char: u8 = ' ';
const n_chars = switch (whitespace) {
.minified => return,
.indent_1 => 1 * depth,
.indent_2 => 2 * depth,
.indent_3 => 3 * depth,
.indent_4 => 4 * depth,
.indent_8 => 8 * depth,
.indent_tab => blk: {
char = '\t';
break :blk depth;
},
};
try writer.writeByteNTimes(char, n_chars);
}
fn serializeString(
writer: anytype,
element_name: ?[]const u8,
value: []const u8,
options: StringifyOptions,
depth: usize,
) @TypeOf(writer).Error!void {
if (options.emit_strings_as_arrays) {
// if (true) return error.seestackrun;
for (value) |c| {
try writeIndent(writer, depth + 1, options.whitespace);
var buf: [256]u8 = undefined;
var fba = std.heap.FixedBufferAllocator.init(&buf);
const alloc = fba.allocator();
const item_name = try options.arrayElementNameConversion(alloc, element_name);
if (item_name) |n| {
try writer.writeAll("<");
try writer.writeAll(n);
try writer.writeAll(">");
}
try writer.print("{d}", .{c});
try writeClose(writer, item_name);
if (options.whitespace != .minified) {
try writer.writeByte('\n');
}
}
return;
}
try escapeString(writer, value);
}
/// Escapes special characters in XML strings
fn escapeString(writer: anytype, value: []const u8) @TypeOf(writer).Error!void {
for (value) |c| {
switch (c) {
'&' => try writer.writeAll("&amp;"),
'<' => try writer.writeAll("&lt;"),
'>' => try writer.writeAll("&gt;"),
'"' => try writer.writeAll("&quot;"),
'\'' => try writer.writeAll("&apos;"),
else => try writer.writeByte(c),
}
}
}
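// Editor's note: escapeString maps all five predefined XML entities, e.g.
// `<a href="x">'&'</a>` becomes `&lt;a href=&quot;x&quot;&gt;&apos;&amp;&apos;&lt;/a&gt;`.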
/// Does no transformation on the input array
pub fn arrayElementNoopTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
_ = allocator;
return name;
}
/// Attempts to convert a plural name to singular for array items
pub fn arrayElementPluralToSingluarTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
if (name == null or name.?.len < 3) return name;
const n = name.?;
// There are a ton of these words, I'm just adding two for now
// https://wordmom.com/nouns/end-e
const es_exceptions = &[_][]const u8{
"types",
"bytes",
};
for (es_exceptions) |exception| {
if (std.mem.eql(u8, exception, n)) {
return n[0 .. n.len - 1];
}
}
// Very basic English pluralization rules
if (std.mem.endsWith(u8, n, "s")) {
if (std.mem.endsWith(u8, n, "ies")) {
// e.g., "entries" -> "entry"
return try std.mem.concat(allocator, u8, &[_][]const u8{ n[0 .. n.len - 3], "y" });
} else if (std.mem.endsWith(u8, n, "es")) {
return n[0 .. n.len - 2]; // e.g., "boxes" -> "box"
} else {
return n[0 .. n.len - 1]; // e.g., "items" -> "item"
}
}
return name; // Not recognized as plural
}
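// Editor's sketch: the rules above by example. An arena sidesteps the mixed
// ownership (the "ies" branch allocates; every other branch returns a slice
// of the caller's input, which must not be freed on its own).
test "pluralization rules by example" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();
    const alloc = arena.allocator();
    try std.testing.expectEqualStrings("entry", (try arrayElementPluralToSingluarTransformation(alloc, "entries")).?);
    try std.testing.expectEqualStrings("box", (try arrayElementPluralToSingluarTransformation(alloc, "boxes")).?);
    // "types" is in es_exceptions; the plain "es" rule would have produced "typ"
    try std.testing.expectEqualStrings("type", (try arrayElementPluralToSingluarTransformation(alloc, "types")).?);
    try std.testing.expectEqualStrings("item", (try arrayElementPluralToSingluarTransformation(alloc, "items")).?);
}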
// Tests
test "stringify basic types" {
const testing = std.testing;
const allocator = testing.allocator;
// Test boolean
{
const result = try stringifyAlloc(allocator, true, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>true</root>", result);
}
// Test comptime integer
{
const result = try stringifyAlloc(allocator, 42, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
}
// Test integer
{
const result = try stringifyAlloc(allocator, @as(usize, 42), .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
}
// Test float
{
const result = try stringifyAlloc(allocator, 3.14, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>3.14e0</root>", result);
}
// Test string
{
const result = try stringifyAlloc(allocator, "hello", .{});
// @compileLog(@typeInfo(@TypeOf("hello")).pointer.size);
// @compileLog(@typeName(@typeInfo(@TypeOf("hello")).pointer.child));
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello</root>", result);
}
// Test string with special characters
{
const result = try stringifyAlloc(allocator, "hello & world < > \" '", .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello &amp; world &lt; &gt; &quot; &apos;</root>", result);
}
}
test "stringify arrays" {
const testing = std.testing;
const allocator = testing.allocator;
// Test array of integers
{
const arr = [_]i32{ 1, 2, 3 };
const result = try stringifyAlloc(allocator, arr, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>1</root><root>2</root><root>3</root></root>", result);
}
// Test array of strings
{
const arr = [_][]const u8{ "one", "two", "three" };
const result = try stringifyAlloc(allocator, arr, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>one</root><root>two</root><root>three</root></root>", result);
}
// Test array with custom root name
{
const arr = [_]i32{ 1, 2, 3 };
const result = try stringifyAlloc(allocator, arr, .{ .root_name = "items" });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<items><item>1</item><item>2</item><item>3</item></items>", result);
}
}
test "stringify structs" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
name: []const u8,
age: u32,
is_active: bool,
};
// Test basic struct
{
const person = Person{
.name = "John",
.age = 30,
.is_active = true,
};
const result = try stringifyAlloc(allocator, person, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><age>30</age><is_active>true</is_active></root>", result);
}
// Test struct with pretty printing
{
const person = Person{
.name = "John",
.age = 30,
.is_active = true,
};
const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n <name>John</name>\n <age>30</age>\n <is_active>true</is_active>\n</root>", result);
}
// Test nested struct
{
const Address = struct {
street: []const u8,
city: []const u8,
};
const PersonWithAddress = struct {
name: []const u8,
address: Address,
};
const person = PersonWithAddress{
.name = "John",
.address = Address{
.street = "123 Main St",
.city = "Anytown",
},
};
const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n <name>John</name>\n <address>\n <street>123 Main St</street>\n <city>Anytown</city>\n </address>\n</root>", result);
}
}
test "stringify optional values" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
name: []const u8,
middle_name: ?[]const u8,
};
// Test with present optional
{
const person = Person{
.name = "John",
.middle_name = "Robert",
};
const result = try stringifyAlloc(allocator, person, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
}
// Test with null optional
{
const person = Person{
.name = "John",
.middle_name = null,
};
const result = try stringifyAlloc(allocator, person, .{});
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name></middle_name></root>", result);
}
}
test "stringify optional values with emit_null_optional_fields == false" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
name: []const u8,
middle_name: ?[]const u8,
};
// Test with present optional
{
const person = Person{
.name = "John",
.middle_name = "Robert",
};
const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
}
// Test with null optional
{
const person = Person{
.name = "John",
.middle_name = null,
};
const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name></root>", result);
}
}
test "stringify with custom options" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
first_name: []const u8,
last_name: []const u8,
};
const person = Person{
.first_name = "John",
.last_name = "Doe",
};
// Test without XML declaration
{
const result = try stringifyAlloc(allocator, person, .{ .include_declaration = false });
defer allocator.free(result);
try testing.expectEqualStrings("<root><first_name>John</first_name><last_name>Doe</last_name></root>", result);
}
// Test with custom root name
{
const result = try stringifyAlloc(allocator, person, .{ .root_name = "person" });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<person><first_name>John</first_name><last_name>Doe</last_name></person>", result);
}
// Test with custom indent level
{
const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
defer allocator.free(result);
try testing.expectEqualStrings(
\\<?xml version="1.0" encoding="UTF-8"?>
\\<root>
\\ <first_name>John</first_name>
\\ <last_name>Doe</last_name>
\\</root>
, result);
}
// Test with output []u8 as array
{
// pointer, size 1, child == .array, child.array.child == u8
// @compileLog(@typeInfo(@typeInfo(@TypeOf("foo")).pointer.child));
const result = try stringifyAlloc(allocator, "foo", .{ .emit_strings_as_arrays = true, .root_name = "bytes" });
defer allocator.free(result);
try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<bytes><byte>102</byte><byte>111</byte><byte>111</byte></bytes>", result);
}
}
test "structs with custom field names" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
first_name: []const u8,
last_name: []const u8,
pub fn fieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
if (std.mem.eql(u8, field_name, "first_name")) return "GivenName";
if (std.mem.eql(u8, field_name, "last_name")) return "FamilyName";
unreachable;
}
};
const person = Person{
.first_name = "John",
.last_name = "Doe",
};
{
const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
defer allocator.free(result);
try testing.expectEqualStrings(
\\<?xml version="1.0" encoding="UTF-8"?>
\\<root>
\\ <GivenName>John</GivenName>
\\ <FamilyName>Doe</FamilyName>
\\</root>
, result);
}
}
test "structs with optional values" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
first_name: []const u8,
middle_name: ?[]const u8 = null,
last_name: []const u8,
};
const person = Person{
.first_name = "John",
.last_name = "Doe",
};
{
const result = try stringifyAlloc(
allocator,
person,
.{
.whitespace = .indent_2,
.emit_null_optional_fields = false,
.root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
},
);
defer allocator.free(result);
try testing.expectEqualStrings(
\\<?xml version="1.0" encoding="UTF-8"?>
\\<root xmlns="http://example.com/blah/xxxx/">
\\ <first_name>John</first_name>
\\ <last_name>Doe</last_name>
\\</root>
, result);
}
}
test "optional structs with value" {
const testing = std.testing;
const allocator = testing.allocator;
const Person = struct {
first_name: []const u8,
middle_name: ?[]const u8 = null,
last_name: []const u8,
};
const person: ?Person = Person{
.first_name = "John",
.last_name = "Doe",
};
{
const result = try stringifyAlloc(
allocator,
person,
.{
.whitespace = .indent_2,
.emit_null_optional_fields = false,
.root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
},
);
defer allocator.free(result);
try testing.expectEqualStrings(
\\<?xml version="1.0" encoding="UTF-8"?>
\\<root xmlns="http://example.com/blah/xxxx/">
\\ <first_name>John</first_name>
\\ <last_name>Doe</last_name>
\\</root>
, result);
}
}
test "nested optional structs with value" {
const testing = std.testing;
const allocator = testing.allocator;
const Name = struct {
first_name: []const u8,
middle_name: ?[]const u8 = null,
last_name: []const u8,
};
const Person = struct {
name: ?Name,
};
const person: ?Person = Person{
.name = .{
.first_name = "John",
.last_name = "Doe",
},
};
{
const result = try stringifyAlloc(
allocator,
person,
.{
.whitespace = .indent_2,
.emit_null_optional_fields = false,
.root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
},
);
defer allocator.free(result);
try testing.expectEqualStrings(
\\<?xml version="1.0" encoding="UTF-8"?>
\\<root xmlns="http://example.com/blah/xxxx/">
\\ <name>
\\ <first_name>John</first_name>
\\ <last_name>Doe</last_name>
\\ </name>
\\</root>
, result);
}
}

View file

@@ -1,7 +1,6 @@
const std = @import("std");
const xml = @import("xml.zig");
const date = @import("date");
const sm = @import("service_manifest");
const date = @import("date.zig");
const log = std.log.scoped(.xml_shaper);
@@ -95,52 +94,6 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parsed(T) {
return Parsed(T).init(arena_allocator, try parseInternal(T, root, opts), parsed);
}
pub const XmlArrayStyle = enum {
collection, // Has a container element and list of child elements
repeated_root, // Repeats the same element without a container, e.g. S3 ListBucketResult
};
fn detectArrayStyle(comptime T: type, element: *xml.Element, options: ParseOptions) !XmlArrayStyle {
_ = options;
if (@typeInfo(T) != .@"struct") {
return .collection;
}
// does the element have child elements that match our expected struct?
const field_names = comptime blk: {
var result: [std.meta.fieldNames(T).len]struct {
[]const u8,
} = undefined;
for (std.meta.fieldNames(T), 0..) |field_name, i| {
const key = if (@hasDecl(T, "fieldNameFor"))
T.fieldNameFor(undefined, field_name)
else
field_name;
result[i] = .{key};
}
break :blk std.StaticStringMap(void).initComptime(result);
};
var matching_fields: usize = 0;
var element_iterator = element.elements();
while (element_iterator.next()) |el| {
if (field_names.has(el.tag)) {
matching_fields += 1;
}
}
if (matching_fields > 0) {
return .repeated_root;
}
return .collection;
}
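// Editor's note: the two shapes detectArrayStyle distinguishes, for a struct
// whose fields render as <Key>/<Size>:
//
//   .repeated_root: the element itself is one item and same-tag siblings are
//   the rest (no container), as in S3's ListBucketResult:
//       <Contents><Key>a</Key><Size>1</Size></Contents>
//       <Contents><Key>b</Key><Size>2</Size></Contents>
//
//   .collection: the element is only a container and each child element
//   parses to one item:
//       <Items>
//         <Item><Key>a</Key><Size>1</Size></Item>
//       </Items>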
fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
switch (@typeInfo(T)) {
.bool => {
@@ -209,10 +162,8 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
return try parseInternal(optional_info.child, element, options);
}
},
.@"enum" => {
if (T == date.Timestamp) {
return try date.Timestamp.parse(element.children.items[0].CharData);
}
.@"enum" => |enum_info| {
_ = enum_info;
// const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
// if (numeric) |num| {
// return std.meta.intToEnum(T, num);
@@ -362,13 +313,13 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
.pointer => |ptr_info| {
const allocator = options.allocator orelse return error.AllocatorRequired;
switch (ptr_info.size) {
.one => {
.One => {
const r: T = try allocator.create(ptr_info.child);
errdefer allocator.free(r);
r.* = try parseInternal(ptr_info.child, element, options);
return r;
},
.slice => {
.Slice => {
// TODO: Detect and deal with arrays. This will require two
// passes through the element children - one to
// determine if it is an array, one to parse the elements
@@ -377,38 +328,30 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
// <Item>bar</Item>
// <Items>
if (ptr_info.child != u8) {
const array_style = try detectArrayStyle(ptr_info.child, element, options);
log.debug("type = {s}, style = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @tagName(array_style), @typeName(ptr_info.child), element.tag });
log.debug("type = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @typeName(ptr_info.child), element.tag });
var iterator = element.elements();
var children = std.ArrayList(ptr_info.child).init(allocator);
defer children.deinit();
switch (array_style) {
.collection => {
var iterator = element.elements();
while (iterator.next()) |child_element| {
try children.append(try parseInternal(ptr_info.child, child_element, options));
}
},
.repeated_root => {
var current: ?*Element = element;
while (current) |el| : (current = el.next_sibling) {
if (!std.mem.eql(u8, el.tag, element.tag)) continue;
try children.append(try parseInternal(ptr_info.child, el, options));
}
},
while (iterator.next()) |child_element| {
try children.append(try parseInternal(ptr_info.child, child_element, options));
}
return children.toOwnedSlice();
// var inx: usize = 0;
// while (inx < children.len) {
// switch (element.children.items[inx]) {
// .Element => children[inx] = try parseInternal(ptr_info.child, element.children.items[inx].Element, options),
// .CharData => children[inx] = try allocator.dupe(u8, element.children.items[inx].CharData),
// .Comment => children[inx] = try allocator.dupe(u8, element.children.items[inx].Comment), // This might be an error...
// }
// inx += 1;
// }
}
return try allocator.dupe(u8, element.children.items[0].CharData);
},
.many => {
.Many => {
return error.ManyPointerSizeNotImplemented;
},
.c => {
.C => {
return error.CPointerSizeNotImplemented;
},
}
@@ -793,33 +736,3 @@ test "compiler assertion failure 2" {
defer parsed_data.deinit();
try testing.expect(parsed_data.parsed_value.key_group_list.?.quantity == 42);
}
test "can parse list objects" {
const data =
\\<?xml version="1.0" encoding="UTF-8"?>
\\<ListBucketResult>
\\ <Contents>
\\ <Key>file1.txt</Key>
\\ <Size>1024</Size>
\\ </Contents>
\\ <Contents>
\\ <Key>file2.jpg</Key>
\\ <Size>2048</Size>
\\ </Contents>
\\</ListBucketResult>
;
const Response = sm.s3.list_objects_v2.Response;
const parsed_data = try parse(Response, data, .{ .allocator = testing.allocator });
defer parsed_data.deinit();
const response: Response = parsed_data.parsed_value;
const s3_objects: []sm.s3.Object = response.contents.?;
try testing.expectEqual(2, s3_objects.len);
try testing.expectEqualStrings(s3_objects[0].key.?, "file1.txt");
try testing.expectEqualStrings(s3_objects[1].key.?, "file2.jpg");
try testing.expectEqual(s3_objects[0].size.?, 1024);
try testing.expectEqual(s3_objects[1].size.?, 2048);
}