Compare commits

..

5 commits

Author SHA1 Message Date
ab47cb9deb
better test web server management
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 1m13s
2025-04-17 17:42:26 -07:00
ae8298b18c
update CI based on master
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 3h10m4s
2025-04-16 19:38:49 -07:00
5cb0c3cc88
add test server timeout
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 7s
2025-04-16 19:36:03 -07:00
3e146f143c
sync workflows from master branch to zig-mach branch 2025-03-21 12:48:55 -07:00
838f0ffb96
fix json serialization for null/empty maps 2025-03-21 12:43:07 -07:00
52 changed files with 3693 additions and 4917 deletions

6
.envrc
View file

@ -1,8 +1,8 @@
# vi: ft=sh
# shellcheck shell=bash
if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then
source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ="
fi
use zig 0.14.0
use zig 2024.11.0-mach

View file

@ -1,6 +1,5 @@
name: AWS-Zig Build
on:
workflow_dispatch:
push:
branches:
- 'master'
@ -18,17 +17,11 @@ jobs:
- name: Check out repository code
uses: actions/checkout@v4
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v2.0.5
# We will let setup-zig use minimum_zig_version from build.zig.zon
# setup-zig also sets up the zig cache appropriately
- name: Ulimit
run: ulimit -a
- name: Run smoke test
run: zig build smoke-test --verbose
uses: mlugg/setup-zig@v1.2.1
with:
version: 0.14.0
- name: Run tests
run: zig build test --verbose --summary all
- name: Run tests (release mode)
run: zig build test -Doptimize=ReleaseSafe --verbose
run: zig build test --verbose
# Zig build scripts don't have the ability to import dependencies directly
# (https://github.com/ziglang/zig/issues/18164). We can allow downstream
# build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@ -73,7 +66,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
- name: Build example

View file

@ -1,8 +1,8 @@
name: aws-zig mach nominated build
on:
workflow_dispatch:
# schedule:
# - cron: '0 12 * * *' # noon UTC, 4AM Pacific
schedule:
- cron: '0 12 * * *' # noon UTC, 4AM Pacific
push:
branches:
- 'zig-mach'
@ -26,7 +26,7 @@ jobs:
with:
ref: zig-mach
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v2.0.1
uses: https://github.com/mlugg/setup-zig@v1.2.1
with:
version: mach-latest
- name: Restore Zig caches

View file

@ -26,15 +26,11 @@ jobs:
with:
ref: zig-develop
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v2.0.5
uses: mlugg/setup-zig@v1.2.1
with:
version: master
- name: Run smoke test
run: zig build smoke-test --verbose
- name: Run full tests
run: zig build test --verbose --summary all
- name: Run tests (release mode)
run: zig build test -Doptimize=ReleaseSafe --verbose
- name: Run tests
run: zig build test --verbose
# Zig package manager expects everything to be inside a directory in the archive,
# which it then strips out on download. So we need to shove everything inside a directory
# the way GitHub/Gitea does for repo archives
@ -67,7 +63,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
- name: Build example

View file

@ -3,7 +3,7 @@ on:
workflow_dispatch:
push:
branches:
- 'zig-0.14.x'
- 'zig-0.13'
env:
ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
@ -18,18 +18,13 @@ jobs:
- name: Check out repository code
uses: actions/checkout@v4
with:
ref: zig-0.14.x
ref: zig-0.13
- name: Setup Zig
uses: https://github.com/mlugg/setup-zig@v2.0.1
uses: mlugg/setup-zig@v1.2.1
with:
version: 0.14.0
- name: Run smoke test
run: zig build smoke-test --verbose
- name: Run full tests
run: zig build test --verbose --summary all
# Release mode fix not backported to 0.13.0 code
#- name: Run tests (release mode)
# run: zig build test -Doptimize=ReleaseSafe --verbose
version: 0.13.0
- name: Run tests
run: zig build test --verbose
# Zig build scripts don't have the ability to import dependencies directly
# (https://github.com/ziglang/zig/issues/18164). We can allow downstream
# build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@ -74,7 +69,7 @@ jobs:
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Publish source code with generated models
run: |
curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
--upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
- name: Build example

31
.github/workflows/build.yaml vendored Normal file
View file

@ -0,0 +1,31 @@
name: AWS-Zig Build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- '*'
- '!zig-develop*'
jobs:
build-zig-0-12-0-amd64:
runs-on: ubuntu-latest
env:
ZIG_VERSION: 0.13.0
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
sudo tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
sudo ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
- name: Run tests
run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
- name: Build example
run: ( cd example && zig build ) # Make sure example builds

36
.github/workflows/zig-mach.yaml vendored Normal file
View file

@ -0,0 +1,36 @@
name: aws-zig mach nominated build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- 'zig-develop*'
jobs:
build-zig-mach-latest:
runs-on: ubuntu-latest
# Need to use the default container with node and all that, so we can
# use JS-based actions like actions/checkout@v3...
# container:
# image: alpine:3.15.0
env:
ZIG_VERSION: mach-latest
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
apt-get update && apt-get install -y jq
file="$(curl -Osw '%{filename_effective}' "$(curl -s https://machengine.org/zig/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
sudo tar x -C /usr/local -f "${file}"
sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
zig version
- name: Run tests
run: zig build test -Dbroken-windows --verbose
- name: Build example
run: ( cd example && zig build ) # Make sure example builds

36
.github/workflows/zig-nightly.yaml vendored Normal file
View file

@ -0,0 +1,36 @@
name: aws-zig nightly build
run-name: ${{ github.actor }} building AWS Zig SDK
on:
push:
branches:
- 'zig-develop*'
jobs:
build-zig-nightly:
runs-on: ubuntu-latest
# Need to use the default container with node and all that, so we can
# use JS-based actions like actions/checkout@v3...
# container:
# image: alpine:3.15.0
env:
ZIG_VERSION: master
ARCH: x86_64
if: ${{ github.env.GITEA_ACTIONS != 'true' }}
steps:
- name: Check out repository code
uses: actions/checkout@v4
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
apt-get update && apt-get install -y jq
file="$(curl -Osw '%{filename_effective}' "$(curl -s https://ziglang.org/download/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
sudo tar x -C /usr/local -f "${file}"
sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
zig version
- name: Run tests
run: zig build test -Dbroken-windows --verbose
- name: Build example
run: ( cd example && zig build ) # Make sure example builds

View file

@ -1,5 +0,0 @@
[tools]
pre-commit = "latest"
"ubi:DonIsaac/zlint" = "latest"
zig = "0.15.1"
zls = "0.15.0"

View file

@ -1,30 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/batmac/pre-commit-zig
rev: v0.3.0
hooks:
- id: zig-fmt
- id: zig-build
- repo: local
hooks:
- id: smoke-test
name: Run zig build smoke-test
entry: zig
args: ["build", "--verbose", "smoke-test"]
language: system
types: [file]
pass_filenames: false
- id: zlint
name: Run zlint
entry: zlint
args: ["--deny-warnings", "--fix"]
language: system
types: [zig]

View file

@ -1,20 +1,24 @@
AWS SDK for Zig
===============
[Zig 0.15.1](https://ziglang.org/download/#release-0.15.1):
[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
[![Build Status: Zig 0.15.1](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
[![Build Status: Zig 0.13.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
[![Build Status: Mach nominated](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-mach.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)
[Nightly Zig](https://ziglang.org/download/):
[![Build Status: Zig Nightly](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-nightly.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)
[Zig 0.14.1](https://ziglang.org/download/#release-0.14.1):
[![Build Status: Zig 0.14.x](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-previous.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
panics under CI, but I have not yet reproduced this panic. Running manually on
multiple machines appears to be working properly.
Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
in x86_64-linux, and will vary based on services used. Tested targets:
in x86_linux, and will vary based on services used. Tested targets:
* x86_64-linux
* riscv64-linux
@ -26,34 +30,22 @@ in x86_64-linux, and will vary based on services used. Tested targets:
Tested targets are built, but not continuously tested, by CI.
Branches
--------
Zig-Develop Branch
------------------
* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
for breaking changes that will need to be dealt with when
a new zig release appears. Expect significant delays in any
build failures (PRs always welcome!).
* **master**: This branch tracks the latest released zig version
* **zig-0.14.x**: This branch tracks the 0.14/0.14.1 released zig versions.
Support for these previous version is best effort, generally
degrading over time. Fixes will generally appear in master, then
backported into the previous version.
Other branches/tags exist but are unsupported
This branch is intended for use with the in-development version of Zig. This
starts with 0.12.0-dev.3180+83e578a18. This is aligned with [Mach Engine's Nominated
Zig Versions](https://machengine.org/about/nominated-zig/). Nightly zig versions
are difficult to keep up with and there is no special effort made there, build
status is FYI (and used as a canary for nominated zig versions).
Building
--------
`zig build` should work. It will build the code generation project, fetch model
files from upstream AWS Go SDK v2, run the code generation, then build the main
project with the generated code. Testing can be done with `zig build test`. Note that
this command tests on all supported architectures, so for a faster testing
process, use `zig build smoke-test` instead.
project with the generated code. Testing can be done with `zig test`.
To make development even faster, a build option is provided to avoid the use of
LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
can reduce build/test time 300%. Note, however, native code generation in zig
is not yet complete, so you may see errors.
Using
-----
@ -61,8 +53,7 @@ Using
This is designed for use with the Zig package manager, and exposes a module
called "aws". Set up `build.zig.zon` and add the dependency/module to your project
as normal and the package manager should do its thing. A full example can be found
in [/example](example/build.zig.zon). This can also be used at build time in
a downstream project's `build.zig`.
in [/example](example/README.md).
Configuring the module and/or Running the demo
----------------------------------------------
@ -70,8 +61,8 @@ Configuring the module and/or Running the demo
This library mimics the aws c libraries for it's work, so it operates like most
other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
for working with services. For local testing or alternative endpoints, there's
no real standard, so there is code to look for an environment variable
`AWS_ENDPOINT_URL` variable that will supersede all other configuration.
no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
variable that will supersede all other configuration.
Limitations
-----------
@ -91,7 +82,13 @@ TODO List:
* Implement jitter/exponential backoff
* Implement timeouts and other TODO's in the code
* Add option to cache signature keys
* Add CBOR support
Services without TLS 1.3 support
--------------------------------
All AWS services should support TLS 1.3 at this point, but there are many regions
and several partitions, and not all of them have been tested, so your mileage
may vary. If something doesn't work, please submit an issue to let others know.
Dependency tree
---------------

269
build.zig
View file

@ -1,4 +1,5 @@
const std = @import("std");
const builtin = @import("builtin");
const Builder = @import("std").Build;
const models_subdir = "codegen/sdk-codegen/aws-models/"; // note will probably not work on windows
@ -18,7 +19,14 @@ const test_targets = [_]std.Target.Query{
};
pub fn build(b: *Builder) !void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const optimize = b.standardOptimizeOption(.{});
const no_llvm = b.option(
@ -26,36 +34,62 @@ pub fn build(b: *Builder) !void {
"no-llvm",
"Disable LLVM",
) orelse false;
const broken_windows = b.option(
bool,
"broken-windows",
"Windows is broken in this environment (do not run Windows tests)",
) orelse false;
const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
const test_filters: []const []const u8 = b.option(
[]const []const u8,
"test-filter",
"Skip tests that do not match any of the specified filters",
) orelse &.{};
const dep_mods = try getDependencyModules(b, .{
.target = target,
.optimize = optimize,
});
const mod_exe = b.createModule(.{
// TODO: Embed the current git version in the code. We can do this
// by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
// grab that commit, and use b.addOptions/exe.addOptions to generate the
// Options file. See https://github.com/ziglang/zig/issues/14979 for usage
// example.
//
// From there, I'm not sure what the generated file looks like or quite how
// to use, but that should be easy. It may also give some ideas on the
// code gen piece itself, though it might be nice to leave as a separate
// executable
// TODO: This executable should not be built when importing as a package.
// It relies on code gen and is all fouled up when getting imported
const exe = b.addExecutable(.{
.name = "demo",
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
configure(mod_exe, dep_mods, true);
const exe = b.addExecutable(.{
.name = "demo",
.root_module = mod_exe,
.use_llvm = !no_llvm,
exe.use_llvm = !no_llvm;
const smithy_dep = b.dependency("smithy", .{
// These are the arguments to the dependency. It expects a target and optimization level.
.target = target,
.optimize = optimize,
});
const smithy_module = smithy_dep.module("smithy");
exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
// Expose module to others
_ = b.addModule("aws", .{
.root_source_file = b.path("src/aws.zig"),
.imports = &.{.{ .name = "smithy", .module = smithy_module }},
});
// Expose module to others
_ = b.addModule("aws-signing", .{
.root_source_file = b.path("src/aws_signing.zig"),
.imports = &.{.{ .name = "smithy", .module = smithy_module }},
});
// TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
//
// We are working here with kind of a weird dependency though. So we can do this
// another way
//
// TODO: These target/optimize are not correct, as we need to run the thing
// const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
// .target = target,
// .optimize = optimize,
// });
// const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
// exe.step.dependOn(&codegen_cmd.step);
const run_cmd = b.addRunArtifact(exe);
run_cmd.step.dependOn(b.getInstallStep());
@ -66,81 +100,62 @@ pub fn build(b: *Builder) !void {
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
const cg = b.step("gen", "Generate zig service code from smithy models");
const gen_step = blk: {
const cg = b.step("gen", "Generate zig service code from smithy models");
const cg_mod = b.createModule(.{
.root_source_file = b.path("codegen/src/main.zig"),
// We need this generated for the host, not the real target
.target = b.graph.host,
.optimize = if (b.verbose) .Debug else .ReleaseSafe,
});
configure(cg_mod, dep_mods, false);
const cg_exe = b.addExecutable(.{
.name = "codegen",
.root_source_file = b.path("codegen/src/main.zig"),
// We need this generated for the host, not the real target
.target = b.graph.host,
.optimize = if (b.verbose) .Debug else .ReleaseSafe,
});
cg_exe.use_llvm = !no_llvm;
cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
var cg_cmd = b.addRunArtifact(cg_exe);
cg_cmd.addArg("--models");
const hash = hash_blk: {
for (b.available_deps) |dep| {
const dep_name = dep.@"0";
const dep_hash = dep.@"1";
if (std.mem.eql(u8, dep_name, "models"))
break :hash_blk dep_hash;
}
return error.DependencyNamedModelsNotFoundInBuildZigZon;
};
cg_cmd.addArg(try std.fs.path.join(
b.allocator,
&[_][]const u8{
b.graph.global_cache_root.path.?,
"p",
hash,
models_subdir,
},
));
cg_cmd.addArg("--output");
cg_cmd.addDirectoryArg(b.path("src/models"));
if (b.verbose)
cg_cmd.addArg("--verbose");
// cg_cmd.step.dependOn(&fetch_step.step);
// TODO: this should use zig_exe from std.Build
// codegen should store a hash in a comment
// this would be hash of the exe that created the file
// concatenated with hash of input json. this would
// allow skipping generated files. May not include hash
// of contents of output file as maybe we want to tweak
// manually??
//
// All the hashes can be in service_manifest.zig, which
// could be fun to just parse and go nuts. Top of
// file, generator exe hash. Each import has comment
// with both input and output hash and we can decide
// later about warning on manual changes...
const cg_exe = b.addExecutable(.{
.name = "codegen",
.root_module = cg_mod,
});
var cg_cmd = b.addRunArtifact(cg_exe);
cg_cmd.addArg("--models");
cg_cmd.addArg(try std.fs.path.join(
b.allocator,
&[_][]const u8{
try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
models_subdir,
},
));
cg_cmd.addArg("--output");
const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
if (b.verbose) {
cg_cmd.addArg("--verbose");
}
if (!no_bin) {
b.installArtifact(cg_exe);
}
// cg_cmd.step.dependOn(&fetch_step.step);
// TODO: this should use zig_exe from std.Build
// codegen should store a hash in a comment
// this would be hash of the exe that created the file
// concatenated with hash of input json. this would
// allow skipping generated files. May not include hash
// of contents of output file as maybe we want to tweak
// manually??
//
// All the hashes can be in service_manifest.zig, which
// could be fun to just parse and go nuts. Top of
// file, generator exe hash. Each import has comment
// with both input and output hash and we can decide
// later about warning on manual changes...
cg.dependOn(&cg_cmd.step);
break :blk cg;
};
cg.dependOn(&cg_cmd.step);
exe.step.dependOn(cg);
// This allows us to have each module depend on the
// generated service manifest.
const service_manifest_module = b.createModule(.{
.root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
.target = target,
.optimize = optimize,
});
configure(service_manifest_module, dep_mods, true);
mod_exe.addImport("service_manifest", service_manifest_module);
// Expose module to others
const mod_aws = b.addModule("aws", .{
.root_source_file = b.path("src/aws.zig"),
.target = target,
.optimize = optimize,
});
mod_aws.addImport("service_manifest", service_manifest_module);
configure(mod_aws, dep_mods, true);
// Expose module to others
const mod_aws_signing = b.addModule("aws-signing", .{
.root_source_file = b.path("src/aws_signing.zig"),
});
configure(mod_aws_signing, dep_mods, false);
exe.step.dependOn(gen_step);
// Similar to creating the run step earlier, this exposes a `test` step to
// the `zig build --help` menu, providing a way for the user to request
@ -163,23 +178,15 @@ pub fn build(b: *Builder) !void {
// test_step.dependOn(&run_unit_tests.step);
for (test_targets) |t| {
if (broken_windows and t.os_tag == .windows) continue;
const mod_unit_tests = b.createModule(.{
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_source_file = b.path("src/aws.zig"),
.target = b.resolveTargetQuery(t),
.optimize = optimize,
});
mod_unit_tests.addImport("service_manifest", service_manifest_module);
configure(mod_unit_tests, dep_mods, true);
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_module = mod_unit_tests,
.filters = test_filters,
});
unit_tests.step.dependOn(cg);
unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
unit_tests.step.dependOn(gen_step);
unit_tests.use_llvm = !no_llvm;
const run_unit_tests = b.addRunArtifact(unit_tests);
@ -198,56 +205,16 @@ pub fn build(b: *Builder) !void {
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const smoke_test = b.addTest(.{
.root_module = mod_aws,
.filters = test_filters,
.root_source_file = b.path("src/aws.zig"),
.target = target,
.optimize = optimize,
});
smoke_test.use_llvm = !no_llvm;
smoke_test.step.dependOn(cg);
smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
smoke_test.step.dependOn(gen_step);
const run_smoke_test = b.addRunArtifact(smoke_test);
smoke_test_step.dependOn(&run_smoke_test.step);
if (no_bin) {
b.getInstallStep().dependOn(&exe.step);
} else {
b.installArtifact(exe);
}
}
fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.Module), include_time: bool) void {
compile.addImport("smithy", modules.get("smithy").?);
compile.addImport("date", modules.get("date").?);
compile.addImport("json", modules.get("json").?);
compile.addImport("case", modules.get("case").?);
if (include_time) compile.addImport("zeit", modules.get("zeit").?);
}
fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Build.Module) {
var result = std.StringHashMap(*std.Build.Module).init(b.allocator);
// External dependencies
const dep_smithy = b.dependency("smithy", args);
const mod_smithy = dep_smithy.module("smithy");
try result.putNoClobber("smithy", mod_smithy);
const dep_zeit = b.dependency("zeit", args);
const mod_zeit = dep_zeit.module("zeit");
try result.putNoClobber("zeit", mod_zeit);
const dep_case = b.dependency("case", args);
const mod_case = dep_case.module("case");
try result.putNoClobber("case", mod_case);
// End External dependencies
// Private modules/dependencies
const dep_json = b.dependency("json", args);
const mod_json = dep_json.module("json");
try result.putNoClobber("json", mod_json);
const dep_date = b.dependency("date", args);
const mod_date = dep_date.module("date");
try result.putNoClobber("date", mod_date);
// End private modules/dependencies
return result;
b.installArtifact(exe);
}

View file

@ -1,40 +1,22 @@
.{
.name = .aws,
.name = "aws",
.version = "0.0.1",
.fingerprint = 0x1f26b7b27005bb49,
.paths = .{
"build.zig",
"build.zig.zon",
"src",
"codegen",
"lib",
"README.md",
"LICENSE",
},
.minimum_zig_version = "0.15.1",
.dependencies = .{
.smithy = .{
.url = "git+https://git.lerch.org/lobo/smithy.git#09c0a618877ebaf8e15fbfc505983876f4e063d5",
.hash = "smithy-1.0.0-uAyBgTnTAgBp2v6vypGcK5-YOCtxs2iEqR-4LfC5FTlS",
.url = "https://git.lerch.org/lobo/smithy/archive/3ed98751bc414e005af6ad185feb213d4366c0db.tar.gz",
.hash = "12204a784751a4ad5ed6c8955ba91fcbc4a3cad6c5a7da38f39abf074ef801d13172",
},
.models = .{
.url = "https://github.com/aws/aws-sdk-go-v2/archive/refs/tags/release-2025-05-05.tar.gz",
.hash = "N-V-__8AAKWdeiawujEcrfukQbb8lLAiQIRT0uG5gCcm4b7W",
},
.zeit = .{
.url = "git+https://github.com/rockorager/zeit?ref=zig-0.15#ed2ca60db118414bda2b12df2039e33bad3b0b88",
.hash = "zeit-0.6.0-5I6bk0J9AgCVa0nnyL0lNY9Xa9F68hHq-ZarhuXNV-Jb",
},
.date = .{
.path = "lib/date",
},
.json = .{
.path = "lib/json",
},
.case = .{
.url = "git+https://github.com/travisstaloch/case.git#f8003fe5f93b65f673d10d41323e347225e8cb87",
.hash = "case-0.0.1-chGYqx_EAADaGJjmoln5M1iMBDTrMdd8to5wdEVpfXm4",
.url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
.hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
},
},
}

View file

@ -1,19 +1,11 @@
.{
.name = .codegen,
.name = "aws-zig-codegen",
.version = "0.0.1",
.paths = .{
"build.zig",
"build.zig.zon",
"src",
"README.md",
"LICENSE",
},
.fingerprint = 0x41c2ec2d551fe279,
.dependencies = .{
.smithy = .{
.url = "git+https://git.lerch.org/lobo/smithy.git#09c0a618877ebaf8e15fbfc505983876f4e063d5",
.hash = "smithy-1.0.0-uAyBgTnTAgBp2v6vypGcK5-YOCtxs2iEqR-4LfC5FTlS",
.url = "https://git.lerch.org/lobo/smithy/archive/41b61745d25a65817209dd5dddbb5f9b66896a99.tar.gz",
.hash = "122087deb0ae309b2258d59b40d82fe5921fdfc35b420bb59033244851f7f276fa34",
},
},
}

View file

@ -1,10 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
const FileGenerationState = @This();
protocol: smithy.AwsProtocol,
shapes: std.StringHashMap(smithy.ShapeInfo),
shape_references: std.StringHashMap(u64),
additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
additional_types_generated: *std.StringHashMap(void),

View file

@ -1,21 +0,0 @@
const std = @import("std");
const case = @import("case");
const GenerateTypeOptions = @This();
end_structure: bool,
key_case: case.Case,
pub fn endStructure(self: @This(), value: bool) GenerateTypeOptions {
return .{
.end_structure = value,
.key_case = self.key_case,
};
}
pub fn keyCase(self: @This(), value: case.Case) GenerateTypeOptions {
return .{
.end_structure = self.end_structure,
.key_case = value,
};
}

View file

@ -1,53 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
const FileGenerationState = @import("FileGenerationState.zig");
const GenerationState = @This();
type_stack: *std.ArrayList(*const smithy.ShapeInfo),
file_state: FileGenerationState,
// we will need some sort of "type decls needed" for recursive structures
allocator: std.mem.Allocator,
indent_level: u64,
pub fn appendToTypeStack(self: @This(), shape_info: *const smithy.ShapeInfo) !void {
try self.type_stack.append(self.allocator, shape_info);
}
pub fn popFromTypeStack(self: @This()) void {
_ = self.type_stack.pop();
}
pub fn getTypeRecurrenceCount(self: @This(), id: []const u8) u8 {
var self_occurences: u8 = 0;
for (self.type_stack.items) |i| {
if (std.mem.eql(u8, i.id, id)) {
self_occurences += 1;
}
}
return self_occurences;
}
pub fn indent(self: @This()) GenerationState {
var new_state = self.clone();
new_state.indent_level += 1;
return new_state;
}
pub fn deindent(self: @This()) GenerationState {
var new_state = self.clone();
new_state.indent_level = @max(0, new_state.indent_level - 1);
return new_state;
}
pub fn clone(self: @This()) GenerationState {
return GenerationState{
.type_stack = self.type_stack,
.file_state = self.file_state,
.allocator = self.allocator,
.indent_level = self.indent_level,
};
}

View file

@ -20,6 +20,7 @@ const multihash_len = 1 + 1 + Hash.digest_length;
pub const hex_multihash_len = 2 * multihash_len;
pub const digest_len = Hash.digest_length;
const MultiHashHexDigest = [hex_multihash_len]u8;
const MultihashFunction = enum(u16) {
identity = 0x00,
sha1 = 0x11,
@ -69,7 +70,7 @@ pub fn hex64(x: u64) [16]u8 {
var result: [16]u8 = undefined;
var i: usize = 0;
while (i < 8) : (i += 1) {
const byte: u8 = @truncate(x >> @as(u6, @intCast(8 * i)));
const byte = @as(u8, @truncate(x >> @as(u6, @intCast(8 * i))));
result[i * 2 + 0] = hex_charset[byte >> 4];
result[i * 2 + 1] = hex_charset[byte & 15];
}
@ -107,9 +108,8 @@ pub fn computeDirectoryHash(
const arena = arena_instance.allocator();
// Collect all files, recursively, then sort.
// Normally we're looking at around 300 model files
var all_files = try std.ArrayList(*HashedFile).initCapacity(gpa, 300);
defer all_files.deinit(gpa);
var all_files = std.ArrayList(*HashedFile).init(gpa);
defer all_files.deinit();
var walker = try dir.walk(gpa);
defer walker.deinit();
@ -140,7 +140,7 @@ pub fn computeDirectoryHash(
wait_group.start();
try thread_pool.spawn(workerHashFile, .{ dir, hashed_file, &wait_group });
try all_files.append(gpa, hashed_file);
try all_files.append(hashed_file);
}
}
@ -156,7 +156,7 @@ pub fn computeDirectoryHash(
hasher.update(&hashed_file.hash);
}
if (any_failures) return error.DirectoryHashUnavailable;
if (options.needFileHashes) options.fileHashes = try all_files.toOwnedSlice(gpa);
if (options.needFileHashes) options.fileHashes = try all_files.toOwnedSlice();
return hasher.finalResult();
}
fn workerHashFile(dir: std.fs.Dir, hashed_file: *HashedFile, wg: *std.Thread.WaitGroup) void {

150
codegen/src/json.zig Normal file
View file

@ -0,0 +1,150 @@
const std = @import("std");
// options is a json.Options, but since we're using our hacked json.zig we don't want to
// specifically call this out
/// Serialize `map` as a JSON object member named `key`. Returns false
/// (emitting nothing) when an optional map is null, true otherwise.
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    // Comptime-unwrap an optional map; a null map serializes nothing.
    if (@typeInfo(@TypeOf(map)) == .optional) {
        const unwrapped = map orelse return false;
        return serializeMapInternal(unwrapped, key, options, out_stream);
    }
    return serializeMapInternal(map, key, options, out_stream);
}
/// Emit `"key": { ... }` for a slice of key/value pair structs.
/// Entries whose key or value is null are skipped. Always returns true.
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    // Child options are indented one level deeper for the map's entries.
    var child_options = options;
    if (child_options.whitespace) |*child_ws|
        child_ws.indent_level += 1;

    // Common prefix for both the empty and non-empty case:  "<key>": {
    // (the original duplicated this block in each branch).
    try out_stream.writeByte('"');
    try out_stream.writeAll(key);
    _ = try out_stream.write("\":");
    if (options.whitespace) |ws| {
        if (ws.separator) {
            try out_stream.writeByte(' ');
        }
    }
    try out_stream.writeByte('{');

    // Empty maps serialize as "{}" with no inner whitespace.
    if (map.len == 0) {
        try out_stream.writeByte('}');
        return true;
    }

    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
    if (options.whitespace) |_|
        try out_stream.writeByte('\n');
    for (map, 0..) |tag, i| {
        if (tag.key == null or tag.value == null) continue;
        // TODO: Deal with escaping and general "json.stringify" the values...
        if (child_options.whitespace) |ws|
            try ws.outputIndent(out_stream);
        try out_stream.writeByte('"');
        try jsonEscape(tag.key.?, child_options, out_stream);
        _ = try out_stream.write("\":");
        if (child_options.whitespace) |ws| {
            if (ws.separator) {
                try out_stream.writeByte(' ');
            }
        }
        try out_stream.writeByte('"');
        try jsonEscape(tag.value.?, child_options, out_stream);
        try out_stream.writeByte('"');
        // No trailing comma after the last entry.
        if (i < map.len - 1) {
            try out_stream.writeByte(',');
        }
        if (child_options.whitespace) |_|
            try out_stream.writeByte('\n');
    }
    if (options.whitespace) |ws|
        try ws.outputIndent(out_stream);
    try out_stream.writeByte('}');
    return true;
}
// code within jsonEscape lifted from json.zig in stdlib
/// Write `value` to `out_stream` with JSON string escaping applied.
/// `options.string.String` controls solidus and unicode escaping.
/// Assumes `value` is valid UTF-8 (decode failures hit `unreachable`).
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
    var i: usize = 0;
    while (i < value.len) : (i += 1) {
        switch (value[i]) {
            // normal ascii character
            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
            // only 2 characters that *must* be escaped
            '\\' => try out_stream.writeAll("\\\\"),
            '\"' => try out_stream.writeAll("\\\""),
            // solidus is optional to escape
            '/' => {
                if (options.string.String.escape_solidus) {
                    try out_stream.writeAll("\\/");
                } else {
                    try out_stream.writeByte('/');
                }
            },
            // control characters with short escapes
            // TODO: option to switch between unicode and 'short' forms?
            0x8 => try out_stream.writeAll("\\b"),
            0xC => try out_stream.writeAll("\\f"),
            '\n' => try out_stream.writeAll("\\n"),
            '\r' => try out_stream.writeAll("\\r"),
            '\t' => try out_stream.writeAll("\\t"),
            else => {
                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                if (ulen == 1 or options.string.String.escape_unicode) {
                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                    try outputUnicodeEscape(codepoint, out_stream);
                } else {
                    try out_stream.writeAll(value[i .. i + ulen]);
                }
                // Skip the continuation bytes of a multi-byte codepoint.
                i += ulen - 1;
            },
        }
    }
}
// outputUnicodeEscape and assert lifted from json.zig in stdlib
/// Write a codepoint as a JSON \uXXXX escape; codepoints outside the
/// BMP are written as a UTF-16 surrogate pair (two \uXXXX escapes).
fn outputUnicodeEscape(
    codepoint: u21,
    out_stream: anytype,
) !void {
    if (codepoint <= 0xFFFF) {
        // If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
        // then it may be represented as a six-character sequence: a reverse solidus, followed
        // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    } else {
        assert(codepoint <= 0x10FFFF);
        // To escape an extended character that is not in the Basic Multilingual Plane,
        // the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
        const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
        const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    }
}
/// This function invokes undefined behavior when `ok` is `false`.
/// In Debug and ReleaseSafe modes, calls to this function are always
/// generated, and the `unreachable` statement triggers a panic.
/// In ReleaseFast and ReleaseSmall modes, calls to this function are
/// optimized away, and in fact the optimizer is able to use the assertion
/// in its heuristics.
/// Inside a test block, it is best to use the `std.testing` module rather
/// than this function, because this function may not detect a test failure
/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
/// function is the correct function to use.
///
/// (Local copy of std.debug.assert; see the "lifted from stdlib" note above.)
pub fn assert(ok: bool) void {
    if (!ok) unreachable; // assertion failure
}

File diff suppressed because it is too large Load diff

View file

@ -1 +0,0 @@
pub const json = @import("serialization/json.zig");

View file

@ -1,392 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
const smithy_tools = @import("../smithy_tools.zig");
const support = @import("../support.zig");
const GenerationState = @import("../GenerationState.zig");
const GenerateTypeOptions = @import("../GenerateTypeOptions.zig");
const Allocator = std.mem.Allocator;
const Shape = smithy_tools.Shape;
/// A structure/union member that participates in JSON serialization,
/// with both its Zig field name and its wire (JSON) key resolved.
const JsonMember = struct {
    field_name: []const u8, // snake_case Zig field name
    json_key: []const u8, // wire key (json_name trait if present, else member name)
    target: []const u8, // shape id of the member's type
    type_member: smithy.TypeMember,
    shape_info: smithy.ShapeInfo,
};
/// Emit a `jsonStringify` method for the shape identified by `shape_id`
/// into `writer`. Nothing is emitted when the protocol is not JSON or
/// the shape has no JSON-serializable members.
pub fn generateToJsonFunction(shape_id: []const u8, writer: *std.Io.Writer, state: GenerationState, comptime options: GenerateTypeOptions) !void {
    _ = options;
    const allocator = state.allocator;
    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;
    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            try writer.writeAll("pub fn jsonStringify(self: @This(), jw: anytype) !void {\n");
            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");
            for (json_members.items) |member| {
                // member_value is a generated expression like @field(self, "name")
                const member_value = try getMemberValueJson(allocator, "self", member);
                defer allocator.free(member_value);
                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }
            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");
            try writer.writeAll("}\n\n");
        }
    }
}
/// Collect the members of `shape` that should appear in the JSON body.
/// Members bound to HTTP headers or query parameters are excluded.
/// Returns null for non-JSON protocols or when no members remain.
/// Caller owns the returned list (items allocated with `allocator`).
fn getJsonMembers(allocator: Allocator, shape: Shape, state: GenerationState) !?std.ArrayListUnmanaged(JsonMember) {
    const is_json_shape = switch (state.file_state.protocol) {
        .json_1_0, .json_1_1, .rest_json_1 => true,
        else => false,
    };
    if (!is_json_shape) {
        return null;
    }

    // Temporary name -> member index; entries are copied out below, so
    // the map itself must be freed (the original leaked it).
    var hash_map = std.StringHashMapUnmanaged(smithy.TypeMember){};
    defer hash_map.deinit(state.allocator);
    const shape_members = smithy_tools.getShapeMembers(shape);
    for (shape_members) |member| {
        try hash_map.putNoClobber(state.allocator, member.name, member);
    }

    // Remove members that are serialized outside the JSON body.
    for (shape_members) |member| {
        for (member.traits) |trait| {
            switch (trait) {
                .http_header, .http_query => {
                    std.debug.assert(hash_map.remove(member.name));
                    break;
                },
                else => continue,
            }
        }
    }

    if (hash_map.count() == 0) {
        return null;
    }

    var json_members = std.ArrayListUnmanaged(JsonMember){};
    var iter = hash_map.iterator();
    while (iter.next()) |kvp| {
        const member = kvp.value_ptr.*;
        // Prefer the json_name trait for the wire key, falling back to
        // the member's model name.
        const key = blk: {
            if (smithy_tools.findTrait(.json_name, member.traits)) |trait| {
                break :blk trait.json_name;
            }
            break :blk member.name;
        };
        try json_members.append(allocator, .{
            .field_name = try support.constantName(allocator, member.name, .snake),
            .json_key = key,
            .target = member.target,
            .type_member = member,
            .shape_info = try smithy_tools.getShapeInfo(member.target, state.file_state.shapes),
        });
    }
    return json_members;
}
/// Build the generated-code expression that reads `member` from
/// `source` (e.g. `@field(self, "field_name")`). Caller owns the
/// returned slice.
fn getMemberValueJson(allocator: std.mem.Allocator, source: []const u8, member: JsonMember) ![]const u8 {
    const member_value = try std.fmt.allocPrint(allocator, "@field({s}, \"{s}\")", .{ source, member.field_name });
    defer allocator.free(member_value);

    var output_block = std.Io.Writer.Allocating.init(allocator);
    defer output_block.deinit();

    try writeMemberValue(
        &output_block.writer,
        member_value,
    );

    return output_block.toOwnedSlice();
}
/// Map a scalar smithy shape onto the json value-type tag used by the
/// serializer. Panics for non-scalar shapes (a codegen bug).
fn getShapeJsonValueType(shape: Shape) []const u8 {
    switch (shape) {
        .string, .@"enum", .blob, .document, .timestamp => return ".string",
        .boolean => return ".bool",
        .integer, .bigInteger, .short, .long => return ".integer",
        .float, .double, .bigDecimal => return ".float",
        else => std.debug.panic("Unexpected shape: {}", .{shape}),
    }
}
/// Write a member-access expression into the generated code. Currently
/// a straight pass-through; kept as a seam for future value formatting.
fn writeMemberValue(
    writer: *std.Io.Writer,
    member_value: []const u8,
) !void {
    return writer.writeAll(member_value);
}
/// Parameters for writeMemberJson and its shape-specific helpers.
const WriteMemberJsonParams = struct {
    shape_id: []const u8, // shape id of the member's type
    field_name: []const u8, // snake_case field name (used for generated locals)
    field_value: []const u8, // generated expression that yields the value
    state: GenerationState,
    member: smithy.TypeMember,
};
/// Emit generated code that serializes a structure/union member as a
/// JSON object, wrapping in a null check when the member is optional.
fn writeStructureJson(params: WriteMemberJsonParams, writer: *std.Io.Writer) !void {
    const shape_type = "structure";
    const allocator = params.state.allocator;
    const state = params.state;

    const shape_info = try smithy_tools.getShapeInfo(params.shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    // Unique-per-nesting-level names for generated locals.
    const structure_name = try std.fmt.allocPrint(params.state.allocator, "{s}_{s}_{d}", .{ params.field_name, shape_type, state.indent_level });
    defer params.state.allocator.free(structure_name);

    const object_value_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{structure_name});
    defer allocator.free(object_value_capture);

    try writer.print("\n// start {s}: {s}\n", .{ shape_type, structure_name });
    defer writer.print("// end {s}: {s}\n", .{ shape_type, structure_name }) catch std.debug.panic("Unreachable", .{});

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            const is_optional = smithy_tools.shapeIsOptional(params.member.traits);

            var object_value = params.field_value;

            if (is_optional) {
                object_value = object_value_capture;

                try writer.print("if ({s}) |{s}|", .{ params.field_value, object_value_capture });
                try writer.writeAll("{\n");
            }

            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            // this is a workaround in case a child structure doesn't have any fields
            // and therefore doesn't use the structure variable so we capture it here.
            // the compiler should optimize this away
            try writer.print("const unused_capture_{s} = {s};\n", .{ structure_name, object_value });
            try writer.print("_ = unused_capture_{s};\n", .{structure_name});

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, object_value, member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");

            if (is_optional) {
                // Optional member that is null serializes as JSON null.
                try writer.writeAll("} else {\n");
                try writer.writeAll("try jw.write(null);\n");
                try writer.writeAll("}\n");
            }
        }
    }
}
/// Emit generated code that serializes a list member as a JSON array,
/// wrapping in a null check when the list itself is optional.
fn writeListJson(list: smithy_tools.ListShape, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const state = params.state;
    const allocator = state.allocator;

    const list_name = try std.fmt.allocPrint(allocator, "{s}_list_{d}", .{ params.field_name, state.indent_level });
    defer state.allocator.free(list_name);

    try writer.print("\n// start list: {s}\n", .{list_name});
    defer writer.print("// end list: {s}\n", .{list_name}) catch std.debug.panic("Unreachable", .{});

    const list_each_value = try std.fmt.allocPrint(allocator, "{s}_value", .{list_name});
    defer allocator.free(list_each_value);

    const list_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{list_name});
    defer allocator.free(list_capture);

    {
        const list_is_optional = smithy_tools.shapeIsOptional(list.traits);

        var list_value = params.field_value;

        if (list_is_optional) {
            list_value = list_capture;

            try writer.print("if ({s}) |{s}| ", .{
                params.field_value,
                list_capture,
            });
            try writer.writeAll("{\n");
        }

        // start loop
        try writer.writeAll("try jw.beginArray();\n");
        try writer.print("for ({s}) |{s}|", .{ list_value, list_each_value });
        try writer.writeAll("{\n");
        try writer.writeAll("try jw.write(");
        try writeMemberValue(
            writer,
            list_each_value,
        );
        try writer.writeAll(");\n");
        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endArray();\n");
        // end loop

        if (list_is_optional) {
            // Null optional list serializes as JSON null.
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}
/// Emit generated code that serializes a map member as a JSON object,
/// wrapping in a null check when the map member is not `required`.
fn writeMapJson(map: smithy_tools.MapShape, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const state = params.state;
    const name = params.field_name;
    const value = params.field_value;
    const allocator = state.allocator;

    const map_name = try std.fmt.allocPrint(allocator, "{s}_object_map_{d}", .{ name, state.indent_level });
    defer allocator.free(map_name);

    try writer.print("\n// start map: {s}\n", .{map_name});
    defer writer.print("// end map: {s}\n", .{map_name}) catch std.debug.panic("Unreachable", .{});

    const map_value_capture = try std.fmt.allocPrint(allocator, "{s}_kvp", .{map_name});
    defer allocator.free(map_value_capture);

    const map_capture_key = try std.fmt.allocPrint(allocator, "{s}.key", .{map_value_capture});
    defer allocator.free(map_capture_key);

    const map_capture_value = try std.fmt.allocPrint(allocator, "{s}.value", .{map_value_capture});
    defer allocator.free(map_capture_value);

    const value_shape_info = try smithy_tools.getShapeInfo(map.value, state.file_state.shapes);
    const value_member = smithy.TypeMember{
        .name = "value",
        .target = map.value,
        .traits = smithy_tools.getShapeTraits(value_shape_info.shape),
    };

    const map_capture = try std.fmt.allocPrint(state.allocator, "{s}_capture", .{map_name});
    // Every other allocPrint in this function is paired with a free;
    // this one was missing and leaked on each call.
    defer state.allocator.free(map_capture);

    {
        const map_member = params.member;
        const map_is_optional = !smithy_tools.hasTrait(.required, map_member.traits);

        var map_value = value;

        if (map_is_optional) {
            map_value = map_capture;

            try writer.print("if ({s}) |{s}| ", .{
                value,
                map_capture,
            });
            try writer.writeAll("{\n");
        }

        try writer.writeAll("try jw.beginObject();\n");
        try writer.writeAll("{\n");

        // start loop
        try writer.print("for ({s}) |{s}|", .{ map_value, map_value_capture });
        try writer.writeAll("{\n");
        try writer.print("try jw.objectField({s});\n", .{map_capture_key});

        try writeMemberJson(.{
            .shape_id = map.value,
            .field_name = "value",
            .field_value = map_capture_value,
            .state = state.indent(),
            .member = value_member,
        }, writer);

        try writer.writeAll("}\n");
        // end loop

        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endObject();\n");

        if (map_is_optional) {
            // Null optional map serializes as JSON null.
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}
/// Emit generated code that writes a scalar value directly via
/// `jw.write`; `comment` records the smithy shape kind for readability
/// of the generated source.
fn writeScalarJson(comment: []const u8, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    return writer.print("try jw.write({s}); // {s}\n\n", .{ params.field_value, comment });
}
/// Dispatch serialization codegen by shape kind. Recursion through
/// self-referential types is cut off after the same shape appears more
/// than twice on the type stack.
fn writeMemberJson(params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const shape_id = params.shape_id;
    const state = params.state;
    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    // recursion base case: stop emitting once this shape is already
    // nested inside itself more than twice
    if (state.getTypeRecurrenceCount(shape_id) > 2) {
        return;
    }

    try state.appendToTypeStack(&shape_info);
    defer state.popFromTypeStack();

    switch (shape) {
        .structure, .uniontype => try writeStructureJson(params, writer),
        .list => |l| try writeListJson(l, params, writer),
        .map => |m| try writeMapJson(m, params, writer),
        .timestamp => try writeScalarJson("timestamp", params, writer),
        .string => try writeScalarJson("string", params, writer),
        .@"enum" => try writeScalarJson("enum", params, writer),
        .document => try writeScalarJson("document", params, writer),
        .blob => try writeScalarJson("blob", params, writer),
        .boolean => try writeScalarJson("bool", params, writer),
        .float => try writeScalarJson("float", params, writer),
        .integer => try writeScalarJson("integer", params, writer),
        .long => try writeScalarJson("long", params, writer),
        .double => try writeScalarJson("double", params, writer),
        .bigDecimal => try writeScalarJson("bigDecimal", params, writer),
        .bigInteger => try writeScalarJson("bigInteger", params, writer),
        .unit => try writeScalarJson("unit", params, writer),
        .byte => try writeScalarJson("byte", params, writer),
        .short => try writeScalarJson("short", params, writer),
        .service, .resource, .operation, .member, .set => std.debug.panic("Shape type not supported: {}", .{shape}),
    }
}

View file

@ -1,67 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
// Convenience aliases extracted from the smithy ShapeInfo union so
// helpers below can pattern-match on payload types directly.
pub const Shape = @FieldType(smithy.ShapeInfo, "shape");
pub const ServiceShape = @TypeOf((Shape{ .service = undefined }).service);
pub const ListShape = @TypeOf((Shape{ .list = undefined }).list);
pub const MapShape = @TypeOf((Shape{ .map = undefined }).map);
/// Look up a shape by its Smithy ID. A miss indicates a bug in the
/// model or the generator, so it is logged before returning an error.
pub fn getShapeInfo(id: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo)) !smithy.ShapeInfo {
    if (shapes.get(id)) |info| return info;
    std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
    return error.InvalidType;
}
/// Return the traits of any shape that carries a `traits` field.
/// Service/operation/resource shapes have a different layout and panic.
pub fn getShapeTraits(shape: Shape) []smithy.Trait {
    return switch (shape) {
        .service, .operation, .resource => std.debug.panic("Unexpected shape type: {}", .{shape}),
        inline else => |s| s.traits,
    };
}
/// Return the member list of a structure or union shape; any other
/// shape kind has no members and panics.
pub fn getShapeMembers(shape: Shape) []smithy.TypeMember {
    return switch (shape) {
        inline .structure, .uniontype => |s| s.members,
        else => std.debug.panic("Unexpected shape type: {}", .{shape}),
    };
}
/// True for scalar-like shapes that contain no nested members.
pub fn shapeIsLeaf(shape: Shape) bool {
    switch (shape) {
        .@"enum",
        .bigDecimal,
        .bigInteger,
        .blob,
        .boolean,
        .byte,
        .document,
        .double,
        .float,
        .integer,
        .long,
        .short,
        .string,
        .timestamp,
        => return true,
        else => return false,
    }
}
/// A member is optional unless it carries the `required` trait.
pub fn shapeIsOptional(traits: []smithy.Trait) bool {
    return !hasTrait(.required, traits);
}
/// Return the first trait of the given kind, or null if absent.
/// Linear scan — trait lists are tiny, so no index is warranted.
pub fn findTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) ?smithy.Trait {
    for (traits) |candidate| {
        if (candidate == trait_type) return candidate;
    }
    return null;
}
/// True when a trait of the given kind is present.
pub fn hasTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) bool {
    return findTrait(trait_type, traits) != null;
}

157
codegen/src/snake.zig Normal file
View file

@ -0,0 +1,157 @@
const std = @import("std");
const expectEqualStrings = std.testing.expectEqualStrings;
/// Convert a PascalCase (optionally space-containing) name to
/// snake_case. Only Latin-1 input is supported (isAscii errors
/// otherwise). Caller owns the returned slice.
pub fn fromPascalCase(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    const rc = try allocator.alloc(u8, name.len * 2); // This is overkill, but is > the maximum length possibly needed
    errdefer allocator.free(rc);
    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
    var target_inx: u64 = 0;
    // First character is always emitted lowercased.
    var curr_char = (try isAscii(utf8_name.nextCodepoint())).?;
    target_inx = setNext(lowercase(curr_char), rc, target_inx);
    var prev_char = curr_char;
    if (try isAscii(utf8_name.nextCodepoint())) |ch| {
        curr_char = ch;
    } else {
        // Single character only - we're done here
        _ = setNext(0, rc, target_inx);
        return rc[0..target_inx];
    }
    // Main loop looks one character ahead (next_char) of the character
    // being emitted (curr_char), with prev_char for acronym detection.
    while (try isAscii(utf8_name.nextCodepoint())) |next_char| {
        if (next_char == ' ') {
            // a space shouldn't be happening. But if it does, it clues us
            // in pretty well:
            //
            // MyStuff Is Awesome
            //        |^
            //        |next_char
            //        ^
            //        prev_codepoint/ascii_prev_char (and target_inx)
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            target_inx = setNext('_', rc, target_inx);
            var maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                std.log.err("Error on fromPascalCase processing name '{s}'", .{name});
            }
            curr_char = maybe_curr_char.?;
            maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                // We have reached the end of the string (e.g. "Resource Explorer 2")
                // We need to do this check before we setNext, so that we don't
                // end up duplicating the last character
                break;
                // std.log.err("Error on fromPascalCase processing name '{s}', curr_char = '{}'", .{ name, curr_char });
            }
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            prev_char = curr_char;
            curr_char = maybe_curr_char.?;
            continue;
        }
        if (between(curr_char, 'A', 'Z')) {
            if (isAcronym(curr_char, next_char)) {
                // We could be in an acronym at the start of a word. This
                // is the only case where we actually need to look back at the
                // previous character, and if that's the case, throw in an
                // underscore
                // "SAMLMySAMLAcronymThing");
                if (between(prev_char, 'a', 'z'))
                    target_inx = setNext('_', rc, target_inx);

                //we are in an acronym - don't snake, just lower
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            } else {
                target_inx = setNext('_', rc, target_inx);
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            }
        } else {
            target_inx = setNext(curr_char, rc, target_inx);
        }
        prev_char = curr_char;
        curr_char = next_char;
    }

    // work in the last codepoint - force lowercase
    target_inx = setNext(lowercase(curr_char), rc, target_inx);

    rc[target_inx] = 0;
    // NOTE(review): the resize result is discarded; if resize fails the
    // returned slice is shorter than the original allocation, which can
    // confuse allocator.free on the caller's side — verify with the
    // allocators actually used here.
    _ = allocator.resize(rc, target_inx);
    return rc[0..target_inx];
}
/// Two consecutive acronym characters indicate we are inside an acronym.
fn isAcronym(char1: u8, char2: u8) bool {
    return isAcronymChar(char1) and isAcronymChar(char2);
}
/// Acronym characters are uppercase letters or digits.
fn isAcronymChar(char: u8) bool {
    return switch (char) {
        'A'...'Z', '0'...'9' => true,
        else => false,
    };
}
/// Narrow an optional codepoint to u8. Null passes through as null;
/// codepoints above 0xff are rejected with UnicodeNotSupported.
fn isAscii(codepoint: ?u21) !?u8 {
    const cp = codepoint orelse return null;
    if (cp > 0xff) return error.UnicodeNotSupported;
    return @as(u8, @truncate(cp));
}
/// Store one byte at `inx` and return the next write index.
fn setNext(ascii: u8, slice: []u8, inx: u64) u64 {
    slice[inx] = ascii;
    return inx + 1;
}
/// Shift 'A'-'Z' into 'a'-'z'; all other bytes pass through unchanged.
fn lowercase(ascii: u8) u8 {
    if (between(ascii, 'A', 'Z')) return ascii + ('a' - 'A');
    return ascii;
}
/// Inclusive range check: from <= char <= to.
fn between(char: u8, from: u8, to: u8) bool {
    return char >= from and char <= to;
}
test "converts from PascalCase to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "MyPascalCaseThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("my_pascal_case_thing", snake_case);
}
test "handles from PascalCase acronyms to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "SAMLMySAMLAcronymThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("saml_my_saml_acronym_thing", snake_case);
}
test "spaces in the name" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "API Gateway");
    defer allocator.free(snake_case);
    try expectEqualStrings("api_gateway", snake_case);
}

test "S3" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "S3");
    defer allocator.free(snake_case);
    try expectEqualStrings("s3", snake_case);
}

test "ec2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "EC2");
    defer allocator.free(snake_case);
    try expectEqualStrings("ec2", snake_case);
}

test "IoT 1Click Devices Service" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "IoT 1Click Devices Service");
    defer allocator.free(snake_case);
    // NOTE: There is some debate among humans about what this should
    // turn into. Should it be iot_1click_... or iot_1_click...?
    try expectEqualStrings("iot_1_click_devices_service", snake_case);
}
test "Resource Explorer 2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "Resource Explorer 2");
    defer allocator.free(snake_case);
    // NOTE: Same debate as above applies here: should it be
    // resource_explorer_2 or resource_explorer2?
    try expectEqualStrings("resource_explorer_2", snake_case);
}

View file

@ -1,33 +0,0 @@
const std = @import("std");
const case = @import("case");
const Allocator = std.mem.Allocator;
/// Convert a service/member id to the requested case, applying a set of
/// hard-coded exceptions the generic algorithm gets wrong. Caller owns
/// the returned slice.
pub fn constantName(allocator: Allocator, id: []const u8, comptime to_case: case.Case) ![]const u8 {
    // There are some ids that don't follow consistent rules, so we'll
    // look for the exceptions and, if not found, revert to the snake case
    // algorithm

    // Work on a heap copy of the id so the replacements below can edit
    // it in place. The original also filled a fixed 256-byte stack
    // buffer that was never read (and would overflow for ids longer
    // than 256 bytes); that dead code is removed. The copy is freed on
    // every return path — case.allocTo and allocPrint both return
    // fresh allocations, so the original leaked `name` on each call.
    const name = try allocator.dupe(u8, id);
    defer allocator.free(name);

    // Same-length spelling normalizations, applied in place.
    const simple_replacements = &.{
        &.{ "DevOps", "Devops" },
        &.{ "IoT", "Iot" },
        &.{ "FSx", "Fsx" },
        &.{ "CloudFront", "Cloudfront" },
    };

    inline for (simple_replacements) |rep| {
        if (std.mem.indexOf(u8, name, rep[0])) |idx| @memcpy(name[idx .. idx + rep[0].len], rep[1]);
    }

    // Full-id exceptions for snake case.
    if (to_case == .snake) {
        if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
        if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});
    }

    return try case.allocTo(allocator, to_case, name);
}

View file

@ -15,17 +15,15 @@ pub fn build(b: *std.Build) void {
// set a preferred release mode, allowing the user to decide how to optimize.
const optimize = b.standardOptimizeOption(.{});
const mod_exe = b.createModule(.{
const exe = b.addExecutable(.{
.name = "tmp",
// In this case the main source file is merely a path, however, in more
// complicated build scripts, this could be a generated file.
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
const exe = b.addExecutable(.{
.name = "tmp",
.root_module = mod_exe,
});
const aws_dep = b.dependency("aws", .{
// These are the two arguments to the dependency. It expects a target and optimization level.
.target = target,
@ -61,15 +59,12 @@ pub fn build(b: *std.Build) void {
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
const mod_unit_tests = b.createModule(.{
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_module = mod_unit_tests,
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
const run_unit_tests = b.addRunArtifact(unit_tests);

View file

@ -1,13 +1,12 @@
.{
.name = .myapp,
.name = "myapp",
.version = "0.0.1",
.fingerprint = 0x8798022a511224c5,
.paths = .{""},
.dependencies = .{
.aws = .{
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/cfc8aee1a6b54eac4a58893674361f1ad58e8595/cfc8aee1a6b54eac4a58893674361f1ad58e8595-with-models.tar.gz",
.hash = "aws-0.0.1-SbsFcK8HCgA-P7sjZP5z7J7ZfZLTkQ4osD0qgbyUgTzG",
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz",
.hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968",
},
},
}

View file

@ -15,10 +15,10 @@ pub fn main() anyerror!void {
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
defer _ = gpa.deinit();
const allocator = gpa.allocator();
var stdout_buffer: [1024]u8 = undefined;
var stdout_raw = std.fs.File.stdout().writer(&stdout_buffer);
const stdout = &stdout_raw.interface;
defer stdout.flush() catch unreachable;
const stdout_raw = std.io.getStdOut().writer();
var bw = std.io.bufferedWriter(stdout_raw);
defer bw.flush() catch unreachable;
const stdout = bw.writer();
// To use a proxy, uncomment the following with your own configuration
// const proxy = std.http.Proxy{

View file

@ -1,41 +0,0 @@
const std = @import("std");
/// Build script for the `date` module: a static library, its unit
/// tests, and the zeit/json dependency wiring.
pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const lib_mod = b.addModule("date", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .optimize = optimize,
    });

    const lib = b.addLibrary(.{
        .linkage = .static,
        .name = "date",
        .root_module = lib_mod,
    });

    b.installArtifact(lib);

    // `zig build test` runs the module's unit tests.
    const lib_unit_tests = b.addTest(.{
        .root_module = lib_mod,
    });

    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_lib_unit_tests.step);

    const dep_zeit = b.dependency("zeit", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("zeit", dep_zeit.module("zeit"));

    const dep_json = b.dependency("json", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("json", dep_json.module("json"));
}

View file

@ -1,20 +0,0 @@
.{
.name = .date,
.version = "0.0.0",
.fingerprint = 0xaa9e377a226d739e, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{
.zeit = .{
.url = "git+https://github.com/rockorager/zeit?ref=zig-0.15#ed2ca60db118414bda2b12df2039e33bad3b0b88",
.hash = "zeit-0.6.0-5I6bk0J9AgCVa0nnyL0lNY9Xa9F68hHq-ZarhuXNV-Jb",
},
.json = .{
.path = "../json",
},
},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}

View file

@ -1,118 +0,0 @@
const std = @import("std");
const log = std.log.scoped(.date);
const zeit = @import("zeit");
/// A simple calendar date/time (no timezone; values are treated as UTC
/// by the conversion helpers below). Convertible to and from zeit
/// Instant/Time.
pub const DateTime = struct {
    day: u8,
    month: u8, // 1-12 (converted from zeit's month enum)
    year: u16,
    hour: u8,
    minute: u8,
    second: u8,

    /// Build from a zeit Instant (via its civil time).
    pub fn fromInstant(val: zeit.Instant) DateTime {
        return fromTime(val.time());
    }

    /// Build from a zeit Time; narrows zeit's wider field types.
    pub fn fromTime(val: zeit.Time) DateTime {
        return DateTime{
            .day = val.day,
            .month = @intFromEnum(val.month),
            .year = @intCast(val.year),
            .hour = val.hour,
            .minute = val.minute,
            .second = val.second,
        };
    }

    /// Convert back to a zeit Time.
    pub fn time(self: DateTime) zeit.Time {
        return zeit.Time{
            .day = @intCast(self.day),
            .month = @enumFromInt(self.month),
            .year = self.year,
            .hour = @intCast(self.hour),
            .minute = @intCast(self.minute),
            .second = @intCast(self.second),
        };
    }

    /// Convert to a zeit Instant (errors bubble up from zeit).
    pub fn instant(self: DateTime) !zeit.Instant {
        return try zeit.instant(.{ .source = .{ .time = self.time() } });
    }
};
/// Convert a unix timestamp (seconds) to a DateTime. Panics if zeit
/// cannot create an instant from the timestamp.
pub fn timestampToDateTime(timestamp: zeit.Seconds) DateTime {
    const ins = zeit.instant(.{ .source = .{ .unix_timestamp = timestamp } }) catch @panic("Failed to create instant from timestamp");
    return DateTime.fromInstant(ins);
}
/// Parse an RFC 1123 date string straight to a unix timestamp.
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
/// (RFC 1123, parsed by zeit).
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .rfc1123 = data } });
    return DateTime.fromInstant(ins);
}
/// Parse an ISO 8601 date string straight to a unix timestamp.
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}
/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Input is ISO 8601, parsed by zeit.
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .iso8601 = data } });
    return DateTime.fromInstant(ins);
}
/// Convert a DateTime (treated as UTC) to a unix timestamp in seconds.
pub fn dateTimeToTimestamp(datetime: DateTime) !zeit.Seconds {
    return (try datetime.instant()).unixTimestamp();
}
fn printDateTime(dt: DateTime) void {
log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
dt.year,
dt.month,
dt.day,
dt.hour,
dt.minute,
dt.second,
});
}
pub fn printNowUtc() void {
printDateTime(timestampToDateTime(std.time.timestamp()));
}
test "Convert timestamp to datetime" {
printDateTime(timestampToDateTime(std.time.timestamp()));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
// Get time for date: https://wtools.io/convert-date-time-to-unix-time
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}
test "Convert datetime to timestamp" {
try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}
test "Convert ISO8601 string to timestamp" {
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}
test "Convert whatever AWS is sending us to timestamp" {
const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}

View file

@ -1,20 +0,0 @@
const std = @import("std");
const testing = std.testing;
const parsing = @import("parsing.zig");
pub const DateTime = parsing.DateTime;
pub const timestampToDateTime = parsing.timestampToDateTime;
pub const parseEnglishToTimestamp = parsing.parseEnglishToTimestamp;
pub const parseEnglishToDateTime = parsing.parseEnglishToDateTime;
pub const parseIso8601ToTimestamp = parsing.parseIso8601ToTimestamp;
pub const parseIso8601ToDateTime = parsing.parseIso8601ToDateTime;
pub const dateTimeToTimestamp = parsing.dateTimeToTimestamp;
pub const printNowUtc = parsing.printNowUtc;
const timestamp = @import("timestamp.zig");
pub const DateFormat = timestamp.DateFormat;
pub const Timestamp = timestamp.Timestamp;
test {
testing.refAllDeclsRecursive(@This());
}

View file

@ -1,69 +0,0 @@
const std = @import("std");
const zeit = @import("zeit");
pub const DateFormat = enum {
rfc1123,
iso8601,
};
pub const Timestamp = enum(zeit.Nanoseconds) {
_,
pub fn jsonStringify(value: Timestamp, jw: anytype) !void {
const instant = zeit.instant(.{
.source = .{
.unix_nano = @intFromEnum(value),
},
}) catch std.debug.panic("Failed to parse timestamp to instant: {d}", .{value});
const fmt = "Mon, 02 Jan 2006 15:04:05 GMT";
var buf: [fmt.len]u8 = undefined;
var fbs = std.Io.Writer.fixed(&buf);
instant.time().gofmt(&fbs, fmt) catch std.debug.panic("Failed to format instant: {d}", .{instant.timestamp});
try jw.write(&buf);
}
pub fn parse(val: []const u8) !Timestamp {
const date_format = blk: {
if (std.ascii.isDigit(val[0])) {
break :blk DateFormat.iso8601;
} else {
break :blk DateFormat.rfc1123;
}
};
const ins = try zeit.instant(.{
.source = switch (date_format) {
DateFormat.iso8601 => .{
.iso8601 = val,
},
DateFormat.rfc1123 => .{
.rfc1123 = val,
},
},
});
return @enumFromInt(ins.timestamp);
}
};
test Timestamp {
const in_date = "Wed, 23 Apr 2025 11:23:45 GMT";
const expected_ts: Timestamp = @enumFromInt(1745407425000000000);
const actual_ts = try Timestamp.parse(in_date);
try std.testing.expectEqual(expected_ts, actual_ts);
var buf: [100]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buf);
var counting_writer = std.io.countingWriter(fbs.writer());
try Timestamp.jsonStringify(expected_ts, .{}, counting_writer.writer());
const expected_json = "\"" ++ in_date ++ "\"";
const actual_json = buf[0..counting_writer.bytes_written];
try std.testing.expectEqualStrings(expected_json, actual_json);
}

View file

@ -1,29 +0,0 @@
const std = @import("std");
pub fn build(b: *std.Build) void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});
const lib_mod = b.addModule("json", .{
.root_source_file = b.path("src/json.zig"),
.target = target,
.optimize = optimize,
});
const lib = b.addLibrary(.{
.linkage = .static,
.name = "json",
.root_module = lib_mod,
});
b.installArtifact(lib);
const lib_unit_tests = b.addTest(.{
.root_module = lib_mod,
});
const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);
const test_step = b.step("test", "Run unit tests");
test_step.dependOn(&run_lib_unit_tests.step);
}

View file

@ -1,12 +0,0 @@
.{
.name = .json,
.version = "0.0.0",
.fingerprint = 0x6b0725452065211c, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}

File diff suppressed because it is too large Load diff

View file

@ -25,7 +25,7 @@ pub const Credentials = struct {
};
}
pub fn deinit(self: Self) void {
std.crypto.secureZero(u8, self.secret_key);
std.crypto.utils.secureZero(u8, self.secret_key);
self.allocator.free(self.secret_key);
self.allocator.free(self.access_key);
if (self.session_token) |t| self.allocator.free(t);

View file

@ -173,12 +173,11 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {
var cl = std.http.Client{ .allocator = allocator };
defer cl.deinit(); // I don't belive connection pooling would help much here as it's non-ssl and local
var aw: std.Io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const response_payload = &aw.writer;
var resp_payload = std.ArrayList(u8).init(allocator);
defer resp_payload.deinit();
const req = try cl.fetch(.{
.location = .{ .url = container_uri },
.response_writer = response_payload,
.response_storage = .{ .dynamic = &resp_payload },
});
if (req.status != .ok and req.status != .not_found) {
log.warn("Bad status code received from container credentials endpoint: {}", .{@intFromEnum(req.status)});
@ -186,8 +185,8 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {
}
if (req.status == .not_found) return null;
log.debug("Read {d} bytes from container credentials endpoint", .{aw.written().len});
if (aw.written().len == 0) return null;
log.debug("Read {d} bytes from container credentials endpoint", .{resp_payload.items.len});
if (resp_payload.items.len == 0) return null;
const CredsResponse = struct {
AccessKeyId: []const u8,
@ -197,8 +196,8 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {
Token: []const u8,
};
const creds_response = blk: {
const res = std.json.parseFromSlice(CredsResponse, allocator, aw.written(), .{}) catch |e| {
log.err("Unexpected Json response from container credentials endpoint: {s}", .{aw.written()});
const res = std.json.parseFromSlice(CredsResponse, allocator, resp_payload.items, .{}) catch |e| {
log.err("Unexpected Json response from container credentials endpoint: {s}", .{resp_payload.items});
log.err("Error parsing json: {}", .{e});
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);
@ -225,27 +224,26 @@ fn getImdsv2Credentials(allocator: std.mem.Allocator) !?auth.Credentials {
defer cl.deinit(); // I don't belive connection pooling would help much here as it's non-ssl and local
// Get token
{
var aw: std.Io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const response_payload = &aw.writer;
var resp_payload = std.ArrayList(u8).init(allocator);
defer resp_payload.deinit();
const req = try cl.fetch(.{
.method = .PUT,
.location = .{ .url = "http://169.254.169.254/latest/api/token" },
.extra_headers = &[_]std.http.Header{
.{ .name = "X-aws-ec2-metadata-token-ttl-seconds", .value = "21600" },
},
.response_writer = response_payload,
.response_storage = .{ .dynamic = &resp_payload },
});
if (req.status != .ok) {
log.warn("Bad status code received from IMDS v2: {}", .{@intFromEnum(req.status)});
return null;
}
if (aw.written().len == 0) {
if (resp_payload.items.len == 0) {
log.warn("Unexpected zero response from IMDS v2", .{});
return null;
}
token = try aw.toOwnedSlice();
token = try resp_payload.toOwnedSlice();
errdefer if (token) |t| allocator.free(t);
}
std.debug.assert(token != null);
@ -267,16 +265,15 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
// "InstanceProfileArn" : "arn:aws:iam::550620852718:instance-profile/ec2-dev",
// "InstanceProfileId" : "AIPAYAM4POHXCFNKZ7HU2"
// }
var aw: std.Io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const response_payload = &aw.writer;
var resp_payload = std.ArrayList(u8).init(allocator);
defer resp_payload.deinit();
const req = try client.fetch(.{
.method = .GET,
.location = .{ .url = "http://169.254.169.254/latest/meta-data/iam/info" },
.extra_headers = &[_]std.http.Header{
.{ .name = "X-aws-ec2-metadata-token", .value = imds_token },
},
.response_writer = response_payload,
.response_storage = .{ .dynamic = &resp_payload },
});
if (req.status != .ok and req.status != .not_found) {
@ -284,7 +281,7 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
return null;
}
if (req.status == .not_found) return null;
if (aw.written().len == 0) {
if (resp_payload.items.len == 0) {
log.warn("Unexpected empty response from IMDS endpoint post token", .{});
return null;
}
@ -295,8 +292,8 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
InstanceProfileArn: []const u8,
InstanceProfileId: []const u8,
};
const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, aw.written(), .{}) catch |e| {
log.err("Unexpected Json response from IMDS endpoint: {s}", .{aw.written()});
const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, resp_payload.items, .{}) catch |e| {
log.err("Unexpected Json response from IMDS endpoint: {s}", .{resp_payload.items});
log.err("Error parsing json: {}", .{e});
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);
@ -318,16 +315,15 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, role_name: []const u8, imds_token: []u8) !?auth.Credentials {
const url = try std.fmt.allocPrint(allocator, "http://169.254.169.254/latest/meta-data/iam/security-credentials/{s}/", .{role_name});
defer allocator.free(url);
var aw: std.Io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const response_payload = &aw.writer;
var resp_payload = std.ArrayList(u8).init(allocator);
defer resp_payload.deinit();
const req = try client.fetch(.{
.method = .GET,
.location = .{ .url = url },
.extra_headers = &[_]std.http.Header{
.{ .name = "X-aws-ec2-metadata-token", .value = imds_token },
},
.response_writer = response_payload,
.response_storage = .{ .dynamic = &resp_payload },
});
if (req.status != .ok and req.status != .not_found) {
@ -335,7 +331,7 @@ fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, ro
return null;
}
if (req.status == .not_found) return null;
if (aw.written().len == 0) {
if (resp_payload.items.len == 0) {
log.warn("Unexpected empty response from IMDS role endpoint", .{});
return null;
}
@ -350,8 +346,8 @@ fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, ro
Token: []const u8,
Expiration: []const u8,
};
const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, aw.written(), .{}) catch |e| {
log.err("Unexpected Json response from IMDS endpoint: {s}", .{aw.written()});
const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, resp_payload.items, .{}) catch |e| {
log.err("Unexpected Json response from IMDS endpoint: {s}", .{resp_payload.items});
log.err("Error parsing json: {}", .{e});
if (@errorReturnTrace()) |trace| {
std.debug.dumpStackTrace(trace.*);

View file

@ -90,37 +90,8 @@ pub const Options = struct {
dualstack: bool = false,
sigv4_service_name: ?[]const u8 = null,
mock: ?Mock = null,
};
/// mocking methods for isolated testing
pub const Mock = struct {
/// Used to provide consistent signing
signing_time: ?i64,
/// context is desiged to be type-erased pointer (@intFromPtr)
context: usize = 0,
request_fn: *const fn (
usize,
std.http.Method,
std.Uri,
std.http.Client.RequestOptions,
) std.http.Client.RequestError!std.http.Client.Request,
send_body_complete: *const fn (usize, []u8) std.Io.Writer.Error!void,
receive_head: *const fn (usize) std.http.Client.Request.ReceiveHeadError!std.http.Client.Response,
reader_decompressing: *const fn (usize) *std.Io.Reader,
fn request(m: Mock, method: std.http.Method, uri: std.Uri, options: std.http.Client.RequestOptions) std.http.Client.RequestError!std.http.Client.Request {
return m.request_fn(m.context, method, uri, options);
}
fn sendBodyComplete(m: Mock, body: []u8) std.Io.Writer.Error!void {
return m.send_body_complete(m.context, body);
}
fn receiveHead(m: Mock) std.http.Client.Request.ReceiveHeadError!std.http.Client.Response {
return m.receive_head(m.context);
}
fn readerDecompressing(m: Mock) *std.Io.Reader {
return m.reader_decompressing(m.context);
}
/// Used for testing to provide consistent signing. If null, will use current time
signing_time: ?i64 = null,
};
pub const Header = std.http.Header;
@ -192,9 +163,9 @@ pub const AwsHttp = struct {
.region = getRegion(service, options.region),
.service = options.sigv4_service_name orelse service,
.credentials = creds,
.signing_time = if (options.mock) |m| m.signing_time else null,
.signing_time = options.signing_time,
};
return try self.makeRequest(endpoint, request, signing_config, options);
return try self.makeRequest(endpoint, request, signing_config);
}
/// makeRequest is a low level http/https function that can be used inside
@ -213,13 +184,7 @@ pub const AwsHttp = struct {
/// Content-Length: (length of body)
///
/// Return value is an HttpResult, which will need the caller to deinit().
pub fn makeRequest(
self: Self,
endpoint: EndPoint,
request: HttpRequest,
signing_config: ?signing.Config,
options: Options,
) !HttpResult {
pub fn makeRequest(self: Self, endpoint: EndPoint, request: HttpRequest, signing_config: ?signing.Config) !HttpResult {
var request_cp = request;
log.debug("Request Path: {s}", .{request_cp.path});
@ -234,8 +199,8 @@ pub const AwsHttp = struct {
// We will use endpoint instead
request_cp.path = endpoint.path;
var request_headers = std.ArrayList(std.http.Header){};
defer request_headers.deinit(self.allocator);
var request_headers = std.ArrayList(std.http.Header).init(self.allocator);
defer request_headers.deinit();
const len = try addHeaders(self.allocator, &request_headers, endpoint.host, request_cp.body, request_cp.content_type, request_cp.headers);
defer if (len) |l| self.allocator.free(l);
@ -248,10 +213,10 @@ pub const AwsHttp = struct {
}
}
var headers = std.ArrayList(std.http.Header){};
defer headers.deinit(self.allocator);
var headers = std.ArrayList(std.http.Header).init(self.allocator);
defer headers.deinit();
for (request_cp.headers) |header|
try headers.append(self.allocator, .{ .name = header.name, .value = header.value });
try headers.append(.{ .name = header.name, .value = header.value });
log.debug("All Request Headers:", .{});
for (headers.items) |h| {
log.debug("\t{s}: {s}", .{ h.name, h.value });
@ -263,12 +228,18 @@ pub const AwsHttp = struct {
// TODO: Fix this proxy stuff. This is all a kludge just to compile, but std.http.Client has it all built in now
var cl = std.http.Client{ .allocator = self.allocator, .https_proxy = if (self.proxy) |*p| @constCast(p) else null };
defer cl.deinit(); // TODO: Connection pooling
const method = std.meta.stringToEnum(std.http.Method, request_cp.method).?;
// Fetch API in 0.15.1 is insufficient as it does not provide
// server headers. We'll construct and send the request ourselves
const uri = try std.Uri.parse(url);
const req_options: std.http.Client.RequestOptions = .{
const method = std.meta.stringToEnum(std.http.Method, request_cp.method).?;
var server_header_buffer: [16 * 1024]u8 = undefined;
var resp_payload = std.ArrayList(u8).init(self.allocator);
defer resp_payload.deinit();
const req = try cl.fetch(.{
.server_header_buffer = &server_header_buffer,
.method = method,
.payload = if (request_cp.body.len > 0) request_cp.body else null,
.response_storage = .{ .dynamic = &resp_payload },
.raw_uri = true,
.location = .{ .url = url },
// we need full control over most headers. I wish libraries would do a
// better job of having default headers as an opt-in...
.headers = .{
@ -280,13 +251,7 @@ pub const AwsHttp = struct {
.content_type = .omit,
},
.extra_headers = headers.items,
};
var req = if (options.mock) |m|
try m.request(method, uri, req_options) // This will call the test harness
else
try cl.request(method, uri, req_options);
defer req.deinit();
});
// TODO: Need to test for payloads > 2^14. I believe one of our tests does this, but not sure
// if (request_cp.body.len > 0) {
// // Workaround for https://github.com/ziglang/zig/issues/15626
@ -301,69 +266,33 @@ pub const AwsHttp = struct {
// }
// try req.wait();
if (request_cp.body.len > 0) {
// This seems a bit silly, but we can't have a []const u8 here
// because when it sends, it's using a writer, and this becomes
// the buffer of the writer. It's conceivable that something
// in the chain then does actually modify the body of the request
// so we'll need to duplicate it here
const req_body = try self.allocator.dupe(u8, request_cp.body);
defer self.allocator.free(req_body); // docs for sendBodyComplete say it flushes, so no need to outlive this
if (options.mock) |m|
try m.sendBodyComplete(req_body)
else
try req.sendBodyComplete(req_body);
} else if (options.mock == null) try req.sendBodiless();
// if (options.mock == null) log.err("Request sent. Body len {d}, uri {f}", .{ request_cp.body.len, uri });
var response = if (options.mock) |m| try m.receiveHead() else try req.receiveHead(&.{});
// TODO: Timeout - is this now above us?
log.debug(
"Request Complete. Response code {d}: {?s}",
.{ @intFromEnum(response.head.status), response.head.status.phrase() },
.{ @intFromEnum(req.status), req.status.phrase() },
);
log.debug("Response headers:", .{});
var resp_headers = std.ArrayList(Header){};
defer resp_headers.deinit(self.allocator);
var it = response.head.iterateHeaders();
var resp_headers = std.ArrayList(Header).init(
self.allocator,
);
defer resp_headers.deinit();
var it = std.http.HeaderIterator.init(server_header_buffer[0..]);
while (it.next()) |h| { // even though we don't expect to fill the buffer,
// we don't get a length, but looks via stdlib source
// it should be ok to call next on the undefined memory
log.debug(" {s}: {s}", .{ h.name, h.value });
try resp_headers.append(self.allocator, .{
try resp_headers.append(.{
.name = try (self.allocator.dupe(u8, h.name)),
.value = try (self.allocator.dupe(u8, h.value)),
});
}
// This is directly lifted from fetch, as there is no function in
// 0.15.1 client to negotiate decompression
const decompress_buffer: []u8 = switch (response.head.content_encoding) {
.identity => &.{},
.zstd => try self.allocator.alloc(u8, std.compress.zstd.default_window_len),
.deflate, .gzip => try self.allocator.alloc(u8, std.compress.flate.max_window_len),
.compress => return error.UnsupportedCompressionMethod,
};
defer self.allocator.free(decompress_buffer);
var transfer_buffer: [64]u8 = undefined;
var decompress: std.http.Decompress = undefined;
const reader = response.readerDecompressing(&transfer_buffer, &decompress, decompress_buffer);
// Not sure on optimal size here, but should definitely be > 0
var aw = try std.Io.Writer.Allocating.initCapacity(self.allocator, 128);
defer aw.deinit();
const response_writer = &aw.writer;
_ = reader.streamRemaining(response_writer) catch |err| switch (err) {
error.ReadFailed => return response.bodyErr().?,
else => |e| return e,
};
log.debug("raw response body:\n{s}", .{aw.written()});
log.debug("raw response body:\n{s}", .{resp_payload.items});
const rc = HttpResult{
.response_code = @intFromEnum(response.head.status),
.body = try aw.toOwnedSlice(),
.headers = try resp_headers.toOwnedSlice(self.allocator),
.response_code = @intFromEnum(req.status),
.body = try resp_payload.toOwnedSlice(),
.headers = try resp_headers.toOwnedSlice(),
.allocator = self.allocator,
};
return rc;
@ -376,21 +305,15 @@ fn getRegion(service: []const u8, region: []const u8) []const u8 {
return region;
}
fn addHeaders(
allocator: std.mem.Allocator,
headers: *std.ArrayList(std.http.Header),
host: []const u8,
body: []const u8,
content_type: []const u8,
additional_headers: []const Header,
) !?[]const u8 {
// We don't need body because they were to add a Content-Length header. But
// that is being added by the client send() function, so we don't want it
// on the request twice. But I also feel pretty strongly that send() should
// be providing us control, because I think if we don't add it here, it
// won't get signed, and we would really prefer it to be signed. So, we
// will wait and watch for this situation to change in stdlib
fn addHeaders(allocator: std.mem.Allocator, headers: *std.ArrayList(std.http.Header), host: []const u8, body: []const u8, content_type: []const u8, additional_headers: []const Header) !?[]const u8 {
// We don't need allocator and body because they were to add a
// Content-Length header. But that is being added by the client send()
// function, so we don't want it on the request twice. But I also feel
// pretty strongly that send() should be providing us control, because
// I think if we don't add it here, it won't get signed, and we would
// really prefer it to be signed. So, we will wait and watch for this
// situation to change in stdlib
_ = allocator;
_ = body;
var has_content_type = false;
for (additional_headers) |h| {
@ -399,12 +322,12 @@ fn addHeaders(
break;
}
}
try headers.append(allocator, .{ .name = "Accept", .value = "application/json" });
try headers.append(allocator, .{ .name = "Host", .value = host });
try headers.append(allocator, .{ .name = "User-Agent", .value = "zig-aws 1.0" });
try headers.append(.{ .name = "Accept", .value = "application/json" });
try headers.append(.{ .name = "Host", .value = host });
try headers.append(.{ .name = "User-Agent", .value = "zig-aws 1.0" });
if (!has_content_type)
try headers.append(allocator, .{ .name = "Content-Type", .value = content_type });
try headers.appendSlice(allocator, additional_headers);
try headers.append(.{ .name = "Content-Type", .value = content_type });
try headers.appendSlice(additional_headers);
return null;
}
@ -540,19 +463,41 @@ fn s3BucketFromPath(path: []const u8) []const u8 {
/// allocator: Will be used only to construct the EndPoint struct
/// uri: string constructed in such a way that deallocation is needed
fn endPointFromUri(allocator: std.mem.Allocator, uri: []const u8, path: []const u8) !EndPoint {
const parsed_uri = try std.Uri.parse(uri);
const scheme = parsed_uri.scheme;
const host = try allocator.dupe(u8, parsed_uri.host.?.percent_encoded);
const port: u16 = blk: {
if (parsed_uri.port) |port| break :blk port;
if (std.mem.eql(u8, scheme, "http")) break :blk 80;
if (std.mem.eql(u8, scheme, "https")) break :blk 443;
break :blk 0;
};
var scheme: []const u8 = "";
var host: []const u8 = "";
var port: u16 = 443;
var host_start: usize = 0;
var host_end: usize = 0;
for (uri, 0..) |ch, i| {
switch (ch) {
':' => {
if (!std.mem.eql(u8, scheme, "")) {
// here to end is port - this is likely a bug if ipv6 address used
const rest_of_uri = uri[i + 1 ..];
port = try std.fmt.parseUnsigned(u16, rest_of_uri, 10);
host_end = i;
}
},
'/' => {
if (host_start == 0) {
host_start = i + 2;
scheme = uri[0 .. i - 1];
if (std.mem.eql(u8, scheme, "http")) {
port = 80;
} else {
port = 443;
}
}
},
else => continue,
}
}
if (host_end == 0) {
host_end = uri.len;
}
host = try allocator.dupe(u8, uri[host_start..host_end]);
log.debug("host: {s}, scheme: {s}, port: {}", .{ host, scheme, port });
return EndPoint{
.uri = uri,
.host = host,

View file

@ -1,5 +1,6 @@
//! This module provides base data structures for aws http requests
const std = @import("std");
const log = std.log.scoped(.aws_base);
pub const Request = struct {
path: []const u8 = "/",
query: []const u8 = "",

View file

@ -1,7 +1,7 @@
const std = @import("std");
const base = @import("aws_http_base.zig");
const auth = @import("aws_authentication.zig");
const date = @import("date");
const date = @import("date.zig");
const scoped_log = std.log.scoped(.aws_signing);
@ -157,7 +157,7 @@ pub const SigningError = error{
XAmzExpiresHeaderInRequest,
/// Used if the request headers already includes x-amz-region-set
XAmzRegionSetHeaderInRequest,
} || error{OutOfMemory};
} || std.fmt.AllocPrintError;
const forbidden_headers = .{
.{ .name = "x-amz-content-sha256", .err = SigningError.XAmzContentSha256HeaderInRequest },
@ -240,10 +240,6 @@ pub fn signRequest(allocator: std.mem.Allocator, request: base.Request, config:
// regardless of whether we're sticking the header on the request
std.debug.assert(config.signed_body_header == .none or
config.signed_body_header == .sha256);
log.debug(
"Request body len: {d}. First 5 bytes (max): {s}",
.{ request.body.len, request.body[0..@min(request.body.len, 5)] },
);
const payload_hash = try hash(allocator, request.body, .sha256);
if (config.signed_body_header == .sha256) {
// From the AWS nitro enclaves SDK, it appears that there is no reason
@ -316,12 +312,12 @@ pub fn signRequest(allocator: std.mem.Allocator, request: base.Request, config:
.name = "Authorization",
.value = try std.fmt.allocPrint(
allocator,
"AWS4-HMAC-SHA256 Credential={s}/{s}, SignedHeaders={s}, Signature={x}",
"AWS4-HMAC-SHA256 Credential={s}/{s}, SignedHeaders={s}, Signature={s}",
.{
config.credentials.access_key,
scope,
canonical_request.headers.signed_headers,
signature,
std.fmt.fmtSliceHexLower(signature),
},
),
};
@ -352,7 +348,7 @@ pub fn freeSignedRequest(allocator: std.mem.Allocator, request: *base.Request, c
pub const credentialsFn = *const fn ([]const u8) ?Credentials;
pub fn verifyServerRequest(allocator: std.mem.Allocator, request: *std.http.Server.Request, request_body_reader: *std.Io.Reader, credentials_fn: credentialsFn) !bool {
pub fn verifyServerRequest(allocator: std.mem.Allocator, request: *std.http.Server.Request, request_body_reader: anytype, credentials_fn: credentialsFn) !bool {
var unverified_request = try UnverifiedRequest.init(allocator, request);
defer unverified_request.deinit();
return verify(allocator, unverified_request, request_body_reader, credentials_fn);
@ -363,19 +359,17 @@ pub const UnverifiedRequest = struct {
target: []const u8,
method: std.http.Method,
allocator: std.mem.Allocator,
raw: *std.http.Server.Request,
pub fn init(allocator: std.mem.Allocator, request: *std.http.Server.Request) !UnverifiedRequest {
var al = std.ArrayList(std.http.Header){};
defer al.deinit(allocator);
var al = std.ArrayList(std.http.Header).init(allocator);
defer al.deinit();
var it = request.iterateHeaders();
while (it.next()) |h| try al.append(allocator, h);
while (it.next()) |h| try al.append(h);
return .{
.target = request.head.target,
.method = request.head.method,
.headers = try al.toOwnedSlice(allocator),
.headers = try al.toOwnedSlice(),
.allocator = allocator,
.raw = request,
};
}
@ -393,7 +387,7 @@ pub const UnverifiedRequest = struct {
}
};
pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_body_reader: *std.Io.Reader, credentials_fn: credentialsFn) !bool {
pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_body_reader: anytype, credentials_fn: credentialsFn) !bool {
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
const aa = arena.allocator();
@ -426,10 +420,10 @@ pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_
return verifyParsedAuthorization(
aa,
request,
request_body_reader,
credential.?,
signed_headers.?,
signature.?,
request_body_reader,
credentials_fn,
);
}
@ -437,10 +431,10 @@ pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_
fn verifyParsedAuthorization(
allocator: std.mem.Allocator,
request: UnverifiedRequest,
request_body_reader: anytype,
credential: []const u8,
signed_headers: []const u8,
signature: []const u8,
request_body_reader: *std.Io.Reader,
credentials_fn: credentialsFn,
) !bool {
// AWS4-HMAC-SHA256
@ -500,7 +494,7 @@ fn verifyParsedAuthorization(
.content_type = request.getFirstHeaderValue("content-type").?,
};
signed_request.query = request.target[signed_request.path.len..]; // TODO: should this be +1? query here would include '?'
signed_request.body = try request_body_reader.allocRemaining(allocator, .unlimited);
signed_request.body = try request_body_reader.readAllAlloc(allocator, std.math.maxInt(usize));
defer allocator.free(signed_request.body);
signed_request = try signRequest(allocator, signed_request, config);
defer freeSignedRequest(allocator, &signed_request, config);
@ -551,7 +545,7 @@ fn getSigningKey(allocator: std.mem.Allocator, signing_date: []const u8, config:
defer {
// secureZero avoids compiler optimizations that may say
// "WTF are you doing this thing? Looks like nothing to me. It's silly and we will remove it"
std.crypto.secureZero(u8, secret); // zero our copy of secret
std.crypto.utils.secureZero(u8, secret); // zero our copy of secret
allocator.free(secret);
}
// log.debug("secret: {s}", .{secret});
@ -668,18 +662,18 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
}
defer allocator.free(encoded_once);
var encoded_twice = try encodeUri(allocator, encoded_once);
defer allocator.free(encoded_twice);
log.debug("encoded path (2): {s}", .{encoded_twice});
if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
return try allocator.dupe(u8, encoded_twice[0..i]);
_ = allocator.resize(encoded_twice, i);
return encoded_twice[0..i];
}
return try allocator.dupe(u8, encoded_twice);
return encoded_twice;
}
fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
const unreserved_marks = "-_.!~*'()";
var encoded = try std.ArrayList(u8).initCapacity(allocator, path.len);
defer encoded.deinit(allocator);
defer encoded.deinit();
for (path) |c| {
var should_encode = true;
for (unreserved_marks) |r|
@ -691,16 +685,16 @@ fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
should_encode = false;
if (!should_encode) {
try encoded.append(allocator, c);
try encoded.append(c);
continue;
}
// Whatever remains, encode it
try encoded.append(allocator, '%');
const hex = try std.fmt.allocPrint(allocator, "{X}", .{&[_]u8{c}});
try encoded.append('%');
const hex = try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexUpper(&[_]u8{c})});
defer allocator.free(hex);
try encoded.appendSlice(allocator, hex);
try encoded.appendSlice(hex);
}
return encoded.toOwnedSlice(allocator);
return encoded.toOwnedSlice();
}
// URI encode every byte except the unreserved characters:
@ -721,7 +715,7 @@ fn encodeUri(allocator: std.mem.Allocator, path: []const u8) ![]u8 {
const reserved_characters = ";,/?:@&=+$#";
const unreserved_marks = "-_.!~*'()";
var encoded = try std.ArrayList(u8).initCapacity(allocator, path.len);
defer encoded.deinit(allocator);
defer encoded.deinit();
// if (std.mem.startsWith(u8, path, "/2017-03-31/tags/arn")) {
// try encoded.appendSlice("/2017-03-31/tags/arn%25253Aaws%25253Alambda%25253Aus-west-2%25253A550620852718%25253Afunction%25253Aawsome-lambda-LambdaStackawsomeLambda");
// return encoded.toOwnedSlice();
@ -744,16 +738,16 @@ fn encodeUri(allocator: std.mem.Allocator, path: []const u8) ![]u8 {
should_encode = false;
if (!should_encode) {
try encoded.append(allocator, c);
try encoded.append(c);
continue;
}
// Whatever remains, encode it
try encoded.append(allocator, '%');
const hex = try std.fmt.allocPrint(allocator, "{X}", .{&[_]u8{c}});
try encoded.append('%');
const hex = try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexUpper(&[_]u8{c})});
defer allocator.free(hex);
try encoded.appendSlice(allocator, hex);
try encoded.appendSlice(hex);
}
return encoded.toOwnedSlice(allocator);
return encoded.toOwnedSlice();
}
fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
@ -806,25 +800,25 @@ fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const
// Split this by component
var portions = std.mem.splitScalar(u8, query, '&');
var sort_me = std.ArrayList([]const u8){};
defer sort_me.deinit(allocator);
var sort_me = std.ArrayList([]const u8).init(allocator);
defer sort_me.deinit();
while (portions.next()) |item|
try sort_me.append(allocator, item);
try sort_me.append(item);
std.sort.pdq([]const u8, sort_me.items, {}, lessThanBinary);
var normalized = try std.ArrayList(u8).initCapacity(allocator, path.len);
defer normalized.deinit(allocator);
defer normalized.deinit();
var first = true;
for (sort_me.items) |i| {
if (!first) try normalized.append(allocator, '&');
if (!first) try normalized.append('&');
first = false;
const first_equals = std.mem.indexOf(u8, i, "=");
if (first_equals == null) {
// Rare. This is "foo="
const normed_item = try encodeUri(allocator, i);
defer allocator.free(normed_item);
try normalized.appendSlice(allocator, i); // This should be encoded
try normalized.append(allocator, '=');
try normalized.appendSlice(i); // This should be encoded
try normalized.append('=');
continue;
}
@ -837,12 +831,12 @@ fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const
// Double-encode any = in the value. But not anything else?
const weird_equals_in_value_thing = try replace(allocator, value, "%3D", "%253D");
defer allocator.free(weird_equals_in_value_thing);
try normalized.appendSlice(allocator, key);
try normalized.append(allocator, '=');
try normalized.appendSlice(allocator, weird_equals_in_value_thing);
try normalized.appendSlice(key);
try normalized.append('=');
try normalized.appendSlice(weird_equals_in_value_thing);
}
return normalized.toOwnedSlice(allocator);
return normalized.toOwnedSlice();
}
fn replace(allocator: std.mem.Allocator, haystack: []const u8, needle: []const u8, replacement_value: []const u8) ![]const u8 {
@ -881,7 +875,7 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
allocator.free(h.name);
allocator.free(h.value);
}
dest.deinit(allocator);
dest.deinit();
}
var total_len: usize = 0;
var total_name_len: usize = 0;
@ -911,15 +905,15 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
defer allocator.free(value);
const n = try std.ascii.allocLowerString(allocator, h.name);
const v = try std.fmt.allocPrint(allocator, "{s}", .{value});
try dest.append(allocator, .{ .name = n, .value = v });
try dest.append(.{ .name = n, .value = v });
}
std.sort.pdq(std.http.Header, dest.items, {}, lessThan);
var dest_str = try std.ArrayList(u8).initCapacity(allocator, total_len);
defer dest_str.deinit(allocator);
defer dest_str.deinit();
var signed_headers = try std.ArrayList(u8).initCapacity(allocator, total_name_len);
defer signed_headers.deinit(allocator);
defer signed_headers.deinit();
var first = true;
for (dest.items) |h| {
dest_str.appendSliceAssumeCapacity(h.name);
@ -932,8 +926,8 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
signed_headers.appendSliceAssumeCapacity(h.name);
}
return CanonicalHeaders{
.str = try dest_str.toOwnedSlice(allocator),
.signed_headers = try signed_headers.toOwnedSlice(allocator),
.str = try dest_str.toOwnedSlice(),
.signed_headers = try signed_headers.toOwnedSlice(),
};
}
@ -942,7 +936,6 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
const in_quote = false;
var start: usize = 0;
const rc = try allocator.alloc(u8, value.len);
defer allocator.free(rc);
var rc_inx: usize = 0;
for (value, 0..) |c, i| {
if (!started and !std.ascii.isWhitespace(c)) {
@ -960,7 +953,8 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
// Trim end
while (std.ascii.isWhitespace(rc[rc_inx - 1]))
rc_inx -= 1;
return try allocator.dupe(u8, rc[0..rc_inx]);
_ = allocator.resize(rc, rc_inx);
return rc[0..rc_inx];
}
fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
_ = context;
@ -978,7 +972,7 @@ fn hash(allocator: std.mem.Allocator, payload: []const u8, sig_type: SignatureTy
};
var out: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined;
std.crypto.hash.sha2.Sha256.hash(to_hash, &out, .{});
return try std.fmt.allocPrint(allocator, "{x}", .{out});
return try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexLower(&out)});
}
// SignedHeaders + '\n' +
// HexEncode(Hash(RequestPayload))
@ -992,7 +986,6 @@ test "canonical uri" {
const path = "/documents and settings/?foo=bar";
const expected = "/documents%2520and%2520settings/";
const actual = try canonicalUri(allocator, path, true);
defer allocator.free(actual);
try std.testing.expectEqualStrings(expected, actual);
@ -1016,13 +1009,13 @@ test "canonical query" {
test "canonical headers" {
const allocator = std.testing.allocator;
var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
defer headers.deinit(allocator);
try headers.append(allocator, .{ .name = "Host", .value = "iam.amazonaws.com" });
try headers.append(allocator, .{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
try headers.append(allocator, .{ .name = "User-Agent", .value = "This header should be skipped" });
try headers.append(allocator, .{ .name = "My-header1", .value = " a b c " });
try headers.append(allocator, .{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
try headers.append(allocator, .{ .name = "My-header2", .value = " \"a b c\" " });
defer headers.deinit();
try headers.append(.{ .name = "Host", .value = "iam.amazonaws.com" });
try headers.append(.{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
try headers.append(.{ .name = "User-Agent", .value = "This header should be skipped" });
try headers.append(.{ .name = "My-header1", .value = " a b c " });
try headers.append(.{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
try headers.append(.{ .name = "My-header2", .value = " \"a b c\" " });
const expected =
\\content-type:application/x-www-form-urlencoded; charset=utf-8
\\host:iam.amazonaws.com
@ -1041,12 +1034,12 @@ test "canonical headers" {
test "canonical request" {
const allocator = std.testing.allocator;
var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
defer headers.deinit(allocator);
try headers.append(allocator, .{ .name = "User-agent", .value = "c sdk v1.0" });
defer headers.deinit();
try headers.append(.{ .name = "User-agent", .value = "c sdk v1.0" });
// In contrast to AWS CRT (aws-c-auth), we add the date as part of the
// signing operation. They add it as part of the canonicalization
try headers.append(allocator, .{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
try headers.append(allocator, .{ .name = "Host", .value = "example.amazonaws.com" });
try headers.append(.{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
try headers.append(.{ .name = "Host", .value = "example.amazonaws.com" });
const req = base.Request{
.path = "/",
.method = "GET",
@ -1101,10 +1094,10 @@ test "can sign" {
const allocator = std.testing.allocator;
var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
defer headers.deinit(allocator);
try headers.append(allocator, .{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
try headers.append(allocator, .{ .name = "Content-Length", .value = "13" });
try headers.append(allocator, .{ .name = "Host", .value = "example.amazonaws.com" });
defer headers.deinit();
try headers.append(.{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
try headers.append(.{ .name = "Content-Length", .value = "13" });
try headers.append(.{ .name = "Host", .value = "example.amazonaws.com" });
const req = base.Request{
.path = "/",
.query = "",
@ -1171,27 +1164,25 @@ test "can verify server request" {
"X-Amz-Date: 20230908T170252Z\r\n" ++
"x-amz-content-sha256: fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9\r\n" ++
"Authorization: AWS4-HMAC-SHA256 Credential=ACCESS/20230908/us-west-2/s3/aws4_request, SignedHeaders=accept;content-length;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=fcc43ce73a34c9bd1ddf17e8a435f46a859812822f944f9eeb2aabcd64b03523\r\n\r\nbar";
var reader = std.Io.Reader.fixed(req);
var body_reader = std.Io.Reader.fixed("bar");
var read_buffer: [1024]u8 = undefined;
@memcpy(read_buffer[0..req.len], req);
var server: std.http.Server = .{
.out = undefined, // We're not sending a response here
.reader = .{
.in = &reader,
.interface = undefined,
.state = .received_head,
.max_head_len = req.len,
},
.connection = undefined,
.state = .ready,
.read_buffer = &read_buffer,
.read_buffer_len = req.len,
.next_request_start = 0,
};
var request: std.http.Server.Request = .{
.server = &server,
.head = try std.http.Server.Request.Head.parse(req),
.head_buffer = req,
.head_end = req.len - 3,
.head = try std.http.Server.Request.Head.parse(read_buffer[0 .. req.len - 3]),
.reader_state = undefined,
};
// const old_level = std.testing.log_level;
// std.testing.log_level = .debug;
// defer std.testing.log_level = old_level;
try std.testing.expect(try verifyServerRequest(allocator, &request, &body_reader, struct {
var fbs = std.io.fixedBufferStream("bar");
try std.testing.expect(try verifyServerRequest(allocator, &request, fbs.reader(), struct {
cred: Credentials,
const Self = @This();
@ -1229,25 +1220,22 @@ test "can verify server request without x-amz-content-sha256" {
const req_data = head ++ body;
var read_buffer: [2048]u8 = undefined;
@memcpy(read_buffer[0..req_data.len], req_data);
var reader = std.Io.Reader.fixed(&read_buffer);
var body_reader = std.Io.Reader.fixed(body);
var server: std.http.Server = .{
.out = undefined, // We're not sending a response here
.reader = .{
.interface = undefined,
.in = &reader,
.state = .received_head,
.max_head_len = 1024,
},
.connection = undefined,
.state = .ready,
.read_buffer = &read_buffer,
.read_buffer_len = req_data.len,
.next_request_start = 0,
};
var request: std.http.Server.Request = .{
.server = &server,
.head = try std.http.Server.Request.Head.parse(head),
.head_buffer = head,
.head_end = head.len,
.head = try std.http.Server.Request.Head.parse(read_buffer[0..head.len]),
.reader_state = undefined,
};
{
var h = try std.ArrayList(std.http.Header).initCapacity(allocator, 4);
defer h.deinit(allocator);
var h = std.ArrayList(std.http.Header).init(allocator);
defer h.deinit();
const signed_headers = &[_][]const u8{ "content-type", "host", "x-amz-date", "x-amz-target" };
var it = request.iterateHeaders();
while (it.next()) |source| {
@ -1256,7 +1244,7 @@ test "can verify server request without x-amz-content-sha256" {
match = std.ascii.eqlIgnoreCase(s, source.name);
if (match) break;
}
if (match) try h.append(allocator, .{ .name = source.name, .value = source.value });
if (match) try h.append(.{ .name = source.name, .value = source.value });
}
const req = base.Request{
.path = "/",
@ -1293,7 +1281,9 @@ test "can verify server request without x-amz-content-sha256" {
}
{ // verification
try std.testing.expect(try verifyServerRequest(allocator, &request, &body_reader, struct {
var fis = std.io.fixedBufferStream(body[0..]);
try std.testing.expect(try verifyServerRequest(allocator, &request, fis.reader(), struct {
cred: Credentials,
const Self = @This();

File diff suppressed because it is too large Load diff

47
src/case.zig Normal file
View file

@ -0,0 +1,47 @@
const std = @import("std");
const expectEqualStrings = std.testing.expectEqualStrings;
/// Converts a snake_case name to camelCase, e.g. "access_key_id" -> "accessKeyId".
/// Only single-byte codepoints (<= 0xff) are supported; anything wider returns
/// error.UnicodeNotSupported. Caller owns the returned slice.
pub fn snakeToCamel(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    // NOTE(review): Utf8View.init panics on invalid UTF-8 ("catch unreachable");
    // callers presumably only pass known-good identifiers — confirm.
    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
    var target_inx: usize = 0;
    var previous_ascii: u8 = 0;
    const rc = try allocator.alloc(u8, name.len);
    errdefer allocator.free(rc);
    while (utf8_name.nextCodepoint()) |cp| {
        if (cp > 0xff) return error.UnicodeNotSupported;
        const ascii_char = @as(u8, @truncate(cp));
        if (ascii_char != '_') {
            // An underscore followed by a lowercase letter starts a new word:
            // drop the underscore and capitalize the letter.
            if (previous_ascii == '_' and ascii_char >= 'a' and ascii_char <= 'z') {
                rc[target_inx] = ascii_char - ('a' - 'A');
            } else {
                rc[target_inx] = ascii_char;
            }
            target_inx += 1;
        }
        previous_ascii = ascii_char;
    }
    // Shrink the allocation to the used length. If the allocator cannot resize
    // in place, return a correctly-sized copy instead: previously the resize
    // result was discarded, so the caller's free() could pass a slice whose
    // length differs from the allocated length, which is illegal for checking
    // allocators (e.g. std.testing.allocator / GeneralPurposeAllocator).
    if (allocator.resize(rc, target_inx)) return rc[0..target_inx];
    defer allocator.free(rc);
    return try allocator.dupe(u8, rc[0..target_inx]);
}
/// Converts a snake_case name to PascalCase: camelCase it, then uppercase the
/// first character. Caller owns the returned slice.
pub fn snakeToPascal(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    const rc = try snakeToCamel(allocator, name);
    // Guard empty results ("" or all-underscore input), which previously
    // indexed rc[0] out of bounds.
    if (rc.len > 0 and rc[0] >= 'a' and rc[0] <= 'z') {
        rc[0] -= ('a' - 'A');
    }
    return rc;
}
// Multi-word snake_case: interior word boundaries become capital letters.
test "converts from snake to camelCase" {
    const allocator = std.testing.allocator;
    const camel = try snakeToCamel(allocator, "access_key_id");
    defer allocator.free(camel);
    try expectEqualStrings("accessKeyId", camel);
}
// A name with no underscores must pass through unchanged.
test "single word" {
    const allocator = std.testing.allocator;
    const camel = try snakeToCamel(allocator, "word");
    defer allocator.free(camel);
    try expectEqualStrings("word", camel);
}

414
src/date.zig Normal file
View file

@ -0,0 +1,414 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.
const std = @import("std");
const log = std.log.scoped(.date);
/// A civil (calendar) date and time in UTC with one-second resolution.
/// `month` and `day` are 1-based; `hour`/`minute`/`second` are 0-based.
pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };
const SECONDS_PER_DAY = 86400; //* 24* 60 * 60 */
const DAYS_PER_YEAR = 365; //* Normal year (no leap year) */
/// Converts a Unix timestamp (seconds since 1970-01-01T00:00:00Z) to a civil
/// UTC DateTime using Gregorian calendar arithmetic.
/// NOTE(review): the @intCast to u64 below panics in safe builds for negative
/// timestamps (dates before the epoch) — confirm callers never pass one.
pub fn timestampToDateTime(timestamp: i64) DateTime {
    // from https://de.wikipedia.org/wiki/Unixzeit (German Wikipedia: Unix time)
    const unixtime = @as(u64, @intCast(timestamp));
    const DAYS_IN_4_YEARS = 1461; //* 4*365 + 1 */
    const DAYS_IN_100_YEARS = 36524; //* 100*365 + 25 - 1 */
    const DAYS_IN_400_YEARS = 146097; //* 400*365 + 100 - 4 + 1 */
    const DAY_NUMBER_ADJUSTED_1970_01_01 = 719468; //* Day number relates to March 1st */
    // Work in whole days on a March-1st-anchored day number, which places the
    // leap day at the end of the counting "year".
    var dayN: u64 = DAY_NUMBER_ADJUSTED_1970_01_01 + unixtime / SECONDS_PER_DAY;
    const seconds_since_midnight: u64 = unixtime % SECONDS_PER_DAY;
    var temp: u64 = 0;
    // Leap year rules for Gregorian Calendars
    // Any year divisible by 100 is not a leap year unless also divisible by 400
    temp = 4 * (dayN + DAYS_IN_100_YEARS + 1) / DAYS_IN_400_YEARS - 1;
    var year = @as(u16, @intCast(100 * temp));
    dayN -= DAYS_IN_100_YEARS * temp + temp / 4;
    // For Julian calendars, each year divisible by 4 is a leap year
    temp = 4 * (dayN + DAYS_PER_YEAR + 1) / DAYS_IN_4_YEARS - 1;
    year += @as(u16, @intCast(temp));
    dayN -= DAYS_PER_YEAR * temp + temp / 4;
    // dayN calculates the days of the year in relation to March 1
    var month = @as(u8, @intCast((5 * dayN + 2) / 153));
    const day = @as(u8, @intCast(dayN - (@as(u64, @intCast(month)) * 153 + 2) / 5 + 1));
    // 153 = 31+30+31+30+31 Days for the 5 months from March through July
    // 153 = 31+30+31+30+31 Days for the 5 months from August through December
    // 31+28 Days for January and February (see below)
    // +2: Rounding adjustment
    // +1: The first day in March is March 1st (not March 0)
    month += 3; // Convert from the day that starts on March 1st, to a human year */
    if (month > 12) { // months 13 and 14 become 1 (January) and 2 (February) of the next year
        month -= 12;
        year += 1;
    }
    const hours = @as(u8, @intCast(seconds_since_midnight / 3600));
    const minutes = @as(u8, @intCast(seconds_since_midnight % 3600 / 60));
    const seconds = @as(u8, @intCast(seconds_since_midnight % 60));
    return DateTime{ .day = day, .month = month, .year = year, .hour = hours, .minute = minutes, .second = seconds };
}
/// Convenience wrapper: parses an English (RFC 1123-style) date string such as
/// "Fri, 03 Jun 2022 18:12:36 GMT" straight to a Unix timestamp.
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    const parsed = try parseEnglishToDateTime(data);
    return try dateTimeToTimestamp(parsed);
}
const EnglishParsingState = enum { Start, Day, Month, Year, Hour, Minute, Second, End };
/// Converts an English date string to a DateTime. May not handle dates before
/// the epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT".
/// Returns error.InvalidFormat unless the string ends in "GMT".
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    // Fri, 03 Jun 2022 18:12:36 GMT
    if (!std.mem.endsWith(u8, data, "GMT")) return error.InvalidFormat;
    var start: usize = 0;
    var state = EnglishParsingState.Start;
    // Anything not explicitly set by our string would be 0
    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    for (data, 0..) |ch, i| {
        switch (ch) {
            ',' => {},
            ' ', ':' => {
                // A delimiter closes the field accumulated since `start`:
                // endEnglishState stores data[start..i] into rc and tells us
                // which field the next token represents.
                const next_state = try endEnglishState(state, &rc, data[start..i]);
                state = next_state;
                start = i + 1;
            },
            else => {}, // We need to be pretty trusting on this format...
        }
    }
    // The trailing "GMT" token is never committed — the .Second field was
    // already closed by the space before it.
    return rc;
}
// Finishes one delimited token of an English date string: parses `prev_data`
// into the DateTime field named by `current_state` and returns the state for
// the following token. Reaching .End again is an error.
fn endEnglishState(current_state: EnglishParsingState, date: *DateTime, prev_data: []const u8) !EnglishParsingState {
    log.debug("endEnglishState. Current state '{}', data: {s}", .{ current_state, prev_data });
    // A single switch both stores the just-finished field and names its successor.
    switch (current_state) {
        .Start => return .Day, // day-of-week prefix; nothing to store
        .Day => {
            date.day = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Month;
        },
        .Month => {
            date.month = try parseEnglishMonth(prev_data);
            return .Year;
        },
        .Year => {
            date.year = try std.fmt.parseUnsigned(u16, prev_data, 10);
            return .Hour;
        },
        .Hour => {
            date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Minute;
        },
        .Minute => {
            date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Second;
        },
        .Second => {
            date.second = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .End;
        },
        .End => return error.IllegalStateTransition,
    }
}
// Maps a case-insensitive English month-name prefix ("Jan".."Dec") to its
// 1-based month number; anything else is error.InvalidMonth.
fn parseEnglishMonth(data: []const u8) !u8 {
    const abbreviations = [_][]const u8{
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    };
    for (abbreviations, 0..) |abbrev, inx| {
        if (std.ascii.startsWithIgnoreCase(data, abbrev))
            return @as(u8, @intCast(inx + 1));
    }
    return error.InvalidMonth;
}
/// Convenience wrapper: parses an ISO 8601 string straight to a Unix timestamp.
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    const parsed = try parseIso8601ToDateTime(data);
    return try dateTimeToTimestamp(parsed);
}
const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts an ISO 8601 string to a DateTime. May not handle dates before the
/// epoch. Accepts basic format ("YYYYMMDDThhmmss", optional trailing 'Z') and
/// extended format ("YYYY-MM-DDThh:mm:ss[.fff]Z"); extended format requires a
/// trailing 'Z' (error.LocalTimeNotSupported otherwise).
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    // Basic format YYYYMMDDThhmmss
    if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
        return try parseIso8601BasicFormatToDateTime(data);
    if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
        return try parseIso8601BasicFormatToDateTime(data);
    var start: usize = 0;
    var state = IsoParsingState.Start;
    // Anything not explicitly set by our string would be 0
    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    var zulu_time = false;
    for (data, 0..) |ch, i| {
        switch (ch) {
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
                if (state == .Start) state = .Year;
            },
            '?', '~', '%' => {
                // These characters all specify the type of time (approximate, etc)
                // and we will ignore
            },
            '.', '-', ':', 'T' => {
                // A delimiter closes the field accumulated since `start`:
                // endIsoState stores data[start..i] into rc and tells us which
                // field the next run of digits represents.
                const next_state = try endIsoState(state, &rc, data[start..i]);
                state = next_state;
                start = i + 1;
            },
            'Z' => zulu_time = true,
            else => {
                log.err("Invalid character: {c}", .{ch});
                return error.InvalidCharacter;
            },
        }
    }
    if (!zulu_time) return error.LocalTimeNotSupported;
    // We know we have a Z at the end of this, so let's grab the last bit
    // of the string, minus the 'Z', and fly, eagles, fly!
    _ = try endIsoState(state, &rc, data[start .. data.len - 1]);
    return rc;
}
// Parses fixed-width ISO 8601 basic format "YYYYMMDDThhmmss" by slicing each
// field at its known offset (the 'T' at index 8 was checked by the caller).
fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
    const parse = std.fmt.parseUnsigned;
    return DateTime{
        .year = try parse(u16, data[0..4], 10),
        .month = try parse(u8, data[4..6], 10),
        .day = try parse(u8, data[6..8], 10),
        .hour = try parse(u8, data[9..11], 10),
        .minute = try parse(u8, data[11..13], 10),
        .second = try parse(u8, data[13..15], 10),
    };
}
// Finishes one delimited field of an extended-format ISO 8601 string: parses
// `prev_data` into the DateTime field named by `current_state` and returns
// the state for the next field.
// TODO: Signed years (which ISO 8601 permits) are not handled; they fail
// explicitly in parseUnsigned below.
fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
    log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
    // A single switch both stores the just-finished field and names its successor.
    switch (current_state) {
        .Start, .End => return error.IllegalStateTransition,
        .Year => {
            date.year = try std.fmt.parseUnsigned(u16, prev_data, 10);
            return .Month;
        },
        .Month => {
            date.month = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Day;
        },
        .Day => {
            date.day = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Hour;
        },
        .Hour => {
            date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Minute;
        },
        .Minute => {
            date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Second;
        },
        .Second => {
            date.second = try std.fmt.parseUnsigned(u8, prev_data, 10);
            return .Millisecond;
        },
        // Fractional seconds are discarded — our granularity is one second.
        .Millisecond => return .End,
    }
}
/// Converts a DateTime (UTC) to a Unix timestamp: the signed number of
/// seconds from midnight 1970-01-01 to `datetime`.
pub fn dateTimeToTimestamp(datetime: DateTime) !i64 {
    const unix_epoch = DateTime{
        .year = 1970,
        .month = 1,
        .day = 1,
        .hour = 0,
        .minute = 0,
        .second = 0,
    };
    return secondsBetween(unix_epoch, datetime);
}
// Error set for DateTime -> timestamp conversion; raised when a field value
// fails validateDatetime's range checks.
const DateTimeToTimestampError = error{
    DateTimeOutOfRange,
};
/// Signed number of seconds from `start` to `end` (negative when `end`
/// precedes `start`). Both endpoints are range-validated first.
fn secondsBetween(start: DateTime, end: DateTime) DateTimeToTimestampError!i64 {
    try validateDatetime(start);
    try validateDatetime(end);
    // Keep start <= end by year; a reversed range is the negated forward range.
    if (end.year < start.year) return -1 * try secondsBetween(end, start);
    if (start.month != 1 or
        start.day != 1 or
        start.hour != 0 or
        start.minute != 0 or
        start.second != 0)
    {
        // Normalize start to Jan 1 00:00:00 of its year, then subtract the
        // part of that year that had already elapsed at the original start.
        const seconds_into_start_year = secondsFromBeginningOfYear(
            start.year,
            start.month,
            start.day,
            start.hour,
            start.minute,
            start.second,
        );
        const new_start = DateTime{
            .year = start.year,
            .month = 1,
            .day = 1,
            .hour = 0,
            .minute = 0,
            .second = 0,
        };
        return (try secondsBetween(new_start, end)) - seconds_into_start_year;
    }
    // From here on, start is exactly Jan 1 00:00:00 of start.year.
    const leap_years_between = leapYearsBetween(start.year, end.year);
    const add_days: u1 = 0; // always zero; kept to document the day formula
    const years_diff = end.year - start.year;
    // log.debug("Years from epoch: {d}, Leap years: {d}", .{ years_diff, leap_years_between });
    // NOTE(review): years_diff * DAYS_PER_YEAR is u16 arithmetic and could
    // overflow for spans of roughly 180+ years — confirm expected input range.
    const days_diff: i32 = (years_diff * DAYS_PER_YEAR) + leap_years_between + add_days;
    // log.debug("Days with leap year, without month: {d}", .{days_diff});
    const seconds_into_year = secondsFromBeginningOfYear(
        end.year,
        end.month,
        end.day,
        end.hour,
        end.minute,
        end.second,
    );
    return (days_diff * SECONDS_PER_DAY) + @as(i64, seconds_into_year);
}
/// Rejects field values no real date can have. Day is only checked against 31
/// generically (not per-month), matching prior behavior.
fn validateDatetime(dt: DateTime) !void {
    // month and day are 1-based: a zero (e.g. from a partially parsed string)
    // previously slipped through and made secondsFromBeginningOfYear walk its
    // month table out of bounds / underflow `day - 1` and panic. Reject it
    // here with the documented error instead.
    if (dt.month < 1 or dt.month > 12 or
        dt.day < 1 or dt.day > 31 or
        dt.hour >= 24 or
        dt.minute >= 60 or
        dt.second >= 60) return error.DateTimeOutOfRange;
}
// Seconds elapsed from Jan 1 00:00:00 of `year` up to the given moment within
// the same year. `month`/`day` are 1-based; `hour`/`minute`/`second` 0-based.
fn secondsFromBeginningOfYear(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) u32 {
    const leap_month_lengths: [12]u5 = .{ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
    const normal_month_lengths: [12]u5 = .{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
    const month_lengths = if (isLeapYear(year)) leap_month_lengths else normal_month_lengths;
    // Sum the whole months preceding `month` (months are 1-based, table 0-based).
    var elapsed_days: u32 = 0;
    for (month_lengths[0 .. month - 1]) |len|
        elapsed_days += len;
    // Days within the month are 1-based while hours/minutes are zero-based, so
    // the current (partial) day itself is excluded here.
    elapsed_days += day - 1;
    // log.debug("Total days diff: {d}", .{elapsed_days});
    // From here out, convert everything to seconds.
    var elapsed_seconds: u32 = elapsed_days * SECONDS_PER_DAY;
    elapsed_seconds += @as(u32, hour) * 60 * 60;
    elapsed_seconds += @as(u32, minute) * 60;
    elapsed_seconds += @as(u32, second);
    return elapsed_seconds;
}
// Gregorian leap-year rule: every 4th year, except centuries, except every
// 400th year.
fn isLeapYear(year: u16) bool {
    if (year % 400 == 0) return true;
    if (year % 100 == 0) return false;
    return year % 4 == 0;
}
/// Counts Gregorian leap years in the half-open range
/// [min(args), max(args)) — argument order does not matter.
fn leapYearsBetween(start_year_inclusive: u16, end_year_exclusive: u16) u16 {
    const start = @min(start_year_inclusive, end_year_exclusive);
    const end = @max(start_year_inclusive, end_year_exclusive);
    var current = start;
    // log.debug("Leap years starting from {d}, ending at {d}", .{ start, end });
    // Advance to the first multiple of 4 at or after start (leap candidates
    // only occur on multiples of 4).
    while (current % 4 != 0 and current < end) {
        current += 1;
    }
    if (current == end) return 0; // No leap years here. E.g. 1971-1973
    // We're on a potential leap year, and now we can step by 4
    var rc: u16 = 0;
    while (current < end) {
        if (current % 4 == 0) {
            if (current % 100 != 0) {
                // log.debug("Year {d} is leap year", .{current});
                rc += 1;
                current += 4;
                continue;
            }
            // We're on a century, which is normally not a leap year, unless
            // it's divisible by 400
            if (current % 400 == 0) {
                // log.debug("Year {d} is leap year", .{current});
                rc += 1;
            }
        }
        current += 4;
    }
    return rc;
}
/// Logs a DateTime in ISO-8601 "YYYY-MM-DDThh:mm:ssZ" form at debug level.
fn printDateTime(dt: DateTime) void {
    // All fields are zero-padded on the left. The seconds specifier was
    // previously "{:0<2}" (left-aligned, zero-filled on the right), which
    // printed e.g. 5 seconds as "50" instead of "05".
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0>2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}
/// Logs the current UTC wall-clock time at debug level.
pub fn printNowUtc() void {
    const now = std.time.timestamp();
    printDateTime(timestampToDateTime(now));
}
// Spot-checks timestamp -> DateTime against independently computed dates.
test "Convert timestamp to datetime" {
    printDateTime(timestampToDateTime(std.time.timestamp()));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
    // Get time for date: https://wtools.io/convert-date-time-to-unix-time
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}
// Round-trip counterparts of the timestamps in the test above.
test "Convert datetime to timestamp" {
    try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
    try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
    try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}
// Covers basic format (no Z), extended format with non-padded fields, and
// fractional seconds (discarded).
test "Convert ISO8601 string to timestamp" {
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
// Pre-epoch dates must yield negative timestamps (reversed-range recursion
// in secondsBetween).
test "Convert datetime to timestamp before 1970" {
    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}
// English/RFC1123-style date header format as sent by AWS responses.
test "Convert whatever AWS is sending us to timestamp" {
    const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
    try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}

View file

@ -14,117 +14,8 @@ const testing = std.testing;
const mem = std.mem;
const maxInt = std.math.maxInt;
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !void {
if (@typeInfo(@TypeOf(map)) == .optional) {
if (map) |m| serializeMapInternal(m, key, options, out_stream);
} else {
serializeMapInternal(map, key, options, out_stream);
}
}
fn serializeMapKey(key: []const u8, options: anytype, out_stream: anytype) !void {
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
}
fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
if (map.len == 0) {
try out_stream.writeByte('{');
try out_stream.writeByte('}');
return;
}
// TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
var child_options = options;
if (child_options.whitespace) |*whitespace| {
whitespace.indent_level += 1;
}
try out_stream.writeByte('{');
if (options.whitespace) |_|
try out_stream.writeByte('\n');
for (map, 0..) |tag, i| {
// TODO: Deal with escaping and general "json.stringify" the values...
if (child_options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('"');
try jsonEscape(tag.key, child_options, out_stream);
_ = try out_stream.write("\":");
if (child_options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('"');
try jsonEscape(tag.value, child_options, out_stream);
try out_stream.writeByte('"');
if (i < map.len - 1) {
try out_stream.writeByte(',');
}
if (child_options.whitespace) |_|
try out_stream.writeByte('\n');
}
if (options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('}');
}
/// Writes `"key": { ... }` for the given map and reports whether a field
/// was emitted (always true — even an empty map serializes as "{}").
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    try serializeMapKey(key, options, out_stream);
    try serializeMapAsObject(map, options, out_stream);
    return true;
}
// code within jsonEscape lifted from json.zig in stdlib
// Writes `value` to `out_stream` as the body of a JSON string (no
// surrounding quotes), escaping per RFC 8259 plus the optional solidus
// and unicode-escape modes carried in `options.string.String`.
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
    var i: usize = 0;
    while (i < value.len) : (i += 1) {
        switch (value[i]) {
            // normal ascii character
            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
            // only 2 characters that *must* be escaped
            '\\' => try out_stream.writeAll("\\\\"),
            '\"' => try out_stream.writeAll("\\\""),
            // solidus is optional to escape
            '/' => {
                if (options.string.String.escape_solidus) {
                    try out_stream.writeAll("\\/");
                } else {
                    try out_stream.writeByte('/');
                }
            },
            // control characters with short escapes
            // TODO: option to switch between unicode and 'short' forms?
            0x8 => try out_stream.writeAll("\\b"),
            0xC => try out_stream.writeAll("\\f"),
            '\n' => try out_stream.writeAll("\\n"),
            '\r' => try out_stream.writeAll("\\r"),
            '\t' => try out_stream.writeAll("\\t"),
            else => {
                // Remaining cases: multi-byte UTF-8 sequences, or 1-byte
                // control characters not covered by the short escapes above.
                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                if (ulen == 1 or options.string.String.escape_unicode) {
                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                    try outputUnicodeEscape(codepoint, out_stream);
                } else {
                    try out_stream.writeAll(value[i .. i + ulen]);
                }
                // Skip past the extra bytes of the sequence just handled.
                i += ulen - 1;
            },
        }
    }
}
// pub const WriteStream = @import("json/write_stream.zig").WriteStream;
// pub const writeStream = @import("json/write_stream.zig").writeStream;
const StringEscapes = union(enum) {
None,
@ -1371,8 +1262,137 @@ pub const Value = union(enum) {
String: []const u8,
Array: Array,
Object: ObjectMap,
/// Serializes this Value variant as JSON to `out_stream`.
pub fn jsonStringify(
    value: @This(),
    options: StringifyOptions,
    out_stream: anytype,
) @TypeOf(out_stream).Error!void {
    switch (value) {
        // Scalar variants delegate to the generic stringify.
        .Null => try stringify(null, options, out_stream),
        .Bool => |inner| try stringify(inner, options, out_stream),
        .Integer => |inner| try stringify(inner, options, out_stream),
        .Float => |inner| try stringify(inner, options, out_stream),
        // Pre-rendered numbers are emitted verbatim — no re-formatting.
        .NumberString => |inner| try out_stream.writeAll(inner),
        .String => |inner| try stringify(inner, options, out_stream),
        .Array => |inner| try stringify(inner.items, options, out_stream),
        .Object => |inner| {
            try out_stream.writeByte('{');
            var field_output = false;
            // Entries are indented one level deeper than the braces.
            var child_options = options;
            if (child_options.whitespace) |*child_whitespace| {
                child_whitespace.indent_level += 1;
            }
            var it = inner.iterator();
            while (it.next()) |entry| {
                // Comma before every entry except the first.
                if (!field_output) {
                    field_output = true;
                } else {
                    try out_stream.writeByte(',');
                }
                if (child_options.whitespace) |child_whitespace| {
                    try out_stream.writeByte('\n');
                    try child_whitespace.outputIndent(out_stream);
                }
                try stringify(entry.key_ptr, options, out_stream);
                try out_stream.writeByte(':');
                // Optional space between ':' and the value.
                if (child_options.whitespace) |child_whitespace| {
                    if (child_whitespace.separator) {
                        try out_stream.writeByte(' ');
                    }
                }
                try stringify(entry.value_ptr, child_options, out_stream);
            }
            // Closing brace goes on its own line (parent indent) when the
            // object emitted at least one field.
            if (field_output) {
                if (options.whitespace) |whitespace| {
                    try out_stream.writeByte('\n');
                    try whitespace.outputIndent(out_stream);
                }
            }
            try out_stream.writeByte('}');
        },
    }
}
/// Debug helper: writes this Value as compact JSON to stderr, holding the
/// stderr mutex so output does not interleave with concurrent logging.
pub fn dump(self: Value) void {
    var held = std.debug.getStderrMutex().acquire();
    defer held.release();
    const stderr = std.io.getStdErr().writer();
    // Best effort: serialization/write errors are deliberately swallowed.
    stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
}
};
/// Debug helper: writes any serializable value as compact JSON to stderr,
/// holding the stderr mutex to avoid interleaved output.
pub fn dump(value: anytype) void {
    var held = std.debug.getStderrMutex().acquire();
    defer held.release();
    const stderr = std.io.getStdErr().writer();
    // Best effort: serialization/write errors are deliberately swallowed.
    stringify(value, StringifyOptions{ .whitespace = null }, stderr) catch return;
}
// Exercises Value.jsonStringify for every variant, checking exact output.
test "Value.jsonStringify" {
    {
        // Null variant renders as the literal `null`.
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
    }
    {
        // NumberString is passed through verbatim.
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
    }
    {
        // Floats render in exponent notation.
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e1");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var vals = [_]Value{
            .{ .Integer = 1 },
            .{ .Integer = 2 },
            .{ .NumberString = "3" },
        };
        // NOTE(review): allocator is `undefined` — presumably safe because
        // the list is never grown or freed here; verify fromOwnedSlice
        // does not touch the allocator on this code path.
        try (Value{
            .Array = Array.fromOwnedSlice(undefined, &vals),
        }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var obj = ObjectMap.init(testing.allocator);
        defer obj.deinit();
        try obj.putNoClobber("a", .{ .String = "b" });
        try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
    }
}
/// parse tokens from a stream, returning `false` if they do not decode to `value`
fn parsesTo(comptime T: type, value: T, tokens: *TokenStream, options: ParseOptions) !bool {
// TODO: should be able to write this function to not require an allocator
@ -1577,22 +1597,12 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.@"enum" => |enumInfo| {
switch (token) {
.Number => |numberToken| {
if (!numberToken.is_integer) {
// probably is in scientific notation
const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
}
if (!numberToken.is_integer) return error.UnexpectedToken;
const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
return try std.meta.intToEnum(T, n);
},
.String => |stringToken| {
const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
if (std.meta.hasFn(T, "parse")) {
return try T.parse(source_slice);
}
switch (stringToken.escapes) {
.None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
.Some => {
@ -1713,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
}
inline for (structInfo.fields, 0..) |field, i| {
if (!fields_seen[i]) {
if (field.default_value_ptr) |default_value_ptr| {
if (field.default_value) |default_value_ptr| {
if (!field.is_comptime) {
const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
@field(r, field.name) = default_value;
@ -1763,21 +1773,21 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.pointer => |ptrInfo| {
const allocator = options.allocator orelse return error.AllocatorRequired;
switch (ptrInfo.size) {
.one => {
.One => {
const r: T = try allocator.create(ptrInfo.child);
errdefer allocator.destroy(r);
r.* = try parseInternal(ptrInfo.child, token, tokens, options);
return r;
},
.slice => {
.Slice => {
switch (token) {
.ArrayBegin => {
var arraylist = std.ArrayList(ptrInfo.child){};
var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
errdefer {
while (arraylist.pop()) |v| {
while (arraylist.popOrNull()) |v| {
parseFree(ptrInfo.child, v, options);
}
arraylist.deinit(allocator);
arraylist.deinit();
}
while (true) {
@ -1787,11 +1797,11 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
else => {},
}
try arraylist.ensureTotalCapacity(allocator, arraylist.items.len + 1);
try arraylist.ensureTotalCapacity(arraylist.items.len + 1);
const v = try parseInternal(ptrInfo.child, tok, tokens, options);
arraylist.appendAssumeCapacity(v);
}
return arraylist.toOwnedSlice(allocator);
return arraylist.toOwnedSlice();
},
.String => |stringToken| {
if (ptrInfo.child != u8) return error.UnexpectedToken;
@ -1817,12 +1827,12 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
if (key_type == null) return error.UnexpectedToken;
const value_type = typeForField(ptrInfo.child, "value");
if (value_type == null) return error.UnexpectedToken;
var arraylist = std.ArrayList(ptrInfo.child){};
var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
errdefer {
while (arraylist.pop()) |v| {
while (arraylist.popOrNull()) |v| {
parseFree(ptrInfo.child, v, options);
}
arraylist.deinit(allocator);
arraylist.deinit();
}
while (true) {
const key = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
@ -1831,13 +1841,13 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
else => {},
}
try arraylist.ensureTotalCapacity(allocator, arraylist.items.len + 1);
try arraylist.ensureTotalCapacity(arraylist.items.len + 1);
const key_val = try parseInternal(key_type.?, key, tokens, options);
const val = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
const val_val = try parseInternal(value_type.?, val, tokens, options);
arraylist.appendAssumeCapacity(.{ .key = key_val, .value = val_val });
}
return arraylist.toOwnedSlice(allocator);
return arraylist.toOwnedSlice();
},
else => return error.UnexpectedToken,
}
@ -1869,7 +1879,7 @@ fn isMapPattern(comptime T: type) bool {
// Let's just double check before proceeding
const ti = @typeInfo(T);
if (ti != .pointer) return false;
if (ti.pointer.size != .slice) return false;
if (ti.pointer.size != .Slice) return false;
const ti_child = @typeInfo(ti.pointer.child);
if (ti_child != .@"struct") return false;
if (ti_child.@"struct".fields.len != 2) return false;
@ -1925,11 +1935,11 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
.pointer => |ptrInfo| {
const allocator = options.allocator orelse unreachable;
switch (ptrInfo.size) {
.one => {
.One => {
parseFree(ptrInfo.child, value.*, options);
allocator.destroy(value);
},
.slice => {
.Slice => {
for (value) |v| {
parseFree(ptrInfo.child, v, options);
}
@ -2274,7 +2284,7 @@ pub const Parser = struct {
return;
}
var value = p.stack.pop().?;
var value = p.stack.pop();
try p.pushToParent(&value);
},
.String => |s| {
@ -2340,7 +2350,7 @@ pub const Parser = struct {
return;
}
var value = p.stack.pop().?;
var value = p.stack.pop();
try p.pushToParent(&value);
},
.ObjectBegin => {
@ -2794,3 +2804,397 @@ fn outputUnicodeEscape(
try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
}
}
/// Generic JSON serialization for arbitrary Zig values, dispatching on the
/// comptime type of `value`. Types may customize output by declaring
/// `jsonStringify`, `fieldNameFor`, or `jsonStringifyField`.
/// NOTE(review): type-info tags mix lowercase (`.float`, `.optional`) and
/// capitalized (`.One`, `.Slice`) spellings — confirm both are valid for
/// the targeted Zig version.
pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
) !void {
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .float, .comptime_float => {
            // Floats always use exponent notation (e.g. 4.2e1).
            return std.fmt.format(out_stream, "{e}", .{value});
        },
        .int, .comptime_int => {
            return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
        },
        .bool => {
            return out_stream.writeAll(if (value) "true" else "false");
        },
        .null => {
            return out_stream.writeAll("null");
        },
        .optional => {
            // Unwrap and recurse; null payloads serialize as `null`.
            if (value) |payload| {
                return try stringify(payload, options, out_stream);
            } else {
                return try stringify(null, options, out_stream);
            }
        },
        .@"enum" => {
            // Enums must supply their own jsonStringify; there is no default.
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }
            @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
        },
        .@"union" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }
            // Tagged unions serialize only the active payload.
            const info = @typeInfo(T).@"union";
            if (info.tag_type) |UnionTagType| {
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
                        return try stringify(@field(value, u_field.name), options, out_stream);
                    }
                }
            } else {
                @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .@"struct" => |S| {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }
            try out_stream.writeByte('{');
            var field_output = false;
            var child_options = options;
            if (child_options.whitespace) |*child_whitespace| {
                child_whitespace.indent_level += 1;
            }
            inline for (S.fields) |Field| {
                // don't include void fields
                if (Field.type == void) continue;
                var output_this_field = true;
                // Null optionals are skipped unless emit_null is requested.
                if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;
                // The type may override the serialized field name.
                const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                    value.fieldNameFor(Field.name)
                else
                    Field.name;
                // Fields named in options.exclude_fields are suppressed.
                if (options.exclude_fields) |exclude_fields| {
                    for (exclude_fields) |exclude_field| {
                        if (std.mem.eql(u8, final_name, exclude_field)) {
                            output_this_field = false;
                        }
                    }
                }
                // Comma before every emitted field except the first.
                if (!field_output) {
                    field_output = output_this_field;
                } else {
                    if (output_this_field) try out_stream.writeByte(',');
                }
                if (child_options.whitespace) |child_whitespace| {
                    if (output_this_field) try out_stream.writeByte('\n');
                    if (output_this_field) try child_whitespace.outputIndent(out_stream);
                }
                // A jsonStringifyField hook may fully handle this field;
                // it returns true when it wrote the field itself.
                var field_written = false;
                if (comptime std.meta.hasFn(T, "jsonStringifyField")) {
                    if (output_this_field) field_written = try value.jsonStringifyField(Field.name, child_options, out_stream);
                }
                if (!field_written) {
                    if (output_this_field) {
                        try stringify(final_name, options, out_stream);
                        try out_stream.writeByte(':');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        if (child_whitespace.separator) {
                            if (output_this_field) try out_stream.writeByte(' ');
                        }
                    }
                    if (output_this_field) try stringify(@field(value, Field.name), child_options, out_stream);
                }
            }
            // Closing brace on its own line (parent indent) when non-empty.
            if (field_output) {
                if (options.whitespace) |whitespace| {
                    try out_stream.writeByte('\n');
                    try whitespace.outputIndent(out_stream);
                }
            }
            try out_stream.writeByte('}');
            return;
        },
        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
        .pointer => |ptr_info| switch (ptr_info.size) {
            .One => switch (@typeInfo(ptr_info.child)) {
                .array => {
                    // Coerce *[N]T to []const T and serialize as a slice.
                    const Slice = []const std.meta.Elem(ptr_info.child);
                    return stringify(@as(Slice, value), options, out_stream);
                },
                else => {
                    // TODO: avoid loops?
                    return stringify(value.*, options, out_stream);
                },
            },
            // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
            .Slice => {
                // Valid-UTF-8 byte slices become JSON strings, escaped inline.
                if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
                    try out_stream.writeByte('\"');
                    var i: usize = 0;
                    while (i < value.len) : (i += 1) {
                        switch (value[i]) {
                            // normal ascii character
                            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
                            // only 2 characters that *must* be escaped
                            '\\' => try out_stream.writeAll("\\\\"),
                            '\"' => try out_stream.writeAll("\\\""),
                            // solidus is optional to escape
                            '/' => {
                                if (options.string.String.escape_solidus) {
                                    try out_stream.writeAll("\\/");
                                } else {
                                    try out_stream.writeByte('/');
                                }
                            },
                            // control characters with short escapes
                            // TODO: option to switch between unicode and 'short' forms?
                            0x8 => try out_stream.writeAll("\\b"),
                            0xC => try out_stream.writeAll("\\f"),
                            '\n' => try out_stream.writeAll("\\n"),
                            '\r' => try out_stream.writeAll("\\r"),
                            '\t' => try out_stream.writeAll("\\t"),
                            else => {
                                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                                if (ulen == 1 or options.string.String.escape_unicode) {
                                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                                    try outputUnicodeEscape(codepoint, out_stream);
                                } else {
                                    try out_stream.writeAll(value[i .. i + ulen]);
                                }
                                // Skip the extra bytes of the sequence.
                                i += ulen - 1;
                            },
                        }
                    }
                    try out_stream.writeByte('\"');
                    return;
                }
                // Non-string slices become JSON arrays.
                try out_stream.writeByte('[');
                var child_options = options;
                if (child_options.whitespace) |*whitespace| {
                    whitespace.indent_level += 1;
                }
                for (value, 0..) |x, i| {
                    if (i != 0) {
                        try out_stream.writeByte(',');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        try out_stream.writeByte('\n');
                        try child_whitespace.outputIndent(out_stream);
                    }
                    try stringify(x, child_options, out_stream);
                }
                // Closing bracket on its own line when the array is non-empty.
                if (value.len != 0) {
                    if (options.whitespace) |whitespace| {
                        try out_stream.writeByte('\n');
                        try whitespace.outputIndent(out_stream);
                    }
                }
                try out_stream.writeByte(']');
                return;
            },
            else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
        },
        .array => return stringify(&value, options, out_stream),
        .vector => |info| {
            // Vectors are copied into a plain array and serialized as such.
            const array: [info.len]info.child = value;
            return stringify(&array, options, out_stream);
        },
        else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}
/// Test helper: serializes `value` with `options` and asserts the produced
/// bytes match `expected` exactly, failing as soon as output diverges.
fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
    // A writer that diffs each incoming chunk against the expected output.
    const ValidationWriter = struct {
        const Self = @This();
        pub const Writer = std.io.Writer(*Self, Error, write);
        pub const Error = error{
            TooMuchData,
            DifferentData,
        };
        // Suffix of `expected` that has not been matched yet.
        expected_remaining: []const u8,
        fn init(exp: []const u8) Self {
            return .{ .expected_remaining = exp };
        }
        pub fn writer(self: *Self) Writer {
            return .{ .context = self };
        }
        fn write(self: *Self, bytes: []const u8) Error!usize {
            // More bytes than remain expected -> over-production.
            if (self.expected_remaining.len < bytes.len) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining,
                    bytes,
                });
                return error.TooMuchData;
            }
            // Chunk must match the expected prefix byte-for-byte.
            if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining[0..bytes.len],
                    bytes,
                });
                return error.DifferentData;
            }
            self.expected_remaining = self.expected_remaining[bytes.len..];
            return bytes.len;
        }
    };
    var vos = ValidationWriter.init(expected);
    try stringify(value, options, vos.writer());
    // Leftover expected bytes mean stringify produced too little output.
    if (vos.expected_remaining.len > 0) return error.NotEnoughData;
}
// Exact-output coverage for stringify over primitive types.
test "stringify basic types" {
    try teststringify("false", false, StringifyOptions{});
    try teststringify("true", true, StringifyOptions{});
    try teststringify("null", @as(?u8, null), StringifyOptions{});
    try teststringify("null", @as(?*u32, null), StringifyOptions{});
    try teststringify("42", 42, StringifyOptions{});
    try teststringify("4.2e1", 42.0, StringifyOptions{});
    try teststringify("42", @as(u8, 42), StringifyOptions{});
    try teststringify("42", @as(u128, 42), StringifyOptions{});
    try teststringify("4.2e1", @as(f32, 42), StringifyOptions{});
    try teststringify("4.2e1", @as(f64, 42), StringifyOptions{});
    // Error sets serialize as the error name string.
    try teststringify("\"ItBroke\"", @as(anyerror, error.ItBroke), StringifyOptions{});
}
// String escaping across the short-escape, solidus and unicode-escape modes,
// exercising 1- through 4-byte UTF-8 sequences including surrogate-pair output.
test "stringify string" {
    try teststringify("\"hello\"", "hello", StringifyOptions{});
    try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{});
    try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{});
    try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{80}\"", "with unicode\u{80}", StringifyOptions{});
    try teststringify("\"with unicode\\u0080\"", "with unicode\u{80}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", StringifyOptions{});
    try teststringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{100}\"", "with unicode\u{100}", StringifyOptions{});
    try teststringify("\"with unicode\\u0100\"", "with unicode\u{100}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{800}\"", "with unicode\u{800}", StringifyOptions{});
    try teststringify("\"with unicode\\u0800\"", "with unicode\u{800}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", StringifyOptions{});
    try teststringify("\"with unicode\\u8000\"", "with unicode\u{8000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", StringifyOptions{});
    try teststringify("\"with unicode\\ud799\"", "with unicode\u{D799}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", StringifyOptions{});
    // Code points above U+FFFF escape as a UTF-16 surrogate pair.
    try teststringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", StringifyOptions{});
    try teststringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
    try teststringify("\"/\"", "/", StringifyOptions{});
    try teststringify("\"\\/\"", "/", StringifyOptions{ .string = .{ .String = .{ .escape_solidus = true } } });
}
// Tagged unions serialize only their active payload.
test "stringify tagged unions" {
    try teststringify("42", union(enum) {
        Foo: u32,
        Bar: bool,
    }{ .Foo = 42 }, StringifyOptions{});
}
test "stringify struct" {
    try teststringify("{\"foo\":42}", struct {
        foo: u32,
    }{ .foo = 42 }, StringifyOptions{});
}
// Whitespace options: default 4-space indent, then tab indent without the
// space after ':'.
test "stringify struct with indentation" {
    try teststringify(
        \\{
        \\    "foo": 42,
        \\    "bar": [
        \\        1,
        \\        2,
        \\        3
        \\    ]
        \\}
    ,
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        StringifyOptions{
            .whitespace = .{},
        },
    );
    try teststringify(
        "{\n\t\"foo\":42,\n\t\"bar\":[\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
        struct {
            foo: u32,
            bar: [3]u32,
        }{
            .foo = 42,
            .bar = .{ 1, 2, 3 },
        },
        StringifyOptions{
            .whitespace = .{
                .indent = .Tab,
                .separator = false,
            },
        },
    );
}
// void fields are omitted from the serialized object.
test "stringify struct with void field" {
    try teststringify("{\"foo\":42}", struct {
        foo: u32,
        bar: void = {},
    }{ .foo = 42 }, StringifyOptions{});
}
test "stringify array of structs" {
    const MyStruct = struct {
        foo: u32,
    };
    try teststringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
        MyStruct{ .foo = 42 },
        MyStruct{ .foo = 100 },
        MyStruct{ .foo = 1000 },
    }, StringifyOptions{});
}
// A type-level jsonStringify hook takes over serialization entirely.
test "stringify struct with custom stringifier" {
    try teststringify("[\"something special\",42]", struct {
        foo: u32,
        const Self = @This();
        pub fn jsonStringify(
            _: Self,
            options: StringifyOptions,
            out_stream: anytype,
        ) !void {
            try out_stream.writeAll("[\"something special\",");
            try stringify(42, options, out_stream);
            try out_stream.writeByte(']');
        }
    }{ .foo = 42 }, StringifyOptions{});
}
// Vectors serialize like fixed-size arrays.
test "stringify vector" {
    try teststringify("[1,1]", @as(@Vector(2, u32), @splat(@as(u32, 1))), StringifyOptions{});
}

View file

@ -1,6 +1,6 @@
const std = @import("std");
const aws = @import("aws.zig");
const json = @import("json");
const json = @import("json.zig");
var verbose: u8 = 0;
@ -34,8 +34,7 @@ pub fn log(
// Print the message to stderr, silently ignoring any errors
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
var stderr_writer = std.fs.File.stderr().writer(&.{});
const stderr = &stderr_writer.interface;
const stderr = std.io.getStdErr().writer();
nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return;
}
@ -63,14 +62,14 @@ pub fn main() anyerror!void {
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
defer _ = gpa.deinit();
const allocator = gpa.allocator();
var tests = try std.ArrayList(Tests).initCapacity(allocator, @typeInfo(Tests).@"enum".fields.len);
defer tests.deinit(allocator);
var tests = std.ArrayList(Tests).init(allocator);
defer tests.deinit();
var args = try std.process.argsWithAllocator(allocator);
defer args.deinit();
var stdout_buf: [4096]u8 = undefined;
const stdout_raw = std.fs.File.stdout().writer(&stdout_buf);
var stdout = stdout_raw.interface;
defer stdout.flush() catch @panic("could not flush stdout");
const stdout_raw = std.io.getStdOut().writer();
var bw = std.io.bufferedWriter(stdout_raw);
defer bw.flush() catch unreachable;
const stdout = bw.writer();
var arg0: ?[]const u8 = null;
var proxy: ?std.http.Client.Proxy = null;
while (args.next()) |arg| {
@ -100,14 +99,14 @@ pub fn main() anyerror!void {
}
inline for (@typeInfo(Tests).@"enum".fields) |f| {
if (std.mem.eql(u8, f.name, arg)) {
try tests.append(allocator, @field(Tests, f.name));
try tests.append(@field(Tests, f.name));
break;
}
}
}
if (tests.items.len == 0) {
inline for (@typeInfo(Tests).@"enum".fields) |f|
try tests.append(allocator, @field(Tests, f.name));
try tests.append(@field(Tests, f.name));
}
std.log.info("Start\n", .{});
@ -193,8 +192,8 @@ pub fn main() anyerror!void {
const func = fns[0];
const arn = func.function_arn.?;
// This is a bit ugly. Maybe a helper function in the library would help?
var tags = try std.ArrayList(aws.services.lambda.TagKeyValue).initCapacity(allocator, 1);
defer tags.deinit(allocator);
var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
defer tags.deinit();
tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
const addtag = try aws.Request(services.lambda.tag_resource).call(req, options);
@ -263,7 +262,7 @@ pub fn main() anyerror!void {
defer result.deinit();
std.log.info("request id: {s}", .{result.response_metadata.request_id});
const list = result.response.key_group_list.?;
std.log.info("key group list max: {d}", .{list.max_items});
std.log.info("key group list max: {?d}", .{list.max_items});
std.log.info("key group quantity: {d}", .{list.quantity});
},
.rest_xml_work_with_s3 => {

View file

@ -1,5 +1,5 @@
const std = @import("std");
const service_list = @import("service_manifest");
const service_list = @import("models/service_manifest.zig");
const expectEqualStrings = std.testing.expectEqualStrings;
pub fn Services(comptime service_imports: anytype) type {
@ -12,9 +12,9 @@ pub fn Services(comptime service_imports: anytype) type {
item.* = .{
.name = @tagName(service_imports[i]),
.type = @TypeOf(import_field),
.default_value_ptr = &import_field,
.default_value = &import_field,
.is_comptime = false,
.alignment = std.meta.alignment(@TypeOf(import_field)),
.alignment = 0,
};
}
@ -39,7 +39,7 @@ fn serviceCount(desired_services: anytype) usize {
pub const services = service_list;
test "services includes sts" {
try expectEqualStrings("2011-06-15", services.sts.version.?);
try expectEqualStrings("2011-06-15", services.sts.version);
}
test "sts includes get_caller_identity" {
try expectEqualStrings("GetCallerIdentity", services.sts.get_caller_identity.action_name);
@ -47,9 +47,9 @@ test "sts includes get_caller_identity" {
test "can get service and action name from request" {
// get request object. This call doesn't have parameters
const metadata = services.sts.get_caller_identity.Request.metaInfo();
try expectEqualStrings("2011-06-15", metadata.service_metadata.version.?);
try expectEqualStrings("2011-06-15", metadata.service_metadata.version);
}
test "can filter services" {
const filtered_services = Services(.{ .sts, .wafv2 }){};
try expectEqualStrings("2011-06-15", filtered_services.sts.version.?);
try expectEqualStrings("2011-06-15", filtered_services.sts.version);
}

View file

@ -11,7 +11,7 @@ pub const EncodingOptions = struct {
field_name_transformer: fieldNameTransformerFn = defaultTransformer,
};
pub fn encode(allocator: std.mem.Allocator, obj: anytype, writer: *std.Io.Writer, comptime options: EncodingOptions) !void {
pub fn encode(allocator: std.mem.Allocator, obj: anytype, writer: anytype, comptime options: EncodingOptions) !void {
_ = try encodeInternal(allocator, "", "", true, obj, writer, options);
}
@ -20,15 +20,14 @@ fn encodeStruct(
parent: []const u8,
first: bool,
obj: anytype,
writer: *std.Io.Writer,
writer: anytype,
comptime options: EncodingOptions,
) !bool {
var rc = first;
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
const arena_alloc = arena.allocator();
inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
const field_name = try options.field_name_transformer(arena_alloc, field.name);
const field_name = try options.field_name_transformer(allocator, field.name);
defer if (options.field_name_transformer.* != defaultTransformer)
allocator.free(field_name);
// @compileLog(@typeInfo(field.field_type).Pointer);
rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
}
@ -41,7 +40,7 @@ pub fn encodeInternal(
field_name: []const u8,
first: bool,
obj: anytype,
writer: *std.Io.Writer,
writer: anytype,
comptime options: EncodingOptions,
) !bool {
// @compileLog(@typeName(@TypeOf(obj)));
@ -51,24 +50,15 @@ pub fn encodeInternal(
.optional => if (obj) |o| {
rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
},
.pointer => |ti| if (ti.size == .one) {
.pointer => |ti| if (ti.size == .One) {
rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
} else {
if (!first) _ = try writer.write("&");
// @compileLog(@typeInfo(@TypeOf(obj)));
switch (ti.child) {
// TODO: not sure this first one is valid. How should [][]const u8 be serialized here?
[]const u8 => {
// if (true) @panic("panic at the disco!");
std.log.warn(
"encoding object of type [][]const u8...pretty sure this is wrong {s}{s}={any}",
.{ parent, field_name, obj },
);
try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
},
u8 => try writer.print("{s}{s}={s}", .{ parent, field_name, obj }),
else => try writer.print("{s}{s}={any}", .{ parent, field_name, obj }),
}
if (ti.child == []const u8 or ti.child == u8)
try writer.print("{s}{s}={s}", .{ parent, field_name, obj })
else
try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
rc = false;
},
.@"struct" => if (std.mem.eql(u8, "", field_name)) {
@ -104,29 +94,78 @@ pub fn encodeInternal(
return rc;
}
fn testencode(allocator: std.mem.Allocator, expected: []const u8, value: anytype, comptime options: EncodingOptions) !void {
const ValidationWriter = struct {
const Self = @This();
pub const Writer = std.io.Writer(*Self, Error, write);
pub const Error = error{
TooMuchData,
DifferentData,
};
expected_remaining: []const u8,
fn init(exp: []const u8) Self {
return .{ .expected_remaining = exp };
}
pub fn writer(self: *Self) Writer {
return .{ .context = self };
}
fn write(self: *Self, bytes: []const u8) Error!usize {
// std.debug.print("{s}\n", .{bytes});
if (self.expected_remaining.len < bytes.len) {
std.log.warn(
\\====== expected this output: =========
\\{s}
\\======== instead found this: =========
\\{s}
\\======================================
, .{
self.expected_remaining,
bytes,
});
return error.TooMuchData;
}
if (!std.mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
std.log.warn(
\\====== expected this output: =========
\\{s}
\\======== instead found this: =========
\\{s}
\\======================================
, .{
self.expected_remaining[0..bytes.len],
bytes,
});
return error.DifferentData;
}
self.expected_remaining = self.expected_remaining[bytes.len..];
return bytes.len;
}
};
var vos = ValidationWriter.init(expected);
try encode(allocator, value, vos.writer(), options);
if (vos.expected_remaining.len > 0) return error.NotEnoughData;
}
test "can urlencode an object" {
const expected = "Action=GetCallerIdentity&Version=2021-01-01";
var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
defer aw.deinit();
try encode(
try testencode(
std.testing.allocator,
"Action=GetCallerIdentity&Version=2021-01-01",
.{ .Action = "GetCallerIdentity", .Version = "2021-01-01" },
&aw.writer,
.{},
);
try std.testing.expectEqualStrings(expected, aw.written());
}
test "can urlencode an object with integer" {
const expected = "Action=GetCallerIdentity&Duration=32";
var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
defer aw.deinit();
try encode(
try testencode(
std.testing.allocator,
"Action=GetCallerIdentity&Duration=32",
.{ .Action = "GetCallerIdentity", .Duration = 32 },
&aw.writer,
.{},
);
try std.testing.expectEqualStrings(expected, aw.written());
}
const UnsetValues = struct {
action: ?[]const u8 = null,
@ -135,28 +174,30 @@ const UnsetValues = struct {
val2: ?[]const u8 = null,
};
test "can urlencode an object with unset values" {
const expected = "action=GetCallerIdentity&duration=32";
var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
defer aw.deinit();
try encode(
// var buffer = std.ArrayList(u8).init(std.testing.allocator);
// defer buffer.deinit();
// const writer = buffer.writer();
// try encode(
// std.testing.allocator,
// UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
// writer,
// .{},
// );
// std.debug.print("\n\nEncoded as '{s}'\n", .{buffer.items});
try testencode(
std.testing.allocator,
"action=GetCallerIdentity&duration=32",
UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
&aw.writer,
.{},
);
try std.testing.expectEqualStrings(expected, aw.written());
}
test "can urlencode a complex object" {
const expected = "Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo";
var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
defer aw.deinit();
try encode(
try testencode(
std.testing.allocator,
"Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo",
.{ .Action = "GetCallerIdentity", .Version = "2021-01-01", .complex = .{ .innermember = "foo" } },
&aw.writer,
.{},
);
try std.testing.expectEqualStrings(expected, aw.written());
}
const Filter = struct {
@ -179,28 +220,26 @@ const Request: type = struct {
all_regions: ?bool = null,
};
test "can urlencode an EC2 Filter" {
// TODO: This is a strange test, mainly to document current behavior
// EC2 filters are supposed to be something like
// Filter.Name=foo&Filter.Values=bar or, when there is more, something like
// Filter.1.Name=instance-type&Filter.1.Value.1=m1.small&Filter.1.Value.2=m1.large&Filter.2.Name=block-device-mapping.status&Filter.2.Value.1=attached
//
// This looks like a real PITA, so until it is actually needed, this is
// a placeholder test to track what actual encoding is happening. This
// changed between zig 0.14.x and 0.15.1, and I'm not entirely sure why
// yet, but because the remaining functionality is fine, we're going with
// this
const zig_14x_expected = "filters={ url.Filter{ .name = { 102, 111, 111 }, .values = { { ... } } } }";
_ = zig_14x_expected;
const expected = "filters={ .{ .name = { 102, 111, 111 }, .values = { { ... } } } }";
var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
defer aw.deinit();
try encode(
// TODO: Fix this encoding...
testencode(
std.testing.allocator,
"filters={ url.Filter{ .name = { 102, 111, 111 }, .values = { { ... } } } }",
Request{
.filters = @constCast(&[_]Filter{.{ .name = "foo", .values = @constCast(&[_][]const u8{"bar"}) }}),
},
&aw.writer,
.{},
);
try std.testing.expectEqualStrings(expected, aw.written());
) catch |err| {
var al = std.ArrayList(u8).init(std.testing.allocator);
defer al.deinit();
try encode(
std.testing.allocator,
Request{
.filters = @constCast(&[_]Filter{.{ .name = "foo", .values = @constCast(&[_][]const u8{"bar"}) }}),
},
al.writer(),
.{},
);
std.log.warn("Error found. Full encoding is '{s}'", .{al.items});
return err;
};
}

View file

@ -25,15 +25,12 @@ pub const Element = struct {
tag: []const u8,
attributes: AttributeList,
children: ContentList,
next_sibling: ?*Element = null,
allocator: std.mem.Allocator,
fn init(tag: []const u8, alloc: Allocator) Element {
return .{
.tag = tag,
.attributes = AttributeList{},
.children = ContentList{},
.allocator = alloc,
.attributes = AttributeList.init(alloc),
.children = ContentList.init(alloc),
};
}
@ -350,7 +347,7 @@ fn parseDocument(ctx: *ParseContext, backing_allocator: Allocator) !Document {
_ = ctx.eatWs();
try trySkipComments(ctx, allocator);
doc.root = (try tryParseElement(ctx, allocator, null)) orelse return error.InvalidDocument;
doc.root = (try tryParseElement(ctx, allocator)) orelse return error.InvalidDocument;
_ = ctx.eatWs();
try trySkipComments(ctx, allocator);
@ -418,12 +415,12 @@ fn tryParseCharData(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}
fn parseContent(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) ParseError!Content {
fn parseContent(ctx: *ParseContext, alloc: Allocator) ParseError!Content {
if (try tryParseCharData(ctx, alloc)) |cd| {
return Content{ .CharData = cd };
} else if (try tryParseComment(ctx, alloc)) |comment| {
return Content{ .Comment = comment };
} else if (try tryParseElement(ctx, alloc, parent)) |elem| {
} else if (try tryParseElement(ctx, alloc)) |elem| {
return Content{ .Element = elem };
} else {
return error.UnexpectedCharacter;
@ -443,7 +440,7 @@ fn tryParseAttr(ctx: *ParseContext, alloc: Allocator) !?*Attribute {
return attr;
}
fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
fn tryParseElement(ctx: *ParseContext, alloc: Allocator) !?*Element {
const start = ctx.offset;
if (!ctx.eat('<')) return null;
const tag = parseNameNoDupe(ctx) catch {
@ -456,7 +453,7 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E
while (ctx.eatWs()) {
const attr = (try tryParseAttr(ctx, alloc)) orelse break;
try element.attributes.append(element.allocator, attr);
try element.attributes.append(attr);
}
if (ctx.eatStr("/>")) {
@ -472,8 +469,8 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E
break;
}
const content = try parseContent(ctx, alloc, element);
try element.children.append(element.allocator, content);
const content = try parseContent(ctx, alloc);
try element.children.append(content);
}
const closing_tag = try parseNameNoDupe(ctx);
@ -483,23 +480,6 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E
_ = ctx.eatWs();
try ctx.expect('>');
if (parent) |p| {
var last_element: ?*Element = null;
for (0..p.children.items.len) |i| {
const child = p.children.items[p.children.items.len - i - 1];
if (child == .Element) {
last_element = child.Element;
break;
}
}
if (last_element) |lc| {
lc.next_sibling = element;
}
}
return element;
}
@ -510,13 +490,13 @@ test "tryParseElement" {
{
var ctx = ParseContext.init("<= a='b'/>");
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc, null));
try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
try testing.expectEqual(@as(?u8, '<'), ctx.peek());
}
{
var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");
const size_attr = elem.?.attributes.items[0];
@ -530,14 +510,14 @@ test "tryParseElement" {
{
var ctx = ParseContext.init("<python>test</python>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "python");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
}
{
var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
const elem = try tryParseElement(&ctx, alloc, null);
const elem = try tryParseElement(&ctx, alloc);
try testing.expectEqualSlices(u8, elem.?.tag, "a");
try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
@ -673,10 +653,7 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {
// This error is not strictly true, but we need to match one of the items
// from the error set provided by the other stdlib calls at the calling site
if (!alloc.resize(str, j)) {
defer alloc.free(str);
return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
}
if (!alloc.resize(str, j)) return error.OutOfMemory;
return str[0..j];
}

View file

@ -1,794 +0,0 @@
const std = @import("std");
const mem = std.mem;
const Allocator = mem.Allocator;
/// Options for controlling XML serialization behavior
pub const StringifyOptions = struct {
    /// Controls whitespace insertion for easier human readability
    whitespace: Whitespace = .minified,

    /// Should optional fields with null value be written?
    emit_null_optional_fields: bool = true,

    // TODO: Implement
    /// Arrays/slices of u8 are typically encoded as strings. This option emits them as arrays of numbers instead. Does not affect calls to objectField*().
    emit_strings_as_arrays: bool = false,

    /// Controls whether to include XML declaration at the beginning
    include_declaration: bool = true,

    /// Root element name to use when serializing a value that doesn't have a natural name
    root_name: ?[]const u8 = "root",

    /// Root attributes (e.g. xmlns="...") that will be added to the root element node only
    root_attributes: []const u8 = "",

    /// Function to determine the element name for an array item based on the element
    /// name of the array containing the elements. See arrayElementPluralToSingluarTransformation
    /// and arrayElementNoopTransformation functions for examples
    arrayElementNameConversion: *const fn (allocator: std.mem.Allocator, name: ?[]const u8) error{OutOfMemory}!?[]const u8 = arrayElementPluralToSingluarTransformation,

    /// Indentation style: `minified` emits no newlines/indent; `indent_N`
    /// uses N spaces per nesting level; `indent_tab` uses one tab per level.
    pub const Whitespace = enum {
        minified,
        indent_1,
        indent_2,
        indent_3,
        indent_4,
        indent_8,
        indent_tab,
    };
};
/// Error set for XML serialization
/// NOTE(review): serializeValue returns `error.UnsupportedType` for type
/// categories it does not handle; the writer's own error set is merged in
/// via inferred error sets at the call sites.
pub const XmlSerializeError = error{
    /// Unsupported type for XML serialization
    UnsupportedType,
    /// Out of memory
    OutOfMemory,
    /// Write error
    WriteError,
};
/// Serializes a value to XML and writes it to the provided writer
///
/// `value` may be any type handled by `serializeValue` (bool, integers,
/// floats, strings, slices, arrays, structs, optionals, enums, tagged
/// unions). Output shape is controlled by `options`.
pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    writer: *std.Io.Writer,
) !void {
    // Write XML declaration if requested
    if (options.include_declaration)
        try writer.writeAll("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");

    // Start serialization with the root element
    const root_name = options.root_name;
    // A non-null optional is unwrapped here so its payload is serialized
    // directly under the root name. The comptime-known left operand of `or`
    // short-circuits, so `value == null` is only analyzed for optional types.
    if (@typeInfo(@TypeOf(value)) != .optional or value == null)
        try serializeValue(value, root_name, options, writer, 0)
    else
        try serializeValue(value.?, root_name, options, writer, 0);
}
/// Serializes a value to XML and returns an allocated string
///
/// Caller owns the returned slice and must free it with `allocator`.
pub fn stringifyAlloc(
    allocator: Allocator,
    value: anytype,
    options: StringifyOptions,
) ![]u8 {
    var list = std.Io.Writer.Allocating.init(allocator);
    defer list.deinit();
    try stringify(value, options, &list.writer);
    // Transfers buffer ownership to the caller; assumes toOwnedSlice resets
    // the internal buffer so the deferred deinit is a no-op on it — standard
    // Allocating-writer contract.
    return list.toOwnedSlice();
}
/// Internal function to serialize a value with proper indentation
///
/// Recursively emits `<element_name>…</element_name>` for `value` at nesting
/// level `depth`. An `element_name` of null means "no tag here" — used when a
/// parent has already opened the tag (optionals, single-item pointers).
/// Returns `error.UnsupportedType` for type categories with no XML mapping.
fn serializeValue(
    value: anytype,
    element_name: ?[]const u8,
    options: StringifyOptions,
    writer: *std.Io.Writer,
    depth: usize,
) !void {
    const T = @TypeOf(value);

    // const output_indent = !(!options.emit_null_optional_fields and @typeInfo(@TypeOf(value)) == .optional and value == null);
    // Indent unless this is a suppressed (null, not-emitted) optional.
    const output_indent = options.emit_null_optional_fields or @typeInfo(@TypeOf(value)) != .optional or value != null;

    if (output_indent and element_name != null)
        try writeIndent(writer, depth, options.whitespace);

    // Start element tag
    // Optionals and arrays open their own tags in their switch arms below,
    // so they are excluded here to avoid writing the tag twice.
    if (@typeInfo(T) != .optional and @typeInfo(T) != .array) {
        if (element_name) |n| {
            try writer.writeAll("<");
            try writer.writeAll(n);
            // Root attributes (e.g. xmlns) go on the depth-0 element only.
            if (depth == 0 and options.root_attributes.len > 0) {
                try writer.writeByte(' ');
                try writer.writeAll(options.root_attributes);
            }
            try writer.writeAll(">");
        }
    }

    // Handle different types
    switch (@typeInfo(T)) {
        .bool => try writer.writeAll(if (value) "true" else "false"),
        .int, .comptime_int, .float, .comptime_float => try writer.print("{}", .{value}),
        .pointer => |ptr_info| {
            switch (ptr_info.size) {
                .one => {
                    // We don't want to write the opening tag a second time, so
                    // we will pass null, then come back and close before returning
                    //
                    // ...but...in the event of a *[]const u8, we do want to pass that in,
                    // but only if emit_strings_as_arrays is true
                    const child_ti = @typeInfo(ptr_info.child);
                    const el_name = if (options.emit_strings_as_arrays and child_ti == .array and child_ti.array.child == u8)
                        element_name
                    else
                        null;
                    try serializeValue(value.*, el_name, options, writer, depth);
                    try writeClose(writer, element_name);
                    return;
                },
                .slice => {
                    if (ptr_info.child == u8) {
                        // String type
                        try serializeString(writer, element_name, value, options, depth);
                    } else {
                        // Array of values
                        if (options.whitespace != .minified) {
                            try writer.writeByte('\n');
                        }
                        // Item names (e.g. "items" -> "item") are derived via a
                        // small fixed-buffer allocator; assumes converted names
                        // fit in 256 bytes — TODO confirm for long tag names.
                        var buf: [256]u8 = undefined;
                        var fba = std.heap.FixedBufferAllocator.init(&buf);
                        const alloc = fba.allocator();
                        const item_name = try options.arrayElementNameConversion(alloc, element_name);
                        for (value) |item| {
                            try serializeValue(item, item_name, options, writer, depth + 1);
                            if (options.whitespace != .minified) {
                                try writer.writeByte('\n');
                            }
                        }
                        try writeIndent(writer, depth, options.whitespace);
                    }
                },
                else => return error.UnsupportedType,
            }
        },
        .array => |array_info| {
            // Arrays open their own container tag here (skipped above),
            // except when a u8 array is being emitted element-by-element.
            if (!options.emit_strings_as_arrays or array_info.child != u8) {
                if (element_name) |n| {
                    try writer.writeAll("<");
                    try writer.writeAll(n);
                    try writer.writeAll(">");
                }
            }
            if (array_info.child == u8) {
                // Fixed-size string
                const slice = &value;
                try serializeString(writer, element_name, slice, options, depth);
            } else {
                // Fixed-size array
                if (options.whitespace != .minified) {
                    try writer.writeByte('\n');
                }
                var buf: [256]u8 = undefined;
                var fba = std.heap.FixedBufferAllocator.init(&buf);
                const alloc = fba.allocator();
                const item_name = try options.arrayElementNameConversion(alloc, element_name);
                for (value) |item| {
                    try serializeValue(item, item_name, options, writer, depth + 1);
                    if (options.whitespace != .minified) {
                        try writer.writeByte('\n');
                    }
                }
                try writeIndent(writer, depth, options.whitespace);
            }
            if (!options.emit_strings_as_arrays or array_info.child != u8)
                try writeClose(writer, element_name);
            return;
        },
        .@"struct" => |struct_info| {
            if (options.whitespace != .minified) {
                try writer.writeByte('\n');
            }
            inline for (struct_info.fields) |field| {
                // A struct may rename its XML elements via a fieldNameFor decl.
                const field_name =
                    if (std.meta.hasFn(T, "fieldNameFor"))
                        value.fieldNameFor(field.name)
                    else
                        field.name; // TODO: field mapping
                const field_value = @field(value, field.name);
                try serializeValue(
                    field_value,
                    field_name,
                    options,
                    writer,
                    depth + 1,
                );
                if (options.whitespace != .minified) {
                    // Suppressed null optionals wrote nothing, so no newline.
                    if (!options.emit_null_optional_fields and @typeInfo(@TypeOf(field_value)) == .optional and field_value == null) {
                        // Skip writing anything
                    } else {
                        try writer.writeByte('\n');
                    }
                }
            }
            try writeIndent(writer, depth, options.whitespace);
        },
        .optional => {
            // Optionals open their own tag (skipped above) so a suppressed
            // null can omit the element entirely.
            if (options.emit_null_optional_fields or value != null) {
                if (element_name) |n| {
                    try writer.writeAll("<");
                    try writer.writeAll(n);
                    try writer.writeAll(">");
                }
            }
            if (value) |payload| {
                try serializeValue(payload, null, options, writer, depth);
            } else {
                // For null values, we'll write an empty element
                // We've already written the opening tag, so just close it immediately
                if (options.emit_null_optional_fields)
                    try writeClose(writer, element_name);
                return;
            }
        },
        .null => {
            // Empty element
        },
        .@"enum" => {
            try std.fmt.format(writer, "{s}", .{@tagName(value)});
        },
        .@"union" => |union_info| {
            // Only tagged unions are supported; the active field is emitted
            // under its own field name at the same depth.
            if (union_info.tag_type) |_| {
                inline for (union_info.fields) |field| {
                    if (@field(std.meta.Tag(T), field.name) == std.meta.activeTag(value)) {
                        try serializeValue(
                            @field(value, field.name),
                            field.name,
                            options,
                            writer,
                            depth,
                        );
                        break;
                    }
                }
            } else {
                return error.UnsupportedType;
            }
        },
        else => return error.UnsupportedType,
    }

    try writeClose(writer, element_name);
}
/// Emits the closing tag `</name>` for `element_name`; writes nothing for an
/// anonymous (null-named) element.
fn writeClose(writer: *std.Io.Writer, element_name: ?[]const u8) !void {
    const name = element_name orelse return;
    try writer.print("</{s}>", .{name});
}
/// Writes indentation for `depth` nesting levels according to `whitespace`.
/// `.minified` produces no output at all.
fn writeIndent(writer: *std.Io.Writer, depth: usize, whitespace: StringifyOptions.Whitespace) std.Io.Writer.Error!void {
    // Resolve the fill character and per-level width for this style.
    const fill: u8 = if (whitespace == .indent_tab) '\t' else ' ';
    const per_level: usize = switch (whitespace) {
        .minified => return,
        .indent_1 => 1,
        .indent_2 => 2,
        .indent_3 => 3,
        .indent_4 => 4,
        .indent_8 => 8,
        .indent_tab => 1,
    };
    try writer.splatBytesAll(&.{fill}, per_level * depth);
}
/// Writes a string value: either XML-escaped text (default) or, when
/// `emit_strings_as_arrays` is set, one numeric `<item>` element per byte.
/// `depth` is only used for indentation in the per-byte mode.
fn serializeString(
    writer: *std.Io.Writer,
    element_name: ?[]const u8,
    value: []const u8,
    options: StringifyOptions,
    depth: usize,
) error{ WriteFailed, OutOfMemory }!void {
    if (options.emit_strings_as_arrays) {
        // if (true) return error.seestackrun;
        for (value) |c| {
            try writeIndent(writer, depth + 1, options.whitespace);

            // Derive the per-byte element name (e.g. "bytes" -> "byte") with
            // a small scratch allocator, same scheme as slice serialization.
            var buf: [256]u8 = undefined;
            var fba = std.heap.FixedBufferAllocator.init(&buf);
            const alloc = fba.allocator();
            const item_name = try options.arrayElementNameConversion(alloc, element_name);
            if (item_name) |n| {
                try writer.writeAll("<");
                try writer.writeAll(n);
                try writer.writeAll(">");
            }
            // Each byte is emitted as its decimal value.
            try writer.print("{d}", .{c});
            try writeClose(writer, item_name);
            if (options.whitespace != .minified) {
                try writer.writeByte('\n');
            }
        }
        return;
    }
    try escapeString(writer, value);
}
/// Escapes special characters in XML strings
///
/// Replaces the five XML-reserved characters (& < > " ') with their entity
/// references; all other bytes pass through unchanged.
fn escapeString(writer: *std.Io.Writer, value: []const u8) std.Io.Writer.Error!void {
    for (value) |byte| {
        const replacement: ?[]const u8 = switch (byte) {
            '&' => "&amp;",
            '<' => "&lt;",
            '>' => "&gt;",
            '"' => "&quot;",
            '\'' => "&apos;",
            else => null,
        };
        if (replacement) |entity|
            try writer.writeAll(entity)
        else
            try writer.writeByte(byte);
    }
}
/// Does no transformation on the input array
///
/// Identity conversion: array items reuse the container's element name
/// verbatim. The allocator is accepted only to satisfy the
/// `arrayElementNameConversion` function-pointer signature.
pub fn arrayElementNoopTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
    _ = allocator;
    return name;
}
/// Attempts to convert a plural name to singular for array items
///
/// Applies very basic English rules: "-ies" -> "-y" (allocated),
/// "-es" -> drop "es", "-s" -> drop "s", with a small exception list of
/// "-e" words ("types" -> "type", "bytes" -> "byte"). Names that are null,
/// shorter than 3 bytes, or not recognized as plural are returned unchanged.
/// Only the "-ies" case allocates; all other results alias the input.
pub fn arrayElementPluralToSingluarTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
    const n = name orelse return null;
    if (n.len < 3) return name;

    // There are a ton of these words, I'm just adding two for now
    // https://wordmom.com/nouns/end-e
    const es_exceptions = [_][]const u8{ "types", "bytes" };
    for (es_exceptions) |word|
        if (std.mem.eql(u8, word, n)) return n[0 .. n.len - 1];

    // Very basic English pluralization rules
    if (!std.mem.endsWith(u8, n, "s")) return name; // Not recognized as plural
    if (std.mem.endsWith(u8, n, "ies")) // e.g., "entries" -> "entry"
        return try std.mem.concat(allocator, u8, &[_][]const u8{ n[0 .. n.len - 3], "y" });
    if (std.mem.endsWith(u8, n, "es")) // e.g., "boxes" -> "box"
        return n[0 .. n.len - 2];
    return n[0 .. n.len - 1]; // e.g., "items" -> "item"
}
// Tests

// Scalars and strings serialize as text inside a single <root> element.
test "stringify basic types" {
    const testing = std.testing;
    const allocator = testing.allocator;

    // Test boolean
    {
        const result = try stringifyAlloc(allocator, true, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>true</root>", result);
    }

    // Test comptime integer
    {
        const result = try stringifyAlloc(allocator, 42, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
    }

    // Test integer
    {
        const result = try stringifyAlloc(allocator, @as(usize, 42), .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
    }

    // Test float
    {
        const result = try stringifyAlloc(allocator, 3.14, .{});
        defer allocator.free(result);
        // zig 0.14.x outputs 3.14e0, but zig 0.15.1 outputs 3.14. Either *should* be acceptable
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>3.14</root>", result);
    }

    // Test string
    {
        const result = try stringifyAlloc(allocator, "hello", .{});
        // @compileLog(@typeInfo(@TypeOf("hello")).pointer.size);
        // @compileLog(@typeName(@typeInfo(@TypeOf("hello")).pointer.child));
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello</root>", result);
    }

    // Test string with special characters — the five XML-reserved characters
    // must come out as entity references.
    {
        const result = try stringifyAlloc(allocator, "hello & world < > \" '", .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello &amp; world &lt; &gt; &quot; &apos;</root>", result);
    }
}

// Arrays nest one element per item inside the container element; the item
// name is derived from the container name (default conversion).
test "stringify arrays" {
    const testing = std.testing;
    const allocator = testing.allocator;

    // Test array of integers — "root" is not recognized as plural, so items
    // reuse the container name.
    {
        const arr = [_]i32{ 1, 2, 3 };
        const result = try stringifyAlloc(allocator, arr, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>1</root><root>2</root><root>3</root></root>", result);
    }

    // Test array of strings
    {
        const arr = [_][]const u8{ "one", "two", "three" };
        const result = try stringifyAlloc(allocator, arr, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>one</root><root>two</root><root>three</root></root>", result);
    }

    // Test array with custom root name — "items" is singularized to "item".
    {
        const arr = [_]i32{ 1, 2, 3 };
        const result = try stringifyAlloc(allocator, arr, .{ .root_name = "items" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<items><item>1</item><item>2</item><item>3</item></items>", result);
    }
}
// Struct fields become child elements named after the field.
test "stringify structs" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        age: u32,
        is_active: bool,
    };

    // Test basic struct
    {
        const person = Person{
            .name = "John",
            .age = 30,
            .is_active = true,
        };
        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><age>30</age><is_active>true</is_active></root>", result);
    }

    // Test struct with pretty printing
    {
        const person = Person{
            .name = "John",
            .age = 30,
            .is_active = true,
        };
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <age>30</age>\n    <is_active>true</is_active>\n</root>", result);
    }

    // Test nested struct — children indent one level deeper than the parent.
    {
        const Address = struct {
            street: []const u8,
            city: []const u8,
        };

        const PersonWithAddress = struct {
            name: []const u8,
            address: Address,
        };

        const person = PersonWithAddress{
            .name = "John",
            .address = Address{
                .street = "123 Main St",
                .city = "Anytown",
            },
        };
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <address>\n        <street>123 Main St</street>\n        <city>Anytown</city>\n    </address>\n</root>", result);
    }
}

// Default behavior: null optionals still produce an (empty) element.
test "stringify optional values" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        middle_name: ?[]const u8,
    };

    // Test with present optional
    {
        const person = Person{
            .name = "John",
            .middle_name = "Robert",
        };
        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
    }

    // Test with null optional — element is emitted but empty.
    {
        const person = Person{
            .name = "John",
            .middle_name = null,
        };
        const result = try stringifyAlloc(allocator, person, .{});
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name></middle_name></root>", result);
    }
}

// With emit_null_optional_fields disabled, null optionals are omitted
// entirely from the output.
test "stringify optional values with emit_null_optional_fields == false" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        name: []const u8,
        middle_name: ?[]const u8,
    };

    // Test with present optional
    {
        const person = Person{
            .name = "John",
            .middle_name = "Robert",
        };
        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
    }

    // Test with null optional
    {
        const person = Person{
            .name = "John",
            .middle_name = null,
        };
        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name></root>", result);
    }
}
// Exercises declaration suppression, custom root names, indent styles, and
// the emit_strings_as_arrays byte-array mode.
test "stringify with custom options" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        last_name: []const u8,
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    // Test without XML declaration
    {
        const result = try stringifyAlloc(allocator, person, .{ .include_declaration = false });
        defer allocator.free(result);
        try testing.expectEqualStrings("<root><first_name>John</first_name><last_name>Doe</last_name></root>", result);
    }

    // Test with custom root name
    {
        const result = try stringifyAlloc(allocator, person, .{ .root_name = "person" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<person><first_name>John</first_name><last_name>Doe</last_name></person>", result);
    }

    // Test with custom indent level
    {
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root>
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }

    // Test with output []u8 as array — each byte becomes a numeric element,
    // with "bytes" singularized to "byte" via the es-exception list.
    {
        // pointer, size 1, child == .array, child.array.child == u8
        // @compileLog(@typeInfo(@typeInfo(@TypeOf("foo")).pointer.child));
        const result = try stringifyAlloc(allocator, "foo", .{ .emit_strings_as_arrays = true, .root_name = "bytes" });
        defer allocator.free(result);
        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<bytes><byte>102</byte><byte>111</byte><byte>111</byte></bytes>", result);
    }
}

// A struct-level fieldNameFor decl overrides the default field-name-as-tag
// mapping.
test "structs with custom field names" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        last_name: []const u8,

        pub fn fieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
            if (std.mem.eql(u8, field_name, "first_name")) return "GivenName";
            if (std.mem.eql(u8, field_name, "last_name")) return "FamilyName";
            unreachable;
        }
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root>
            \\  <GivenName>John</GivenName>
            \\  <FamilyName>Doe</FamilyName>
            \\</root>
        , result);
    }
}
// Suppressed null optionals must not leave stray blank lines in pretty
// output; root_attributes land on the root element only.
test "structs with optional values" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }
}

// An optional value at the top level is unwrapped by stringify, so the
// output is identical to serializing the payload directly.
test "optional structs with value" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Person = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const person: ?Person = Person{
        .first_name = "John",
        .last_name = "Doe",
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <first_name>John</first_name>
            \\  <last_name>Doe</last_name>
            \\</root>
        , result);
    }
}

// A present optional struct field serializes its payload under the field's
// own element, at the field's depth.
test "nested optional structs with value" {
    const testing = std.testing;
    const allocator = testing.allocator;

    const Name = struct {
        first_name: []const u8,
        middle_name: ?[]const u8 = null,
        last_name: []const u8,
    };

    const Person = struct {
        name: ?Name,
    };

    const person: ?Person = Person{
        .name = .{
            .first_name = "John",
            .last_name = "Doe",
        },
    };

    {
        const result = try stringifyAlloc(
            allocator,
            person,
            .{
                .whitespace = .indent_2,
                .emit_null_optional_fields = false,
                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
            },
        );
        defer allocator.free(result);
        try testing.expectEqualStrings(
            \\<?xml version="1.0" encoding="UTF-8"?>
            \\<root xmlns="http://example.com/blah/xxxx/">
            \\  <name>
            \\    <first_name>John</first_name>
            \\    <last_name>Doe</last_name>
            \\  </name>
            \\</root>
        , result);
    }
}

View file

@ -1,7 +1,6 @@
const std = @import("std");
const xml = @import("xml.zig");
const date = @import("date");
const sm = @import("service_manifest");
const date = @import("date.zig");
const log = std.log.scoped(.xml_shaper);
@ -95,52 +94,6 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse
return Parsed(T).init(arena_allocator, try parseInternal(T, root, opts), parsed);
}
/// How a list is laid out in the XML being parsed.
pub const XmlArrayStyle = enum {
    collection, // Has a container element and list of child elements
    repeated_root, // Repeats the same element without a container, e.g. S3 ListBucketResult
};

/// Heuristically decides whether `element` is a container of list items
/// (`.collection`) or itself one of several repeated sibling elements
/// (`.repeated_root`): if any direct child's tag matches a field name of the
/// target struct `T` (honoring T's optional fieldNameFor mapping), the
/// element is treated as a repeated root. Non-struct targets are always
/// `.collection`.
fn detectArrayStyle(comptime T: type, element: *xml.Element, options: ParseOptions) !XmlArrayStyle {
    _ = options;

    if (@typeInfo(T) != .@"struct") {
        return .collection;
    }

    // does the element have child elements that match our expected struct?
    // Build a comptime set of T's expected XML tag names.
    const field_names = comptime blk: {
        var result: [std.meta.fieldNames(T).len]struct {
            []const u8,
        } = undefined;

        for (std.meta.fieldNames(T), 0..) |field_name, i| {
            const key = if (@hasDecl(T, "fieldNameFor"))
                T.fieldNameFor(undefined, field_name)
            else
                field_name;

            result[i] = .{key};
        }

        break :blk std.StaticStringMap(void).initComptime(result);
    };

    var matching_fields: usize = 0;
    var element_iterator = element.elements();

    while (element_iterator.next()) |el| {
        if (field_names.has(el.tag)) {
            matching_fields += 1;
        }
    }

    if (matching_fields > 0) {
        return .repeated_root;
    }

    return .collection;
}
fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
switch (@typeInfo(T)) {
.bool => {
@ -209,10 +162,8 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
return try parseInternal(optional_info.child, element, options);
}
},
.@"enum" => {
if (T == date.Timestamp) {
return try date.Timestamp.parse(element.children.items[0].CharData);
}
.@"enum" => |enum_info| {
_ = enum_info;
// const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
// if (numeric) |num| {
// return std.meta.intToEnum(T, num);
@ -362,13 +313,13 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
.pointer => |ptr_info| {
const allocator = options.allocator orelse return error.AllocatorRequired;
switch (ptr_info.size) {
.one => {
.One => {
const r: T = try allocator.create(ptr_info.child);
errdefer allocator.free(r);
r.* = try parseInternal(ptr_info.child, element, options);
return r;
},
.slice => {
.Slice => {
// TODO: Detect and deal with arrays. This will require two
// passes through the element children - one to
// determine if it is an array, one to parse the elements
@ -377,44 +328,30 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
// <Item>bar</Item>
// <Items>
if (ptr_info.child != u8) {
const array_style = try detectArrayStyle(ptr_info.child, element, options);
log.debug("type = {s}, style = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @tagName(array_style), @typeName(ptr_info.child), element.tag });
var children = std.ArrayList(ptr_info.child){};
defer children.deinit(allocator);
switch (array_style) {
.collection => {
var iterator = element.elements();
while (iterator.next()) |child_element| {
try children.append(
allocator,
try parseInternal(ptr_info.child, child_element, options),
);
}
},
.repeated_root => {
var current: ?*Element = element;
while (current) |el| : (current = el.next_sibling) {
if (!std.mem.eql(u8, el.tag, element.tag)) continue;
try children.append(
allocator,
try parseInternal(ptr_info.child, el, options),
);
}
},
log.debug("type = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @typeName(ptr_info.child), element.tag });
var iterator = element.elements();
var children = std.ArrayList(ptr_info.child).init(allocator);
defer children.deinit();
while (iterator.next()) |child_element| {
try children.append(try parseInternal(ptr_info.child, child_element, options));
}
return children.toOwnedSlice(allocator);
return children.toOwnedSlice();
// var inx: usize = 0;
// while (inx < children.len) {
// switch (element.children.items[inx]) {
// .Element => children[inx] = try parseInternal(ptr_info.child, element.children.items[inx].Element, options),
// .CharData => children[inx] = try allocator.dupe(u8, element.children.items[inx].CharData),
// .Comment => children[inx] = try allocator.dupe(u8, element.children.items[inx].Comment), // This might be an error...
// }
// inx += 1;
// }
}
return try allocator.dupe(u8, element.children.items[0].CharData);
},
.many => {
.Many => {
return error.ManyPointerSizeNotImplemented;
},
.c => {
.C => {
return error.CPointerSizeNotImplemented;
},
}
@ -799,33 +736,3 @@ test "compiler assertion failure 2" {
defer parsed_data.deinit();
try testing.expect(parsed_data.parsed_value.key_group_list.?.quantity == 42);
}
test "can parse list objects" {
const data =
\\<?xml version="1.0" encoding="UTF-8"?>
\\<ListBucketResult>
\\ <Contents>
\\ <Key>file1.txt</Key>
\\ <Size>1024</Size>
\\ </Contents>
\\ <Contents>
\\ <Key>file2.jpg</Key>
\\ <Size>2048</Size>
\\ </Contents>
\\</ListBucketResult>
;
const Response = sm.s3.list_objects_v2.Response;
const parsed_data = try parse(Response, data, .{ .allocator = testing.allocator });
defer parsed_data.deinit();
const response: Response = parsed_data.parsed_value;
const s3_objects: []sm.s3.Object = response.contents.?;
try testing.expectEqual(2, s3_objects.len);
try testing.expectEqualStrings(s3_objects[0].key.?, "file1.txt");
try testing.expectEqualStrings(s3_objects[1].key.?, "file2.jpg");
try testing.expectEqual(s3_objects[0].size.?, 1024);
try testing.expectEqual(s3_objects[1].size.?, 2048);
}

View file

@ -1,3 +0,0 @@
{
"ignore": ["lib/json/src/json.zig"]
}