Compare commits


41 commits

Author SHA1 Message Date
219bd32aa0
diagnose fd issue
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 7m30s
2025-04-17 18:00:48 -07:00
ea14e3b90a
better test web server management
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 7m20s
2025-04-17 17:42:10 -07:00
12c7c46594
add test server timeout 2025-04-16 19:35:47 -07:00
ebb727c464
disable release mode test in zig-mach nightly run
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 7m52s
2025-04-16 19:11:18 -07:00
0d575c92bb
add test filter option
Some checks failed
AWS-Zig Build / build-zig-amd64-host (push) Has been cancelled
2025-04-16 19:07:20 -07:00
98fff010dd
update dependency on example
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 1m51s
2025-04-16 19:01:25 -07:00
072dcc1cec
remove GitHub actions as they seem to be looking for payment info for free accounts 2025-04-16 19:00:18 -07:00
7a6086447c
update CI for release mode and forgejo install
Some checks failed
AWS-Zig Build / build-zig-amd64-host (push) Failing after 6m38s
2025-04-16 18:51:34 -07:00
Simon Hartcher
1b4788f469 fix: undefined behaviour in parseJsonData 2025-04-08 12:49:59 +10:00
Simon Hartcher
b5cd321263 fix(TestSetup): undefined behaviour 2025-04-08 11:39:19 +10:00
8ac7aa47f7
add workflow_dispatch to main build
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 49s
2025-03-30 18:12:37 -07:00
e194debb96
update envrc to better zvm_direnv 2025-03-25 09:32:38 -07:00
e0e09fb19e
add no-bin option as recommended in zig 0.14.0 release notes
https://ziglang.org/download/0.14.0/release-notes.html#Incremental-Compilation
2025-03-23 16:24:20 -07:00
8421fd9e55
merge pr #6 allowing dependency on service model 2025-03-23 16:17:04 -07:00
9e8b3a6fc6
fix json serialization for null/empty maps 2025-03-21 09:59:33 -07:00
34c097e45f
update nominated zig url 2025-03-20 23:28:38 -07:00
ffe3941dbe
allow workflow dispatch on zig previous 2025-03-20 23:24:38 -07:00
cdaf924867
update readme generally and for new branch strategy 2025-03-20 19:57:14 -07:00
6c106c1c71
correct refs in workflows 2025-03-20 19:23:20 -07:00
f325ef4236
update example for zig 0.14.0 2025-03-20 09:00:35 -07:00
30d46261b7
use updated build system for model path 2025-03-20 08:53:47 -07:00
86483ec84d
update gitea ci 2025-03-20 08:25:20 -07:00
4f16553410
update to zig 0.14.0 2025-03-20 08:18:37 -07:00
12e24b01ad
zig 0.14.0 upgrade: merge zig-develop branch 2025-03-20 03:20:43 -07:00
Simon Hartcher
220d45ab20 fix: missing module imports 2025-03-13 15:43:32 +11:00
Simon Hartcher
71495a4d1d chore: add codegen to paths 2025-03-13 14:46:15 +11:00
Simon Hartcher
303af8661c fix: make modules depend on codegen 2025-03-13 14:32:03 +11:00
acd6589909
add support for raw responses beginning with name of single field in response struct 2025-02-05 13:22:52 -08:00
78b36e2316
support for timestamp as f128 (more)
f128 is not really the correct data type long term. More information on the exact details is at
https://smithy.io/2.0/aws/protocols/aws-json-1_1-protocol.html and
https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait

But...it will hold all our values and parses correctly, so we can use it for now
2025-02-05 13:21:53 -08:00
b369c29e84
manually set latest mach to remove confusion 2025-02-05 13:14:10 -08:00
e3bb4142d6
update example dependency
All checks were successful
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Successful in 51s
aws-zig nightly build / build-zig-nightly (push) Successful in 1m35s
2024-12-19 09:04:42 -08:00
e02fb699fc
move away from deprecated API
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Successful in 39s
aws-zig nightly build / build-zig-nightly (push) Failing after 1m21s
2024-12-19 08:54:30 -08:00
35fad85c13
add .envrc 2024-12-19 08:48:37 -08:00
88d7e99d6b
add a build option to disable LLVM 2024-12-19 08:43:25 -08:00
debb4dab60
update example dependencies
All checks were successful
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Successful in 49s
aws-zig nightly build / build-zig-nightly (push) Successful in 1m33s
2024-10-26 18:50:58 -07:00
6240225db2
workaround for zig issue 21815
Some checks failed
aws-zig nightly build / build-zig-nightly (push) Failing after 1m47s
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 2m10s
2024-10-26 18:33:30 -07:00
0892914c5b
add build status note to readme
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 23s
aws-zig nightly build / build-zig-nightly (push) Failing after 28s
2024-10-17 12:00:42 -07:00
97b784f8e3
cleanup main, re-enable riscv64-linux 2024-10-17 12:00:20 -07:00
4fa30a70cc
more ci stuff
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 16s
aws-zig nightly build / build-zig-nightly (push) Failing after 27s
2024-10-17 11:28:54 -07:00
9497db373c
ci failing with panic that does not happen locally
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 28s
aws-zig nightly build / build-zig-nightly (push) Failing after 34s
2024-10-17 11:08:14 -07:00
3d78705ea5
update to latest zig nominated
Some checks failed
aws-zig mach nominated build / build-zig-nominated-mach-latest (push) Failing after 26s
aws-zig nightly build / build-zig-nightly (push) Failing after 3m36s
2024-10-17 10:54:01 -07:00
22 changed files with 508 additions and 342 deletions

.envrc Normal file
View file

@@ -0,0 +1,8 @@
+# vi: ft=sh
+# shellcheck shell=bash
+if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
+  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
+fi
+
+use zig 0.14.0
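
The new `.envrc` hands Zig version management to the zvm-direnv extension it sources. A sketch of activating it locally (assumes direnv and zvm are already installed; `direnv allow` is standard direnv usage, not something this commit adds):

```sh
# From the repository root: trust the new .envrc so direnv will load it
direnv allow .
# direnv fetches the pinned direnvrc (integrity-checked via the sha256 in source_url)
# and `use zig 0.14.0` puts the matching compiler on PATH via zvm
zig version   # expect: 0.14.0
```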

View file

@@ -1,9 +1,9 @@
 name: AWS-Zig Build
 on:
+  workflow_dispatch:
   push:
     branches:
-      - '*'
-      - '!zig-develop*'
+      - 'master'
 env:
   ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
@@ -18,11 +18,19 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v4
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
-          version: 0.13.0
+          version: 0.14.0
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Ulimit
+        run: ulimit -a
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
       - name: Run tests
-        run: zig build test --verbose
+        run: zig build test --verbose --summary all
+      - name: Run tests (release mode)
+        run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig build scripts don't have the ability to import depenedencies directly
       # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
       # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@@ -67,7 +75,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example
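
The workflow now splits verification into a fast smoke test, a full multi-target pass, and a ReleaseSafe pass. Run locally, the equivalent sequence is roughly the following (same step commands as the workflow above; only the ordering comments are mine):

```sh
ulimit -a                                        # same diagnostic the CI prints
zig build smoke-test --verbose                   # fast check, native target only
zig build test --verbose --summary all           # full test pass, all targets
zig build test -Doptimize=ReleaseSafe --verbose  # release-mode pass
```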

View file

@@ -1,10 +1,11 @@
 name: aws-zig mach nominated build
 on:
+  workflow_dispatch:
   schedule:
     - cron: '0 12 * * *' # noon UTC, 4AM Pacific
   push:
     branches:
-      - 'zig-develop*'
+      - 'zig-mach'
 env:
   PKG_PREFIX: nominated-zig
 jobs:
@@ -23,13 +24,22 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v4
         with:
-          ref: zig-develop
+          ref: zig-mach
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: mach-latest
-      - name: Run tests
-        run: zig build test --verbose
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Run gen
+        run: zig build gen --verbose
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
+        run: zig build test --verbose --summary all
+      # TODO: Zig mach currently tracking behind zig 0.14.0 branch - enable this test after update
+      # - name: Run tests (release mode)
+      #   run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
       # the way GitHub/Gitea does for repo archives
@@ -62,7 +72,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
            --upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
       - name: Build example

View file

@@ -1,10 +1,11 @@
 name: aws-zig nightly build
 on:
+  workflow_dispatch:
   schedule:
     - cron: '30 12 * * *' # 12:30 UTC, 4:30AM Pacific
   push:
     branches:
-      - 'zig-develop*'
+      - 'zig-develop'
 env:
   PKG_PREFIX: nightly-zig
 jobs:
@@ -25,11 +26,17 @@ jobs:
         with:
           ref: zig-develop
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: master
-      - name: Run tests
-        run: zig build test --verbose
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
+        run: zig build test --verbose --summary all
+      - name: Run tests (release mode)
+        run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
       # the way GitHub/Gitea does for repo archives
@@ -62,7 +69,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
            --upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
       - name: Build example

View file

@@ -0,0 +1,91 @@
+name: AWS-Zig Build
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - 'zig-0.13'
+env:
+  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
+jobs:
+  build-zig-amd64-host:
+    runs-on: ubuntu-latest
+    # Need to use the default container with node and all that, so we can
+    # use JS-based actions like actions/checkout@v3...
+    # container:
+    #   image: alpine:3.15.0
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+        with:
+          ref: zig-0.13
+      - name: Setup Zig
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
+        with:
+          version: 0.13.0
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
+        run: zig build test --verbose --summary all
+      # Release mode fix not backported to 0.13.0 code
+      #- name: Run tests (release mode)
+      #  run: zig build test -Doptimize=ReleaseSafe --verbose
+      # Zig build scripts don't have the ability to import depenedencies directly
+      # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
+      # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
+      # until we have our models built. So we have to have the build script
+      # basically modified, only during packaging, to allow this use case
+      #
+      # Zig package manager expects everything to be inside a directory in the archive,
+      # which it then strips out on download. So we need to shove everything inside a directory
+      # the way GitHub/Gitea does for repo archives
+      #
+      # Also, zig tar process doesn't handle gnu format for long names, nor does it seam to
+      # handle posix long name semantics cleanly either. ustar works. This
+      # should be using git archive, but we need our generated code to be part of it
+      - name: Package source code with generated models
+        run: |
+          sed -i 's#// UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //##' build.zig
+          tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            --format ustar \
+            --exclude 'zig-*' \
+            *
+      # Something in this PR broke this transform. I don't mind removing it, but
+      # the PR attempts to handle situations with or without a prefix, but it
+      # doesn't. I have not yet determined what the problem is, though
+      # https://github.com/ziglang/zig/pull/19111/files
+      # --transform 's,^,${{ github.sha }}/,' *
+      # - name: Sign
+      #   id: sign
+      #   uses: https://git.lerch.org/lobo/action-hsm-sign@v1
+      #   with:
+      #     pin: ${{ secrets.HSM_USER_PIN }}
+      #     files: ???
+      #     public_key: 'https://emil.lerch.org/serverpublic.pem'
+      # - run: |
+      #     echo "Source 0 should be ./bar: ${{ steps.sign.outputs.SOURCE_0 }}"
+      # - run: |
+      #     echo "Signature 0 should be ./bar.sig: ${{ steps.sign.outputs.SIG_0 }}"
+      # - run: echo "URL of bar (0) is ${{ steps.sign.outputs.URL_0 }}"
+      # - run: |
+      #     echo "Source 1 should be ./foo: ${{ steps.sign.outputs.SOURCE_1 }}"
+      # - run: |
+      #     echo "Signature 1 should be ./foo.sig: ${{ steps.sign.outputs.SIG_1 }}"
+      # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
+      - name: Publish source code with generated models
+        run: |
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds
+      - name: Notify
+        uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
+        if: always()
+        with:
+          host: ${{ secrets.NTFY_HOST }}
+          topic: ${{ secrets.NTFY_TOPIC }}
+          user: ${{ secrets.NTFY_USER }}
+          password: ${{ secrets.NTFY_PASSWORD }}

View file

@@ -1,21 +0,0 @@
-name: AWS-Zig Build
-on:
-  push:
-    branches:
-      - '*'
-      - '!zig-develop*'
-jobs:
-  build-zig-0-12-0-amd64:
-    runs-on: ubuntu-latest
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
-        with:
-          version: 0.13.0
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds

View file

@@ -1,20 +0,0 @@
-name: aws-zig mach nominated build
-on:
-  push:
-    branches:
-      - 'zig-develop*'
-jobs:
-  build-zig-mach-latest:
-    runs-on: ubuntu-latest
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
-        with:
-          version: mach-latest
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds

View file

@@ -1,20 +0,0 @@
-name: aws-zig nightly build
-on:
-  push:
-    branches:
-      - 'zig-develop*'
-jobs:
-  build-zig-nightly:
-    runs-on: ubuntu-latest
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
-        with:
-          version: master
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds

View file

@@ -1,11 +1,11 @@
 AWS SDK for Zig
 ===============

-[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+[Zig 0.14](https://ziglang.org/download/#release-0.14.0):

-[![Build Status: Zig 0.13.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
+[![Build Status: Zig 0.14.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/build.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)

-[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
+[Last Mach Nominated Zig Version](https://machengine.org/docs/nominated-zig/):

 [![Build Status: Mach nominated](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-mach.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)
@@ -13,11 +13,16 @@ AWS SDK for Zig
 [![Build Status: Zig Nightly](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-nightly.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)

+[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+
+[![Build Status: Zig 0.13.0](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-previous.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
+
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
-in x86_linux, and will vary based on services used. Tested targets:
+in x86_64-linux, and will vary based on services used. Tested targets:

 * x86_64-linux
-* riscv64-linux\*
+* riscv64-linux
 * aarch64-linux
 * x86_64-windows
 * arm-linux
@@ -26,25 +31,38 @@ in x86_linux, and will vary based on services used. Tested targets:
 Tested targets are built, but not continuously tested, by CI.

-\* On Zig 0.12/0.13, riscv64-linux disabled due to [LLLM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872)
+Branches
+--------
+
+* **master**:      This branch tracks the latest released zig version
+* **zig-0.13**:    This branch tracks the previous released zig version (0.13 currently).
+                   Support for the previous version is best effort, generally
+                   degrading over time. Fixes will generally appear in master, then
+                   backported into the previous version.
+* **zig-mach**:    This branch tracks the latest mach nominated version. A separate
+                   branch is necessary as mach nominated is usually, but not always,
+                   more recent than the latest production zig. Support for the mach
+                   version is best effort.
+* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
+                   for breaking changes that will need to be dealt with when
+                   a new mach nominated version or new zig release appear.
+                   Expect significant delays in any build failures.

-Zig-Develop Branch
-------------------
-
-This branch is intended for use with the in-development version of Zig. This
-starts with 0.12.0-dev.3180+83e578a18. This is aligned with [Mach Engine's Nominated
-Zig Versions](https://machengine.org/about/nominated-zig/). Nightly zig versions
-are difficult to keep up with and there is no special effort made there, build
-status is FYI (and used as a canary for nominated zig versions).
+Other branches/tags exist but are unsupported

 Building
 --------

 `zig build` should work. It will build the code generation project, fetch model
 files from upstream AWS Go SDK v2, run the code generation, then build the main
-project with the generated code. Testing can be done with `zig test`.
+project with the generated code. Testing can be done with `zig build test`. Note that
+this command tests on all supported architectures, so for a faster testing
+process, use `zig build smoke-test` instead.
+
+To make development even faster, a build option is provided to avoid the use of
+LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
+can reduce build/test time 300%. Note, however, native code generation in zig
+is not yet complete, so you may see errors.

 Using
 -----
@@ -52,7 +70,8 @@ Using
 This is designed for use with the Zig package manager, and exposes a module
 called "aws". Set up `build.zig.zon` and add the dependency/module to your project
 as normal and the package manager should do its thing. A full example can be found
-in [/example](example/README.md).
+in [/example](example/build.zig.zon). This can also be used at build time in
+a downstream project's `build.zig`.

 Configuring the module and/or Running the demo
 ----------------------------------------------
@@ -60,8 +79,8 @@ Configuring the module and/or Running the demo
 This library mimics the aws c libraries for it's work, so it operates like most
 other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
 for working with services. For local testing or alternative endpoints, there's
-no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
-variable that will supersede all other configuration.
+no real standard, so there is code to look for an environment variable
+`AWS_ENDPOINT_URL` variable that will supersede all other configuration.

 Limitations
 -----------
@@ -82,13 +101,6 @@ TODO List:
 * Implement timeouts and other TODO's in the code
 * Add option to cache signature keys

-Services without TLS 1.3 support
---------------------------------
-
-All AWS services should support TLS 1.3 at this point, but there are many regions
-and several partitions, and not all of them have been tested, so your mileage
-may vary. If something doesn't work, please submit an issue to let others know.
-
 Dependency tree
 ---------------
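
The rewritten Building section references two build options added in the build.zig diff further down (`no-llvm`, `no-bin`). A sketch of the faster development loop they enable, per the README text (flag names from this diff; the timing claim is the README's own):

```sh
# Skip the LLVM backend for a much faster edit/test cycle (native codegen may error)
zig build -Dno-llvm smoke-test
# Analyze and build without emitting the demo binary at all
zig build -Dno-llvm -Dno-bin
```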

View file

@@ -10,11 +10,7 @@ const test_targets = [_]std.Target.Query{
     .{}, // native
     .{ .cpu_arch = .x86_64, .os_tag = .linux },
     .{ .cpu_arch = .aarch64, .os_tag = .linux },
-    // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024
-    // This is likely a LLVM problem unlikely to be fixed in zig 0.13
-    // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440
-    // Zig tracker: https://github.com/ziglang/zig/issues/18872
-    // .{ .cpu_arch = .riscv64, .os_tag = .linux },
+    .{ .cpu_arch = .riscv64, .os_tag = .linux },
     .{ .cpu_arch = .arm, .os_tag = .linux },
     .{ .cpu_arch = .x86_64, .os_tag = .windows },
     .{ .cpu_arch = .aarch64, .os_tag = .macos },
@@ -33,11 +29,23 @@ pub fn build(b: *Builder) !void {
     // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
     const optimize = b.standardOptimizeOption(.{});

+    const no_llvm = b.option(
+        bool,
+        "no-llvm",
+        "Disable LLVM",
+    ) orelse false;
+
     const broken_windows = b.option(
         bool,
         "broken-windows",
         "Windows is broken in this environment (do not run Windows tests)",
     ) orelse false;
+    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
+
+    const test_filters: []const []const u8 = b.option(
+        []const []const u8,
+        "test-filter",
+        "Skip tests that do not match any of the specified filters",
+    ) orelse &.{};
     // TODO: Embed the current git version in the code. We can do this
     // by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
     // grab that commit, and use b.addOptions/exe.addOptions to generate the
@@ -56,6 +64,7 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
+    exe.use_llvm = !no_llvm;
     const smithy_dep = b.dependency("smithy", .{
         // These are the arguments to the dependency. It expects a target and optimization level.
         .target = target,
@@ -64,17 +73,6 @@ pub fn build(b: *Builder) !void {
     const smithy_module = smithy_dep.module("smithy");
     exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...

-    // Expose module to others
-    _ = b.addModule("aws", .{
-        .root_source_file = b.path("src/aws.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
-
-    // Expose module to others
-    _ = b.addModule("aws-signing", .{
-        .root_source_file = b.path("src/aws_signing.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
     // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
     //
     // We are working here with kind of a weird dependency though. So we can do this
@@ -97,7 +95,6 @@ pub fn build(b: *Builder) !void {
     const run_step = b.step("run", "Run the app");
     run_step.dependOn(&run_cmd.step);

-    const gen_step = blk: {
     const cg = b.step("gen", "Generate zig service code from smithy models");

     const cg_exe = b.addExecutable(.{
@@ -107,29 +104,18 @@ pub fn build(b: *Builder) !void {
         .target = b.graph.host,
         .optimize = if (b.verbose) .Debug else .ReleaseSafe,
     });
-    cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
+    cg_exe.root_module.addImport("smithy", smithy_module);
     var cg_cmd = b.addRunArtifact(cg_exe);
     cg_cmd.addArg("--models");
-    const hash = hash_blk: {
-        for (b.available_deps) |dep| {
-            const dep_name = dep.@"0";
-            const dep_hash = dep.@"1";
-            if (std.mem.eql(u8, dep_name, "models"))
-                break :hash_blk dep_hash;
-        }
-        return error.DependencyNamedModelsNotFoundInBuildZigZon;
-    };
     cg_cmd.addArg(try std.fs.path.join(
         b.allocator,
         &[_][]const u8{
-            b.graph.global_cache_root.path.?,
-            "p",
-            hash,
+            try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
             models_subdir,
         },
     ));
     cg_cmd.addArg("--output");
-    cg_cmd.addDirectoryArg(b.path("src/models"));
+    const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
     if (b.verbose)
         cg_cmd.addArg("--verbose");
     // cg_cmd.step.dependOn(&fetch_step.step);
@@ -148,10 +134,34 @@ pub fn build(b: *Builder) !void {
     // later about warning on manual changes...
     cg.dependOn(&cg_cmd.step);

-    break :blk cg;
-    };
-
-    exe.step.dependOn(gen_step);
+    exe.step.dependOn(cg);
+
+    // This allows us to have each module depend on the
+    // generated service manifest.
+    const service_manifest_module = b.createModule(.{
+        .root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+    service_manifest_module.addImport("smithy", smithy_module);
+
+    exe.root_module.addImport("service_manifest", service_manifest_module);
+
+    // Expose module to others
+    _ = b.addModule("aws", .{
+        .root_source_file = b.path("src/aws.zig"),
+        .imports = &.{
+            .{ .name = "smithy", .module = smithy_module },
+            .{ .name = "service_manifest", .module = service_manifest_module },
+        },
+    });
+
+    // Expose module to others
+    _ = b.addModule("aws-signing", .{
+        .root_source_file = b.path("src/aws_signing.zig"),
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+    });

     // Similar to creating the run step earlier, this exposes a `test` step to
     // the `zig build --help` menu, providing a way for the user to request
@@ -180,9 +190,12 @@ pub fn build(b: *Builder) !void {
             .root_source_file = b.path("src/aws.zig"),
             .target = b.resolveTargetQuery(t),
             .optimize = optimize,
+            .filters = test_filters,
         });
-        unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
-        unit_tests.step.dependOn(gen_step);
+        unit_tests.root_module.addImport("smithy", smithy_module);
+        unit_tests.root_module.addImport("service_manifest", service_manifest_module);
+        unit_tests.step.dependOn(cg);
+        unit_tests.use_llvm = !no_llvm;

         const run_unit_tests = b.addRunArtifact(unit_tests);
         run_unit_tests.skip_foreign_checks = true;
@@ -203,12 +216,19 @@ pub fn build(b: *Builder) !void {
         .root_source_file = b.path("src/aws.zig"),
         .target = target,
         .optimize = optimize,
+        .filters = test_filters,
     });
-    smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
-    smoke_test.step.dependOn(gen_step);
+    smoke_test.use_llvm = !no_llvm;
+    smoke_test.root_module.addImport("smithy", smithy_module);
+    smoke_test.root_module.addImport("service_manifest", service_manifest_module);
+    smoke_test.step.dependOn(cg);

     const run_smoke_test = b.addRunArtifact(smoke_test);

     smoke_test_step.dependOn(&run_smoke_test.step);

-    b.installArtifact(exe);
+    if (no_bin) {
+        b.getInstallStep().dependOn(&exe.step);
+    } else {
+        b.installArtifact(exe);
+    }
 }
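
Besides `no-llvm` and `no-bin`, the build script now threads a `test-filter` option into both test executables, so individual tests can be run without the full matrix. A sketch of how it would be used (the filter strings here are illustrative; the option accepts multiple values):

```sh
# Run only tests whose names match a filter; works for both test and smoke-test steps
zig build test -Dtest-filter=json --summary all
zig build smoke-test -Dtest-filter="custom serialization" -Dtest-filter=timestamps
```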

View file

@ -1,18 +1,20 @@
.{ .{
.name = "aws", .name = .aws,
.version = "0.0.1", .version = "0.0.1",
.fingerprint = 0x1f26b7b27005bb49,
.paths = .{ .paths = .{
"build.zig", "build.zig",
"build.zig.zon", "build.zig.zon",
"src", "src",
"codegen",
"README.md", "README.md",
"LICENSE", "LICENSE",
}, },
.dependencies = .{ .dependencies = .{
.smithy = .{ .smithy = .{
.url = "https://git.lerch.org/lobo/smithy/archive/3ed98751bc414e005af6ad185feb213d4366c0db.tar.gz", .url = "https://git.lerch.org/lobo/smithy/archive/a4c6ec6dfe552c57bab601c7d99e8de02bbab1fe.tar.gz",
.hash = "12204a784751a4ad5ed6c8955ba91fcbc4a3cad6c5a7da38f39abf074ef801d13172", .hash = "smithy-1.0.0-uAyBgS_MAgC4qgc9QaEy5Y5Nf7kv32buQZBYugqNQsAn",
}, },
.models = .{ .models = .{
.url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz", .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",

View file

@@ -2,7 +2,7 @@ const std = @import("std");
 // options is a json.Options, but since we're using our hacked json.zig we don't want to
 // specifically call this out
 pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
-    if (@typeInfo(@TypeOf(map)) == .Optional) {
+    if (@typeInfo(@TypeOf(map)) == .optional) {
         if (map == null)
             return false
         else

View file

@@ -435,7 +435,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:

 fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerationState, writer: anytype) !void {
     // More types may be added during processing
-    while (file_state.additional_types_to_generate.popOrNull()) |t| {
+    while (file_state.additional_types_to_generate.pop()) |t| {
         if (file_state.additional_types_generated.getEntry(t.name) != null) continue;
         // std.log.info("\t\t{s}", .{t.name});
         var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);

View file

@ -1,12 +1,13 @@
.{ .{
.name = "myapp", .name = .myapp,
.version = "0.0.1", .version = "0.0.1",
.fingerprint = 0x8798022a511224c5,
.paths = .{""}, .paths = .{""},
.dependencies = .{ .dependencies = .{
.aws = .{ .aws = .{
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/dfda8e77d624dfb776e3a70471501a7c610fbac1/dfda8e77d624dfb776e3a70471501a7c610fbac1-with-models.tar.gz", .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/7a6086447c1249b0e5b5b5f3873d2f7932bea56d/7a6086447c1249b0e5b5b5f3873d2f7932bea56d-with-models.tar.gz",
.hash = "122000ad704234e68fee82a52e3b4e365a52874ec851d978b109e05ac66a80dc86ac", .hash = "aws-0.0.1-SbsFcGN_CQCBjurpc2GEMw4c_qAkGu6KpuVnLBLY4L4q",
}, },
}, },
} }
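
The example manifest now pins the package produced by the `7a6086447c` CI run above, using the 0.14.0-era name/hash format. A sketch of refreshing such a pin with the package manager (zig fetch rewrites the url/hash pair in build.zig.zon; the URL is the one from this diff):

```sh
zig fetch --save=aws \
  'https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/7a6086447c1249b0e5b5b5f3873d2f7932bea56d/7a6086447c1249b0e5b5b5f3873d2f7932bea56d-with-models.tar.gz'
```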

View file

@@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type {
                 // And the response property below will pull whatever is the ActionResult object
                 // We can grab index [0] as structs are guaranteed by zig to be returned in the order
                 // declared, and we're declaring in that order in ServerResponse().
-                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name);
+                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
                 return FullResponseType{
-                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name),
+                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
                     .response_metadata = .{
                         .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
                     },
@@ -688,7 +688,6 @@ pub fn Request(comptime request_action: anytype) type {
         fn ParsedJsonData(comptime T: type) type {
             return struct {
-                raw_response_parsed: bool,
                 parsed_response_ptr: *T,
                 allocator: std.mem.Allocator,
@@ -697,7 +696,6 @@ pub fn Request(comptime request_action: anytype) type {
                 pub fn deinit(self: MySelf) void {
                     // This feels like it should result in a use after free, but it
                     // seems to be working?
-                    if (self.raw_response_parsed)
-                        self.allocator.destroy(self.parsed_response_ptr);
+                    self.allocator.destroy(self.parsed_response_ptr);
                 }
             };
@@ -713,11 +711,13 @@ pub fn Request(comptime request_action: anytype) type {
                 std.mem.eql(u8, key, action.action_name ++ "Response") or
                 std.mem.eql(u8, key, action.action_name ++ "Result") or
                 isOtherNormalResponse(response_types.NormalResponse, key);
-            var raw_response_parsed = false;
             var stream = json.TokenStream.init(data);

             const parsed_response_ptr = blk: {
-                if (!response_types.isRawPossible or found_normal_json_response)
-                    break :blk &(json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
+                const ptr = try options.client.allocator.create(response_types.NormalResponse);
+                errdefer options.client.allocator.destroy(ptr);
+                if (!response_types.isRawPossible or found_normal_json_response) {
+                    ptr.* = (json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
                         log.err(
                             \\Call successful, but unexpected response from service.
                             \\This could be the result of a bug or a stale set of code generated
@@ -733,10 +733,10 @@ pub fn Request(comptime request_action: anytype) type {
                         return e;
                     });

+                    break :blk ptr;
+                }
                 log.debug("Appears server has provided a raw response", .{});
-                raw_response_parsed = true;
-                const ptr = try options.client.allocator.create(response_types.NormalResponse);
-                errdefer options.client.allocator.destroy(ptr);
                 @field(ptr.*, std.meta.fields(action.Response)[0].name) =
                     json.parse(response_types.RawResponse, &stream, parser_options) catch |e| {
                         log.err(
@@ -756,8 +756,7 @@ pub fn Request(comptime request_action: anytype) type {
                 break :blk ptr;
             };
             return ParsedJsonData(response_types.NormalResponse){
-                .raw_response_parsed = raw_response_parsed,
-                .parsed_response_ptr = @constCast(parsed_response_ptr), //TODO: why doesn't changing const->var above fix this?
+                .parsed_response_ptr = parsed_response_ptr,
                 .allocator = options.client.allocator,
             };
         }
@@ -773,12 +772,16 @@ fn isOtherNormalResponse(comptime T: type, first_key: []const u8) bool {
     return std.mem.eql(u8, first_key, expected_key);
 }
 fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
-    if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val);
+    if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val);
     // TODO: This is terrible...fix it
     switch (T) {
         bool => return std.ascii.eqlIgnoreCase(val, "true"),
-        i64 => return parseInt(T, val) catch |e| {
-            log.err("Invalid string representing i64: {s}", .{val});
+        i64, i128 => return parseInt(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
+            return e;
+        },
+        f64, f128 => return std.fmt.parseFloat(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
             return e;
         },
         else => return val,
@@ -806,8 +809,8 @@ fn parseInt(comptime T: type, val: []const u8) !T {
 fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
     switch (@typeInfo(@TypeOf(val))) {
-        .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
-        .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
+        .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
+        .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
         else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
     }
 }
@@ -926,20 +929,20 @@ fn ServerResponse(comptime action: anytype) type {
         RequestId: []u8,
     };
     const Result = @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
                     .name = action.action_name ++ "Result",
                     .type = T,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },
                 .{
                     .name = "ResponseMetadata",
                     .type = ResponseMetadata,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },
@@ -949,13 +952,13 @@ fn ServerResponse(comptime action: anytype) type {
         },
     });
     return @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
                     .name = action.action_name ++ "Response",
                     .type = Result,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },
@@ -1015,8 +1018,8 @@ fn FullResponse(comptime action: anytype) type {
 }
 fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
     switch (@typeInfo(@TypeOf(obj))) {
-        .Pointer => allocator.free(obj),
-        .Optional => if (obj) |o| safeFree(allocator, o),
+        .pointer => allocator.free(obj),
+        .optional => if (obj) |o| safeFree(allocator, o),
         else => {},
     }
 }
@@ -1125,7 +1128,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
     var prefix = "?";
     if (@hasDecl(@TypeOf(request), "http_query")) {
         const query_arguments = @field(@TypeOf(request), "http_query");
-        inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
+        inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| {
            const val = @field(request, arg.name);
            if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer))
                prefix = "&";
@@ -1136,14 +1139,14 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {

 fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
     switch (@typeInfo(@TypeOf(value))) {
-        .Optional => {
+        .optional => {
             if (value) |v|
                 return try addQueryArg(ValueType, prefix, key, v, writer);
             return false;
         },
         // if this is a pointer, we want to make sure it is more than just a string
-        .Pointer => |ptr| {
-            if (ptr.child == u8 or ptr.size != .Slice) {
+        .pointer => |ptr| {
+            if (ptr.child == u8 or ptr.size != .slice) {
                 // This is just a string
                 return try addBasicQueryArg(prefix, key, value, writer);
             }
@@ -1154,7 +1157,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
             }
             return std.mem.eql(u8, "&", p);
         },
-        .Array => |arr| {
+        .array => |arr| {
             if (arr.child == u8)
                 return try addBasicQueryArg(prefix, key, value, writer);
             var p = prefix;
@@ -1274,8 +1277,8 @@ fn reportTraffic(
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1289,7 +1292,7 @@ test "custom serialization for map objects" {
     const allocator = std.testing.allocator;
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
@@ -1533,7 +1536,7 @@ const TestOptions = struct {
     request_target: []const u8 = undefined,
     request_headers: []std.http.Header = undefined,
     test_server_runtime_uri: ?[]u8 = null,
-    server_ready: bool = false,
+    server_ready: std.Thread.Semaphore = .{},
     requests_processed: usize = 0,

     const Self = @This();
@@ -1588,10 +1591,18 @@ const TestOptions = struct {
         return error.HeaderOrValueNotFound;
     }

     fn waitForReady(self: *Self) !void {
-        // While this doesn't return an error, we can use !void
-        // to prepare for addition of timeout
-        while (!self.server_ready)
-            std.time.sleep(100);
+        // Set 10s timeout...this is way longer than necessary
+        log.debug("waiting for ready", .{});
+        try self.server_ready.timedWait(1000 * std.time.ns_per_ms);
+        // var deadline = std.Thread.Futex.Deadline.init(1000 * std.time.ns_per_ms);
+        // if (self.futex_word.load(.acquire) != 0) return;
+        // log.debug("futex zero", .{});
+        // // note that this seems backwards from the documentation...
+        // deadline.wait(self.futex_word, 1) catch {
+        //     log.err("futex value {d}", .{self.futex_word.load(.acquire)});
+        //     return error.TestServerTimeoutWaitingForReady;
+        // };
+        log.debug("the wait is over!", .{});
     }
 };
@@ -1619,8 +1630,9 @@ fn threadMain(options: *TestOptions) !void {
     // var aa = arena.allocator();
     // We're in control of all requests/responses, so this flag will tell us
     // when it's time to shut down
-    while (options.server_remaining_requests > 0) {
-        options.server_remaining_requests -= 1;
+    if (options.server_remaining_requests == 0)
+        options.server_ready.post(); // This will cause the wait for server to return
+    while (options.server_remaining_requests > 0) : (options.server_remaining_requests -= 1) {
         processRequest(options, &http_server) catch |e| {
             log.err("Unexpected error processing request: {any}", .{e});
             if (@errorReturnTrace()) |trace| {
fn processRequest(options: *TestOptions, net_server: *std.net.Server) !void { fn processRequest(options: *TestOptions, net_server: *std.net.Server) !void {
options.server_ready = true;
errdefer options.server_ready = false;
log.debug( log.debug(
"tid {d} (server): server waiting to accept. requests remaining: {d}", "tid {d} (server): server waiting to accept. requests remaining: {d}",
.{ std.Thread.getCurrentId(), options.server_remaining_requests + 1 }, .{ std.Thread.getCurrentId(), options.server_remaining_requests },
); );
// options.futex_word.store(1, .release);
// errdefer options.futex_word.store(0, .release);
options.server_ready.post();
var connection = try net_server.accept(); var connection = try net_server.accept();
defer connection.stream.close(); defer connection.stream.close();
var read_buffer: [1024 * 16]u8 = undefined; var read_buffer: [1024 * 16]u8 = undefined;
@@ -1655,8 +1668,6 @@ fn processRequest(options: *TestOptions, net_server: *std.net.Server) !void {
 }

 fn serveRequest(options: *TestOptions, request: *std.http.Server.Request) !void {
-    options.server_ready = false;
-
     options.requests_processed += 1;
     options.request_body = try (try request.reader()).readAllAlloc(options.allocator, std.math.maxInt(usize));
     options.request_method = request.head.method;
@@ -1698,7 +1709,7 @@ const TestSetup = struct {
     request_options: TestOptions,
     server_thread: std.Thread = undefined,
     creds: aws_auth.Credentials = undefined,
-    client: *Client = undefined,
+    client: Client = undefined,
     started: bool = false,

     const Self = @This();
@@ -1726,7 +1737,8 @@ const TestSetup = struct {
         // Not sure why we're getting sprayed here, but we have an arena allocator, and this
         // is testing, so yolo
         awshttp.endpoint_override = self.request_options.test_server_runtime_uri;
-        log.debug("endpoint override set to {?s}", .{awshttp.endpoint_override});
+        if (awshttp.endpoint_override == null) return error.TestSetupStartFailure;
+        std.log.debug("endpoint override set to {?s}", .{awshttp.endpoint_override});
         self.creds = aws_auth.Credentials.init(
             self.allocator,
             try self.allocator.dupe(u8, "ACCESS"),
@@ -1734,8 +1746,8 @@ const TestSetup = struct {
             null,
         );
         aws_creds.static_credentials = self.creds;
-        var client = Client.init(self.allocator, .{});
-        self.client = &client;
+        const client = Client.init(self.allocator, .{});
+        self.client = client;
         return .{
             .region = "us-west-2",
             .client = client,
@@ -1744,6 +1756,27 @@ const TestSetup = struct {
     }

     fn stop(self: *Self) void {
+        if (self.request_options.server_remaining_requests > 0)
+            if (test_error_log_enabled)
+                std.log.err(
+                    "Test server has {d} request(s) remaining to issue! Draining",
+                    .{self.request_options.server_remaining_requests},
+                )
+            else
+                std.log.info(
+                    "Test server has {d} request(s) remaining to issue! Draining",
+                    .{self.request_options.server_remaining_requests},
+                );
+
+        var rr = self.request_options.server_remaining_requests;
+        while (rr > 0) : (rr -= 1) {
+            std.log.debug("rr: {d}", .{self.request_options.server_remaining_requests});
+            // We need to drain all remaining requests, otherwise the server
+            // will hang indefinitely
+            var client = std.http.Client{ .allocator = self.allocator };
+            defer client.deinit();
+            _ = client.fetch(.{ .location = .{ .url = self.request_options.test_server_runtime_uri.? } }) catch unreachable;
+        }
         self.server_thread.join();
     }
@@ -2103,7 +2136,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
     defer test_harness.deinit();
     const options = try test_harness.start();
     const lambda = (Services(.{.lambda}){}).lambda;
-    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };
@ -2389,3 +2422,33 @@ test "json_1_1: ECR timestamps" {
// try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?); // try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?);
try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?); try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?);
} }
var test_error_log_enabled = true;
test "test server timeout works" {
// const old = std.testing.log_level;
// defer std.testing.log_level = old;
// std.testing.log_level = .debug;
// defer std.testing.log_level = old;
// std.testing.log_level = .debug;
test_error_log_enabled = false;
defer test_error_log_enabled = true;
std.log.debug("test start", .{});
const allocator = std.testing.allocator;
var test_harness = TestSetup.init(.{
.allocator = allocator,
.server_response =
\\{}
,
.server_response_headers = &.{
.{ .name = "Content-Type", .value = "application/json" },
.{ .name = "x-amzn-RequestId", .value = "QBI72OUIN8U9M9AG6PCSADJL4JVV4KQNSO5AEMVJF66Q9ASUAAJG" },
},
});
defer test_harness.deinit();
defer test_harness.creds.deinit(); // Usually this gets done during the call,
// but we're purposely not making a call
// here, so we have to deinit() manually
_ = try test_harness.start();
std.log.debug("harness started", .{});
test_harness.stop();
std.log.debug("test complete", .{});
}


@@ -662,12 +662,12 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
 }
 defer allocator.free(encoded_once);
 var encoded_twice = try encodeUri(allocator, encoded_once);
+defer allocator.free(encoded_twice);
 log.debug("encoded path (2): {s}", .{encoded_twice});
 if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
-_ = allocator.resize(encoded_twice, i);
-return encoded_twice[0..i];
+return try allocator.dupe(u8, encoded_twice[0..i]);
 }
-return encoded_twice;
+return try allocator.dupe(u8, encoded_twice);
 }
 fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
@@ -936,6 +936,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
 const in_quote = false;
 var start: usize = 0;
 const rc = try allocator.alloc(u8, value.len);
+defer allocator.free(rc);
 var rc_inx: usize = 0;
 for (value, 0..) |c, i| {
 if (!started and !std.ascii.isWhitespace(c)) {
@@ -953,8 +954,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
 // Trim end
 while (std.ascii.isWhitespace(rc[rc_inx - 1]))
 rc_inx -= 1;
-_ = allocator.resize(rc, rc_inx);
-return rc[0..rc_inx];
+return try allocator.dupe(u8, rc[0..rc_inx]);
 }
 fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
 _ = context;
@@ -986,6 +986,7 @@ test "canonical uri" {
 const path = "/documents and settings/?foo=bar";
 const expected = "/documents%2520and%2520settings/";
 const actual = try canonicalUri(allocator, path, true);
+defer allocator.free(actual);
 try std.testing.expectEqualStrings(expected, actual);
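Note: the theme of this file's changes is that `Allocator.resize` returns a bool and is allowed to refuse to shrink in place; the old code discarded that result, leaving the allocator's bookkeeping out of sync with the slice it handed back. The replacement copies the wanted prefix instead. A minimal sketch of the pattern:

    // Shrink safely: copy the prefix rather than assuming resize() succeeds.
    fn trimmedCopy(allocator: std.mem.Allocator, buf: []u8, new_len: usize) ![]u8 {
        defer allocator.free(buf); // the oversized buffer is always released
        return try allocator.dupe(u8, buf[0..new_len]);
    }

Callers now own a freshly allocated slice and must free it, hence the added `defer allocator.free(actual);` in the test.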


@@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {
 fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
 switch (@typeInfo(T)) {
-.Bool => {
+.bool => {
 return switch (token) {
 .True => true,
 .False => false,
 else => error.UnexpectedToken,
 };
 },
-.Float, .ComptimeFloat => {
+.float, .comptime_float => {
 const numberToken = switch (token) {
 .Number => |n| n,
 else => return error.UnexpectedToken,
 };
 return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
 },
-.Int, .ComptimeInt => {
+.int, .comptime_int => {
 const numberToken = switch (token) {
 .Number => |n| n,
 else => return error.UnexpectedToken,
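Note: the bulk of this file's diff is mechanical. Zig 0.14 renamed the `std.builtin.Type` variants from CamelCase to snake_case, and variants whose new names collide with keywords must be quoted. A sketch of the new spellings:

    fn kind(comptime T: type) []const u8 {
        return switch (@typeInfo(T)) {
            .bool => "bool",
            .int, .comptime_int => "integer",
            .float, .comptime_float => "float",
            .optional => "optional",
            .@"struct", .@"enum", .@"union" => "container", // keywords need @"..."
            .pointer => "pointer",
            else => "other",
        };
    }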
@@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 if (std.math.round(float) != float) return error.InvalidNumber;
 return @as(T, @intFromFloat(float));
 },
-.Optional => |optionalInfo| {
+.optional => |optionalInfo| {
 if (token == .Null) {
 return null;
 } else {
 return try parseInternal(optionalInfo.child, token, tokens, options);
 }
 },
-.Enum => |enumInfo| {
+.@"enum" => |enumInfo| {
 switch (token) {
 .Number => |numberToken| {
 if (!numberToken.is_integer) return error.UnexpectedToken;
@@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 else => return error.UnexpectedToken,
 }
 },
-.Union => |unionInfo| {
+.@"union" => |unionInfo| {
 if (unionInfo.tag_type) |_| {
 // try each of the union fields until we find one that matches
 inline for (unionInfo.fields) |u_field| {
@@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
 }
 },
-.Struct => |structInfo| {
+.@"struct" => |structInfo| {
 switch (token) {
 .ObjectBegin => {},
 else => return error.UnexpectedToken,
@@ -1723,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 }
 inline for (structInfo.fields, 0..) |field, i| {
 if (!fields_seen[i]) {
-if (field.default_value) |default_value_ptr| {
+if (field.default_value_ptr) |default_value_ptr| {
 if (!field.is_comptime) {
 const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
 @field(r, field.name) = default_value;
@@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 }
 return r;
 },
-.Array => |arrayInfo| {
+.array => |arrayInfo| {
 switch (token) {
 .ArrayBegin => {
 var r: T = undefined;
@@ -1770,21 +1770,21 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 else => return error.UnexpectedToken,
 }
 },
-.Pointer => |ptrInfo| {
+.pointer => |ptrInfo| {
 const allocator = options.allocator orelse return error.AllocatorRequired;
 switch (ptrInfo.size) {
-.One => {
+.one => {
 const r: T = try allocator.create(ptrInfo.child);
 errdefer allocator.destroy(r);
 r.* = try parseInternal(ptrInfo.child, token, tokens, options);
 return r;
 },
-.Slice => {
+.slice => {
 switch (token) {
 .ArrayBegin => {
 var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
 errdefer {
-while (arraylist.popOrNull()) |v| {
+while (arraylist.pop()) |v| {
 parseFree(ptrInfo.child, v, options);
 }
 arraylist.deinit();
@@ -1829,7 +1829,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 if (value_type == null) return error.UnexpectedToken;
 var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
 errdefer {
-while (arraylist.popOrNull()) |v| {
+while (arraylist.pop()) |v| {
 parseFree(ptrInfo.child, v, options);
 }
 arraylist.deinit();
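Note: besides the type-info renames, Zig 0.14 changed `std.ArrayList(T).pop()` to return `?T`, absorbing the old `popOrNull()`. A small sketch:

    const std = @import("std");

    test "pop returns an optional in zig 0.14" {
        var list = std.ArrayList(u32).init(std.testing.allocator);
        defer list.deinit();
        try list.append(42);
        while (list.pop()) |v| { // was: while (list.popOrNull()) |v|
            try std.testing.expectEqual(@as(u32, 42), v);
        }
    }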
@@ -1863,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
 const ti = @typeInfo(T);
 switch (ti) {
-.Struct => {
-inline for (ti.Struct.fields) |field| {
+.@"struct" => {
+inline for (ti.@"struct".fields) |field| {
 if (std.mem.eql(u8, field.name, field_name))
 return field.type;
 }
@@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
 // We should be getting a type that is a pointer to a slice.
 // Let's just double check before proceeding
 const ti = @typeInfo(T);
-if (ti != .Pointer) return false;
-if (ti.Pointer.size != .Slice) return false;
-const ti_child = @typeInfo(ti.Pointer.child);
-if (ti_child != .Struct) return false;
-if (ti_child.Struct.fields.len != 2) return false;
+if (ti != .pointer) return false;
+if (ti.pointer.size != .slice) return false;
+const ti_child = @typeInfo(ti.pointer.child);
+if (ti_child != .@"struct") return false;
+if (ti_child.@"struct".fields.len != 2) return false;
 var key_found = false;
 var value_found = false;
-inline for (ti_child.Struct.fields) |field| {
+inline for (ti_child.@"struct".fields) |field| {
 if (std.mem.eql(u8, "key", field.name))
 key_found = true;
 if (std.mem.eql(u8, "value", field.name))
@@ -1904,13 +1904,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
 /// Should be called with the same type and `ParseOptions` that were passed to `parse`
 pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
 switch (@typeInfo(T)) {
-.Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
-.Optional => {
+.bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
+.optional => {
 if (value) |v| {
 return parseFree(@TypeOf(v), v, options);
 }
 },
-.Union => |unionInfo| {
+.@"union" => |unionInfo| {
 if (unionInfo.tag_type) |UnionTagType| {
 inline for (unionInfo.fields) |u_field| {
 if (value == @field(UnionTagType, u_field.name)) {
@@ -1922,24 +1922,24 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
 unreachable;
 }
 },
-.Struct => |structInfo| {
+.@"struct" => |structInfo| {
 inline for (structInfo.fields) |field| {
 parseFree(field.type, @field(value, field.name), options);
 }
 },
-.Array => |arrayInfo| {
+.array => |arrayInfo| {
 for (value) |v| {
 parseFree(arrayInfo.child, v, options);
 }
 },
-.Pointer => |ptrInfo| {
+.pointer => |ptrInfo| {
 const allocator = options.allocator orelse unreachable;
 switch (ptrInfo.size) {
-.One => {
+.one => {
 parseFree(ptrInfo.child, value.*, options);
 allocator.destroy(value);
 },
-.Slice => {
+.slice => {
 for (value) |v| {
 parseFree(ptrInfo.child, v, options);
 }
@@ -2284,7 +2284,7 @@ pub const Parser = struct {
 return;
 }
-var value = p.stack.pop();
+var value = p.stack.pop().?;
 try p.pushToParent(&value);
 },
 .String => |s| {
@@ -2350,7 +2350,7 @@ pub const Parser = struct {
 return;
 }
-var value = p.stack.pop();
+var value = p.stack.pop().?;
 try p.pushToParent(&value);
 },
 .ObjectBegin => {
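Note: the two `pop().?` changes are the same 0.14 API shift as above. The parser has already verified its stack is non-empty at these points, so it asserts with `.?` rather than branching on null.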
@@ -2812,38 +2812,38 @@ pub fn stringify(
 ) !void {
 const T = @TypeOf(value);
 switch (@typeInfo(T)) {
-.Float, .ComptimeFloat => {
+.float, .comptime_float => {
 return std.fmt.format(out_stream, "{e}", .{value});
 },
-.Int, .ComptimeInt => {
+.int, .comptime_int => {
 return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
 },
-.Bool => {
+.bool => {
 return out_stream.writeAll(if (value) "true" else "false");
 },
-.Null => {
+.null => {
 return out_stream.writeAll("null");
 },
-.Optional => {
+.optional => {
 if (value) |payload| {
 return try stringify(payload, options, out_stream);
 } else {
 return try stringify(null, options, out_stream);
 }
 },
-.Enum => {
+.@"enum" => {
 if (comptime std.meta.hasFn(T, "jsonStringify")) {
 return value.jsonStringify(options, out_stream);
 }
 @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
 },
-.Union => {
+.@"union" => {
 if (comptime std.meta.hasFn(T, "jsonStringify")) {
 return value.jsonStringify(options, out_stream);
 }
-const info = @typeInfo(T).Union;
+const info = @typeInfo(T).@"union";
 if (info.tag_type) |UnionTagType| {
 inline for (info.fields) |u_field| {
 if (value == @field(UnionTagType, u_field.name)) {
@@ -2854,7 +2854,7 @@ pub fn stringify(
 @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
 }
 },
-.Struct => |S| {
+.@"struct" => |S| {
 if (comptime std.meta.hasFn(T, "jsonStringify")) {
 return value.jsonStringify(options, out_stream);
 }
@@ -2870,7 +2870,7 @@ pub fn stringify(
 if (Field.type == void) continue;
 var output_this_field = true;
-if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false;
+if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;
 const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
 value.fieldNameFor(Field.name)
@@ -2920,10 +2920,10 @@ pub fn stringify(
 try out_stream.writeByte('}');
 return;
 },
-.ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
-.Pointer => |ptr_info| switch (ptr_info.size) {
-.One => switch (@typeInfo(ptr_info.child)) {
-.Array => {
+.error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
+.pointer => |ptr_info| switch (ptr_info.size) {
+.one => switch (@typeInfo(ptr_info.child)) {
+.array => {
 const Slice = []const std.meta.Elem(ptr_info.child);
 return stringify(@as(Slice, value), options, out_stream);
 },
@@ -2933,7 +2933,7 @@ pub fn stringify(
 },
 },
 // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
-.Slice => {
+.slice => {
 if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
 try out_stream.writeByte('\"');
 var i: usize = 0;
@@ -3002,8 +3002,8 @@ pub fn stringify(
 },
 else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
 },
-.Array => return stringify(&value, options, out_stream),
-.Vector => |info| {
+.array => return stringify(&value, options, out_stream),
+.vector => |info| {
 const array: [info.len]info.child = value;
 return stringify(&array, options, out_stream);
 },


@@ -97,7 +97,7 @@ pub fn main() anyerror!void {
 }
 continue;
 }
-inline for (@typeInfo(Tests).Enum.fields) |f| {
+inline for (@typeInfo(Tests).@"enum".fields) |f| {
 if (std.mem.eql(u8, f.name, arg)) {
 try tests.append(@field(Tests, f.name));
 break;
@@ -105,7 +105,7 @@ pub fn main() anyerror!void {
 }
 }
 if (tests.items.len == 0) {
-inline for (@typeInfo(Tests).Enum.fields) |f|
+inline for (@typeInfo(Tests).@"enum".fields) |f|
 try tests.append(@field(Tests, f.name));
 }
@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
 const func = fns[0];
 const arn = func.function_arn.?;
 // This is a bit ugly. Maybe a helper function in the library would help?
-var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
 defer tags.deinit();
 tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
 const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
@@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
 const ti = @typeInfo(T);
 switch (ti) {
-.Struct => {
-inline for (ti.Struct.fields) |field| {
+.@"struct" => {
+inline for (ti.@"struct".fields) |field| {
 if (std.mem.eql(u8, field.name, field_name))
 return field.type;
 }


@@ -1,5 +1,5 @@
 const std = @import("std");
-const service_list = @import("models/service_manifest.zig");
+const service_list = @import("service_manifest");
 const expectEqualStrings = std.testing.expectEqualStrings;
 pub fn Services(comptime service_imports: anytype) type {
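Note: `@import` with a bare (non-path) name resolves to a module registered in build.zig rather than to a file on disk, which lets the generated manifest live outside the package's source tree. A hedged sketch of the wiring (the module name matches the diff; the path and variable names here are assumptions, not the project's actual build.zig):

    // build.zig sketch
    const service_manifest = b.addModule("service_manifest", .{
        .root_source_file = b.path("codegen/models/service_manifest.zig"), // hypothetical path
    });
    exe.root_module.addImport("service_manifest", service_manifest);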
@@ -12,7 +12,7 @@ pub fn Services(comptime service_imports: anytype) type {
 item.* = .{
 .name = @tagName(service_imports[i]),
 .type = @TypeOf(import_field),
-.default_value = &import_field,
+.default_value_ptr = &import_field,
 .is_comptime = false,
 .alignment = 0,
 };
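Note: `std.builtin.Type.StructField.default_value` became `default_value_ptr` in Zig 0.14. It remains a type-erased `?*const anyopaque`, so reading a default back requires a cast, as the json parser earlier in this diff does. A sketch:

    fn inspectDefaults(comptime T: type) void {
        inline for (@typeInfo(T).@"struct".fields) |field| {
            if (field.default_value_ptr) |ptr| {
                // cast the erased pointer back to the field's type
                const value = @as(*align(1) const field.type, @ptrCast(ptr)).*;
                _ = value; // the field's declared default
            }
        }
    }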
@@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type {
 // finally, generate the type
 return @Type(.{
-.Struct = .{
+.@"struct" = .{
 .layout = .auto,
 .fields = &fields,
 .decls = &[_]std.builtin.Type.Declaration{},


@@ -24,10 +24,11 @@ fn encodeStruct(
 comptime options: EncodingOptions,
 ) !bool {
 var rc = first;
-inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
-const field_name = try options.field_name_transformer(allocator, field.name);
-defer if (options.field_name_transformer.* != defaultTransformer)
-allocator.free(field_name);
+var arena = std.heap.ArenaAllocator.init(allocator);
+defer arena.deinit();
+const arena_alloc = arena.allocator();
+inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
+const field_name = try options.field_name_transformer(arena_alloc, field.name);
 // @compileLog(@typeInfo(field.field_type).Pointer);
 rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
 }
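Note: the rewrite above swaps the per-name conditional `defer`/`free` dance for an arena: every transformed field name is allocated from the arena and released in a single `deinit()`, regardless of which transformer produced it. A minimal sketch of the pattern:

    const std = @import("std");

    test "arena frees all allocations at once" {
        var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
        defer arena.deinit(); // releases name and child together
        const alloc = arena.allocator();
        const name = try alloc.dupe(u8, "FieldName");
        const child = try std.fmt.allocPrint(alloc, "{s}.child", .{name});
        try std.testing.expect(child.len > name.len);
    }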
@@ -47,10 +48,10 @@ pub fn encodeInternal(
 // @compileLog(@typeInfo(@TypeOf(obj)));
 var rc = first;
 switch (@typeInfo(@TypeOf(obj))) {
-.Optional => if (obj) |o| {
+.optional => if (obj) |o| {
 rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
 },
-.Pointer => |ti| if (ti.size == .One) {
+.pointer => |ti| if (ti.size == .one) {
 rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
 } else {
 if (!first) _ = try writer.write("&");
@@ -61,7 +62,7 @@ pub fn encodeInternal(
 try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
 rc = false;
 },
-.Struct => if (std.mem.eql(u8, "", field_name)) {
+.@"struct" => if (std.mem.eql(u8, "", field_name)) {
 rc = try encodeStruct(allocator, parent, first, obj, writer, options);
 } else {
 // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
@@ -73,12 +74,12 @@ pub fn encodeInternal(
 rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
 // try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options);
 },
-.Array => {
+.array => {
 if (!first) _ = try writer.write("&");
 try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
 rc = false;
 },
-.Int, .ComptimeInt, .Float, .ComptimeFloat => {
+.int, .comptime_int, .float, .comptime_float => {
 if (!first) _ = try writer.write("&");
 try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
 rc = false;


@@ -653,7 +653,10 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {
 // This error is not strictly true, but we need to match one of the items
 // from the error set provided by the other stdlib calls at the calling site
-if (!alloc.resize(str, j)) return error.OutOfMemory;
+if (!alloc.resize(str, j)) {
+defer alloc.free(str);
+return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
+}
 return str[0..j];
 }
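Note: this is the same shrink-safely idiom as in canonicalUri/canonicalHeaderValue above. A failed `resize` only means the allocator cannot shrink in place, so the fix duplicates the wanted prefix and frees the original rather than surfacing a spurious OutOfMemory to callers.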


@@ -96,14 +96,14 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse
 fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
 switch (@typeInfo(T)) {
-.Bool => {
+.bool => {
 if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
 return true;
 if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
 return false;
 return error.UnexpectedToken;
 },
-.Float, .ComptimeFloat => {
+.float, .comptime_float => {
 return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
 // We have an iso8601 in an integer field (we think)
@@ -127,7 +127,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 return e;
 };
 },
-.Int, .ComptimeInt => {
+.int, .comptime_int => {
 // 2021-10-05T16:39:45.000Z
 return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
@@ -152,7 +152,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 return e;
 };
 },
-.Optional => |optional_info| {
+.optional => |optional_info| {
 if (element.children.items.len == 0) {
 // This is almost certainly incomplete. Empty strings? xsi:nil?
 return null;
@@ -162,7 +162,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 return try parseInternal(optional_info.child, element, options);
 }
 },
-.Enum => |enum_info| {
+.@"enum" => |enum_info| {
 _ = enum_info;
 // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
 // if (numeric) |num| {
@@ -172,7 +172,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 // return std.meta.stringToEnum(T, element.CharData);
 // }
 },
-.Union => |union_info| {
+.@"union" => |union_info| {
 if (union_info.tag_type) |_| {
 // try each of the union fields until we find one that matches
 // inline for (union_info.fields) |u_field| {
@@ -195,7 +195,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 }
 @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
 },
-.Struct => |struct_info| {
+.@"struct" => |struct_info| {
 var r: T = undefined;
 var fields_seen = [_]bool{false} ** struct_info.fields.len;
 var fields_set: u64 = 0;
@@ -250,7 +250,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 fields_set = fields_set + 1;
 found_value = true;
 }
-if (@typeInfo(field.type) == .Optional) {
+if (@typeInfo(field.type) == .optional) {
 // Test "compiler assertion failure 2"
 // Zig compiler bug circa 0.9.0. Using "and !found_value"
 // in the if statement above will trigger assertion failure
@@ -275,7 +275,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 return error.FieldElementMismatch; // see fields_seen for details
 return r;
 },
-.Array => //|array_info| {
+.array => //|array_info| {
 return error.ArrayNotImplemented,
 // switch (token) {
 // .ArrayBegin => {
@@ -310,16 +310,16 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 // else => return error.UnexpectedToken,
 // }
 // },
-.Pointer => |ptr_info| {
+.pointer => |ptr_info| {
 const allocator = options.allocator orelse return error.AllocatorRequired;
 switch (ptr_info.size) {
-.One => {
+.one => {
 const r: T = try allocator.create(ptr_info.child);
 errdefer allocator.free(r);
 r.* = try parseInternal(ptr_info.child, element, options);
 return r;
 },
-.Slice => {
+.slice => {
 // TODO: Detect and deal with arrays. This will require two
 // passes through the element children - one to
 // determine if it is an array, one to parse the elements
@@ -348,10 +348,10 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
 }
 return try allocator.dupe(u8, element.children.items[0].CharData);
 },
-.Many => {
+.many => {
 return error.ManyPointerSizeNotImplemented;
 },
-.C => {
+.c => {
 return error.CPointerSizeNotImplemented;
 },
 }