Compare commits
No commits in common. "master" and "zig-0.12.0" have entirely different histories.
master ... zig-0.12.0
53 changed files with 3487 additions and 5477 deletions
.envrc (8 deletions)
@@ -1,8 +0,0 @@
-# vi: ft=sh
-# shellcheck shell=bash
-
-if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
-  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
-fi
-
-use zig 0.14.0

@@ -1,9 +1,10 @@
 name: AWS-Zig Build
+run-name: ${{ github.actor }} building AWS Zig SDK
 on:
-  workflow_dispatch:
   push:
     branches:
-      - 'master'
+      - '*'
+      - '!zig-develop*'
 env:
   ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
@@ -16,25 +17,13 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.5
-        # We will let setup-zig use minimum_zig_version from build.zig.zon
-        # setup-zig also sets up the zig cache appropriately
-      - name: Ulimit
-        run: ulimit -a
-      - name: Run smoke test
-        run: zig build smoke-test --verbose
+        uses: https://git.lerch.org/lobo/setup-zig@v3
+        with:
+          version: 0.12.0
       - name: Run tests
-        run: zig build test --verbose --summary all
-      - name: Run tests (release mode)
-        run: zig build test -Doptimize=ReleaseSafe --verbose
-      # Zig build scripts don't have the ability to import depenedencies directly
-      # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
-      # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
-      # until we have our models built. So we have to have the build script
-      # basically modified, only during packaging, to allow this use case
-      #
+        run: zig build test --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
      # the way GitHub/Gitea does for repo archives
@@ -44,7 +33,6 @@ jobs:
       # should be using git archive, but we need our generated code to be part of it
       - name: Package source code with generated models
         run: |
-          sed -i 's#// UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //##' build.zig
           tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
             --format ustar \
             --exclude 'zig-*' \
@@ -73,7 +61,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example

@@ -1,20 +1,16 @@
 name: aws-zig mach nominated build
+run-name: ${{ github.actor }} building AWS Zig SDK
 on:
-  workflow_dispatch:
-  # schedule:
-  #   - cron: '0 12 * * *' # noon UTC, 4AM Pacific
+  schedule:
+    - cron: '0 12 * * *' # noon UTC, 4AM Pacific
   push:
     branches:
-      - 'zig-mach'
+      - 'zig-develop*'
 env:
-  PKG_PREFIX: nominated-zig
+  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
 jobs:
   build-zig-nominated-mach-latest:
-    container:
-      # We need CAP_SYS_PTRACE for stack traces due to a regression in 0.14.0
-      # TODO: Remove this after https://github.com/ziglang/zig/issues/21815 is
-      # addressed
-      options: --cap-add CAP_SYS_PTRACE
     runs-on: ubuntu-latest
     # Need to use the default container with node and all that, so we can
     # use JS-based actions like actions/checkout@v3...
@@ -22,24 +18,13 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
        with:
-          ref: zig-mach
+          ref: zig-develop
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
+        uses: https://git.lerch.org/lobo/setup-zig@v3
        with:
          version: mach-latest
-      - name: Restore Zig caches
-        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
-      - name: Run gen
-        run: zig build gen --verbose
-      - name: Run smoke test
-        run: zig build smoke-test --verbose
-      - name: Run full tests
-        run: zig build test --verbose --summary all
-      # TODO: Zig mach currently tracking behind zig 0.14.0 branch - enable this test after update
-      # - name: Run tests (release mode)
-      #   run: zig build test -Doptimize=ReleaseSafe --verbose
      # Zig package manager expects everything to be inside a directory in the archive,
      # which it then strips out on download. So we need to shove everything inside a directory
      # the way GitHub/Gitea does for repo archives
@@ -49,7 +34,7 @@ jobs:
       # should be using git archive, but we need our generated code to be part of it
       - name: Package source code with generated models
         run: |
-          tar -czf ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
+          tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
            --format ustar \
            --exclude 'zig-*' \
            --transform 's,^,${{ github.sha }}/,' *
@@ -72,9 +57,9 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
-            --upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
-            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
+          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example
         run: ( cd example && zig build ) # Make sure example builds
       - name: Notify

@@ -1,20 +1,16 @@
 name: aws-zig nightly build
+run-name: ${{ github.actor }} building AWS Zig SDK
 on:
-  workflow_dispatch:
   schedule:
-    - cron: '30 12 * * *' # 12:30 UTC, 4:30AM Pacific
+    - cron: '0 12 30 * *' # 12:30 UTC, 4:30AM Pacific
   push:
     branches:
-      - 'zig-develop'
+      - 'zig-develop*'
 env:
-  PKG_PREFIX: nightly-zig
+  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
 jobs:
   build-zig-nightly:
-    container:
-      # We need CAP_SYS_PTRACE for stack traces due to a regression in 0.14.0
-      # TODO: Remove this after https://github.com/ziglang/zig/issues/21815 is
-      # addressed
-      options: --cap-add CAP_SYS_PTRACE
     runs-on: ubuntu-latest
     # Need to use the default container with node and all that, so we can
     # use JS-based actions like actions/checkout@v3...
@@ -22,19 +18,15 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
        with:
          ref: zig-develop
       - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.5
+        uses: https://git.lerch.org/lobo/setup-zig@v3
        with:
          version: master
-      - name: Run smoke test
-        run: zig build smoke-test --verbose
-      - name: Run full tests
-        run: zig build test --verbose --summary all
-      - name: Run tests (release mode)
-        run: zig build test -Doptimize=ReleaseSafe --verbose
+      - name: Run tests
+        run: zig build test --verbose
      # Zig package manager expects everything to be inside a directory in the archive,
      # which it then strips out on download. So we need to shove everything inside a directory
      # the way GitHub/Gitea does for repo archives
@@ -44,7 +36,7 @@ jobs:
       # should be using git archive, but we need our generated code to be part of it
       - name: Package source code with generated models
         run: |
-          tar -czf ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
+          tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
            --format ustar \
            --exclude 'zig-*' \
            --transform 's,^,${{ github.sha }}/,' *
@@ -67,9 +59,9 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
-            --upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
-            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
+          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example
         run: ( cd example && zig build ) # Make sure example builds
       - name: Notify

@@ -1,89 +0,0 @@
-name: AWS-Zig Build
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - 'zig-0.14.x'
-env:
-  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
-jobs:
-  build-zig-amd64-host:
-    runs-on: ubuntu-latest
-    # Need to use the default container with node and all that, so we can
-    # use JS-based actions like actions/checkout@v3...
-    # container:
-    #   image: alpine:3.15.0
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-        with:
-          ref: zig-0.14.x
-      - name: Setup Zig
-        uses: https://github.com/mlugg/setup-zig@v2.0.1
-        with:
-          version: 0.14.0
-      - name: Run smoke test
-        run: zig build smoke-test --verbose
-      - name: Run full tests
-        run: zig build test --verbose --summary all
-      # Release mode fix not backported to 0.13.0 code
-      #- name: Run tests (release mode)
-      #  run: zig build test -Doptimize=ReleaseSafe --verbose
-      # Zig build scripts don't have the ability to import depenedencies directly
-      # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
-      # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
-      # until we have our models built. So we have to have the build script
-      # basically modified, only during packaging, to allow this use case
-      #
-      # Zig package manager expects everything to be inside a directory in the archive,
-      # which it then strips out on download. So we need to shove everything inside a directory
-      # the way GitHub/Gitea does for repo archives
-      #
-      # Also, zig tar process doesn't handle gnu format for long names, nor does it seam to
-      # handle posix long name semantics cleanly either. ustar works. This
-      # should be using git archive, but we need our generated code to be part of it
-      - name: Package source code with generated models
-        run: |
-          sed -i 's#// UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //##' build.zig
-          tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
-            --format ustar \
-            --exclude 'zig-*' \
-            *
-      # Something in this PR broke this transform. I don't mind removing it, but
-      # the PR attempts to handle situations with or without a prefix, but it
-      # doesn't. I have not yet determined what the problem is, though
-      # https://github.com/ziglang/zig/pull/19111/files
-      # --transform 's,^,${{ github.sha }}/,' *
-      # - name: Sign
-      #   id: sign
-      #   uses: https://git.lerch.org/lobo/action-hsm-sign@v1
-      #   with:
-      #     pin: ${{ secrets.HSM_USER_PIN }}
-      #     files: ???
-      #     public_key: 'https://emil.lerch.org/serverpublic.pem'
-      # - run: |
-      #     echo "Source 0 should be ./bar: ${{ steps.sign.outputs.SOURCE_0 }}"
-      # - run: |
-      #     echo "Signature 0 should be ./bar.sig: ${{ steps.sign.outputs.SIG_0 }}"
-      # - run: echo "URL of bar (0) is ${{ steps.sign.outputs.URL_0 }}"
-      # - run: |
-      #     echo "Source 1 should be ./foo: ${{ steps.sign.outputs.SOURCE_1 }}"
-      # - run: |
-      #     echo "Signature 1 should be ./foo.sig: ${{ steps.sign.outputs.SIG_1 }}"
-      # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
-      - name: Publish source code with generated models
-        run: |
-          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
-            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
-            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds
-      - name: Notify
-        uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
-        if: always()
-        with:
-          host: ${{ secrets.NTFY_HOST }}
-          topic: ${{ secrets.NTFY_TOPIC }}
-          user: ${{ secrets.NTFY_USER }}
-          password: ${{ secrets.NTFY_PASSWORD }}

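A note on the `sed` line in the packaging step above: build.zig ships with a guarded export commented out (the marker appears verbatim in the build.zig diff later in this comparison), because src/aws.zig cannot compile until the models have been generated. Packaging strips the marker so the published archive exposes the SDK to downstream build scripts. Sketched:

    // As committed (cannot compile before model generation):
    // UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //pub const aws = @import("src/aws.zig");

    // After the sed substitution removes the marker at packaging time:
    pub const aws = @import("src/aws.zig");
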
.github/workflows/build.yaml (new file, +31)
@@ -0,0 +1,31 @@
+name: AWS-Zig Build
+run-name: ${{ github.actor }} building AWS Zig SDK
+on:
+  push:
+    branches:
+      - '*'
+      - '!zig-develop*'
+jobs:
+  build-zig-0-12-0-amd64:
+    runs-on: ubuntu-latest
+    env:
+      ZIG_VERSION: 0.12.0
+      ARCH: x86_64
+    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+      # ARCH is fine, but we can't substitute directly because zig
+      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
+      #
+      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
+      # TODO: https://github.com/ziglang/zig/issues/2443
+      - name: Install zig
+        run: |
+          wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
+          sudo tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
+          sudo ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
+      - name: Run tests
+        run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds

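`-Dbroken-windows` above is a user-defined option declared in this repository's build.zig (the declaration appears in the build.zig diff later in this comparison). A minimal sketch of the pattern, with the option gating a cross-compiled Windows test target; the step wiring around it is illustrative:

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        // Declares the flag consumed as `zig build test -Dbroken-windows`.
        const broken_windows = b.option(
            bool,
            "broken-windows",
            "Windows is broken in this environment (do not run Windows tests)",
        ) orelse false;

        const test_step = b.step("test", "Run tests");
        const targets = [_]std.Target.Query{
            .{}, // native
            .{ .cpu_arch = .x86_64, .os_tag = .windows },
        };
        for (targets) |t| {
            if (broken_windows and t.os_tag == .windows) continue; // skip as requested
            const tests = b.addTest(.{
                .root_source_file = b.path("src/aws.zig"),
                .target = b.resolveTargetQuery(t),
            });
            const run = b.addRunArtifact(tests);
            // Only execute test binaries the host can actually run.
            run.skip_foreign_checks = true;
            test_step.dependOn(&run.step);
        }
    }
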
.github/workflows/zig-mach.yaml (new file, +36)
@@ -0,0 +1,36 @@
+name: aws-zig mach nominated build
+run-name: ${{ github.actor }} building AWS Zig SDK
+on:
+  push:
+    branches:
+      - 'zig-develop*'
+jobs:
+  build-zig-mach-latest:
+    runs-on: ubuntu-latest
+    # Need to use the default container with node and all that, so we can
+    # use JS-based actions like actions/checkout@v3...
+    # container:
+    #   image: alpine:3.15.0
+    env:
+      ZIG_VERSION: mach-latest
+      ARCH: x86_64
+    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+      # ARCH is fine, but we can't substitute directly because zig
+      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
+      #
+      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
+      # TODO: https://github.com/ziglang/zig/issues/2443
+      - name: Install zig
+        run: |
+          apt-get update && apt-get install -y jq
+          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://machengine.org/zig/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
+          sudo tar x -C /usr/local -f "${file}"
+          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
+          zig version
+      - name: Run tests
+        run: zig build test -Dbroken-windows --verbose
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds

.github/workflows/zig-nightly.yaml (new file, +36)
@@ -0,0 +1,36 @@
+name: aws-zig nightly build
+run-name: ${{ github.actor }} building AWS Zig SDK
+on:
+  push:
+    branches:
+      - 'zig-develop*'
+jobs:
+  build-zig-nightly:
+    runs-on: ubuntu-latest
+    # Need to use the default container with node and all that, so we can
+    # use JS-based actions like actions/checkout@v3...
+    # container:
+    #   image: alpine:3.15.0
+    env:
+      ZIG_VERSION: master
+      ARCH: x86_64
+    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+      # ARCH is fine, but we can't substitute directly because zig
+      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
+      #
+      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
+      # TODO: https://github.com/ziglang/zig/issues/2443
+      - name: Install zig
+        run: |
+          apt-get update && apt-get install -y jq
+          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://ziglang.org/download/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
+          sudo tar x -C /usr/local -f "${file}"
+          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
+          zig version
+      - name: Run tests
+        run: zig build test -Dbroken-windows --verbose
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds

.gitignore (1 deletion)
@@ -11,4 +11,3 @@ libs/
 src/git_version.zig
 zig-out
 core
-.zig-cache

@@ -1,5 +0,0 @@
-[tools]
-pre-commit = "latest"
-"ubi:DonIsaac/zlint" = "latest"
-zig = "0.15.1"
-zls = "0.15.0"

@@ -1,30 +0,0 @@
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-repos:
-  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
-    hooks:
-      - id: trailing-whitespace
-      - id: end-of-file-fixer
-      - id: check-yaml
-      - id: check-added-large-files
-  - repo: https://github.com/batmac/pre-commit-zig
-    rev: v0.3.0
-    hooks:
-      - id: zig-fmt
-      - id: zig-build
-  - repo: local
-    hooks:
-      - id: smoke-test
-        name: Run zig build smoke-test
-        entry: zig
-        args: ["build", "--verbose", "smoke-test"]
-        language: system
-        types: [file]
-        pass_filenames: false
-      - id: zlint
-        name: Run zlint
-        entry: zlint
-        args: ["--deny-warnings", "--fix"]
-        language: system
-        types: [zig]

README.md (71 changes)
@@ -1,68 +1,61 @@
 AWS SDK for Zig
 ===============
 
-[Zig 0.15.1](https://ziglang.org/download/#release-0.15.1):
+[Zig 0.12](https://ziglang.org/download/#release-0.12.0):
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
 
+[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
+
+[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)
+
 [Nightly Zig](https://ziglang.org/download/):
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)
 
-[Zig 0.14.1](https://ziglang.org/download/#release-0.14.1):
-
-[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
-
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
-in x86_64-linux, and will vary based on services used. Tested targets:
+in x86_linux, and will vary based on services used. Tested targets:
 
 * x86_64-linux
-* riscv64-linux
+* riscv64-linux\*
 * aarch64-linux
-* x86_64-windows
+* x86_64-windows\*\*
 * arm-linux
 * aarch64-macos
 * x86_64-macos
 
 Tested targets are built, but not continuously tested, by CI.
 
-Branches
---------
-
-* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
-                   for breaking changes that will need to be dealt with when
-                   a new zig release appears. Expect significant delays in any
-                   build failures (PRs always welcome!).
-* **master**: This branch tracks the latest released zig version
-* **zig-0.14.x**: This branch tracks the 0.14/0.14.1 released zig versions.
-                  Support for these previous version is best effort, generally
-                  degrading over time. Fixes will generally appear in master, then
-                  backported into the previous version.
-
-Other branches/tags exist but are unsupported
+\* On Zig 0.12, riscv64-linux tests take a significant time to compile (each aws.zig test takes approximately 1min, 45 seconds to compile on Intel i9 10th gen)
+
+\*\* On Zig 0.12, x86_64-windows tests have one test skipped as LLVM consumes all available RAM on the system
+
+Zig-Develop Branch
+------------------
+
+This branch is intended for use with the in-development version of Zig. This
+starts with 0.12.0-dev.3180+83e578a18. I will try to keep this branch up to date
+with latest, but with a special eye towards aligning with [Mach Engine's Nominated
+Zig Versions](https://machengine.org/about/nominated-zig/). As nightly zig versions
+disappear off the downloads page (and back end server), we can use the mirroring
+that the Mach Engine participates in to pull these versions.
 
 Building
 --------
 
 `zig build` should work. It will build the code generation project, fetch model
 files from upstream AWS Go SDK v2, run the code generation, then build the main
-project with the generated code. Testing can be done with `zig build test`. Note that
-this command tests on all supported architectures, so for a faster testing
-process, use `zig build smoke-test` instead.
-
-To make development even faster, a build option is provided to avoid the use of
-LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
-can reduce build/test time 300%. Note, however, native code generation in zig
-is not yet complete, so you may see errors.
+project with the generated code. Testing can be done with `zig test`.
 
 Using
 -----
 
-This is designed for use with the Zig package manager, and exposes a module
+This is designed for use with the Zig 0.11 package manager, and exposes a module
 called "aws". Set up `build.zig.zon` and add the dependency/module to your project
 as normal and the package manager should do its thing. A full example can be found
-in [/example](example/build.zig.zon). This can also be used at build time in
-a downstream project's `build.zig`.
+in [/example](example/README.md).
 
 Configuring the module and/or Running the demo
 ----------------------------------------------
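Both versions of the "Using" section describe the same wiring. As a hedged sketch of a downstream consumer's build.zig (the module name "aws" matches what this project's build.zig exposes; the executable name and paths are placeholders, and the dependency must already be present in the consumer's build.zig.zon):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        const exe = b.addExecutable(.{
            .name = "my-app",
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
        });

        // "aws" is the module this SDK registers via b.addModule("aws", ...).
        const aws_dep = b.dependency("aws", .{ .target = target, .optimize = optimize });
        exe.root_module.addImport("aws", aws_dep.module("aws"));

        b.installArtifact(exe);
    }
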
@@ -70,8 +63,8 @@ Configuring the module and/or Running the demo
 This library mimics the aws c libraries for it's work, so it operates like most
 other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
 for working with services. For local testing or alternative endpoints, there's
-no real standard, so there is code to look for an environment variable
-`AWS_ENDPOINT_URL` variable that will supersede all other configuration.
+no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
+variable that will supersede all other configuration.
 
 Limitations
 -----------
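A sketch of the lookup both versions describe; `std.process.getEnvVarOwned` is the real stdlib call, while `resolveEndpoint` and the default URL are illustrative:

    const std = @import("std");

    /// Hypothetical helper: prefer AWS_ENDPOINT_URL when set, else a default.
    fn resolveEndpoint(allocator: std.mem.Allocator, default_endpoint: []const u8) ![]const u8 {
        return std.process.getEnvVarOwned(allocator, "AWS_ENDPOINT_URL") catch |err| switch (err) {
            error.EnvironmentVariableNotFound => try allocator.dupe(u8, default_endpoint),
            else => return err,
        };
    }

    test "endpoint override supersedes default" {
        const allocator = std.testing.allocator;
        const endpoint = try resolveEndpoint(allocator, "https://dynamodb.us-west-2.amazonaws.com");
        defer allocator.free(endpoint);
        try std.testing.expect(endpoint.len > 0);
    }
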
@@ -91,7 +84,13 @@ TODO List:
 * Implement jitter/exponential backoff
 * Implement timeouts and other TODO's in the code
 * Add option to cache signature keys
-* Add CBOR support
+
+Services without TLS 1.3 support
+--------------------------------
+
+All AWS services should support TLS 1.3 at this point, but there are many regions
+and several partitions, and not all of them have been tested, so your mileage
+may vary.
 
 Dependency tree
 ---------------
@@ -101,6 +100,7 @@ No dependencies:
 * aws_http_base: contains basic structures for http requests/results
 * case: provides functions to change casing
 * date: provides limited date manipulation functions
+* http_client_17015_issue: zig 0.11 http client, with changes
 * json: custom version of earlier stdlib json parser
 * xml: custom xml parser library
 * url: custom url encoding
@@ -109,6 +109,7 @@ aws_credentials: Allows credential handling
 aws_authentication
 
 aws_http:
+  http_client_17015_issue
   aws_http_base
   aws_signing
 

build.zig (334 changes)
@@ -1,61 +1,112 @@
 const std = @import("std");
+const builtin = @import("builtin");
 const Builder = @import("std").Build;
 
 const models_subdir = "codegen/sdk-codegen/aws-models/"; // note will probably not work on windows
 
-// UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //pub const aws = @import("src/aws.zig");
-
-const test_targets = [_]std.Target.Query{
+const test_targets = [_]std.zig.CrossTarget{
     .{}, // native
-    .{ .cpu_arch = .x86_64, .os_tag = .linux },
-    .{ .cpu_arch = .aarch64, .os_tag = .linux },
-    .{ .cpu_arch = .riscv64, .os_tag = .linux },
-    .{ .cpu_arch = .arm, .os_tag = .linux },
-    .{ .cpu_arch = .x86_64, .os_tag = .windows },
-    .{ .cpu_arch = .aarch64, .os_tag = .macos },
-    .{ .cpu_arch = .x86_64, .os_tag = .macos },
-    // .{ .cpu_arch = .wasm32, .os_tag = .wasi },
+    .{
+        .cpu_arch = .x86_64,
+        .os_tag = .linux,
+    },
+    .{
+        .cpu_arch = .aarch64,
+        .os_tag = .linux,
+    },
+    // // The test executable just spins forever in LLVM using nominated zig 0.12 March 2024
+    // // This is likely a LLVM problem unlikely to be fixed in zig 0.12
+    // .{
+    //     .cpu_arch = .riscv64,
+    //     .os_tag = .linux,
+    // },
+    .{
+        .cpu_arch = .arm,
+        .os_tag = .linux,
+    },
+    .{
+        .cpu_arch = .x86_64,
+        .os_tag = .windows,
+    },
+    .{
+        .cpu_arch = .aarch64,
+        .os_tag = .macos,
+    },
+    .{
+        .cpu_arch = .x86_64,
+        .os_tag = .macos,
+    },
+    // .{
+    //     .cpu_arch = .wasm32,
+    //     .os_tag = .wasi,
+    // },
 };
 
 pub fn build(b: *Builder) !void {
+    // Standard target options allows the person running `zig build` to choose
+    // what target to build for. Here we do not override the defaults, which
+    // means any target is allowed, and the default is native. Other options
+    // for restricting supported target set are available.
     const target = b.standardTargetOptions(.{});
+
+    // Standard release options allow the person running `zig build` to select
+    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
     const optimize = b.standardOptimizeOption(.{});
 
-    const no_llvm = b.option(
-        bool,
-        "no-llvm",
-        "Disable LLVM",
-    ) orelse false;
     const broken_windows = b.option(
         bool,
         "broken-windows",
         "Windows is broken in this environment (do not run Windows tests)",
     ) orelse false;
-    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
-
-    const test_filters: []const []const u8 = b.option(
-        []const []const u8,
-        "test-filter",
-        "Skip tests that do not match any of the specified filters",
-    ) orelse &.{};
-
-    const dep_mods = try getDependencyModules(b, .{
-        .target = target,
-        .optimize = optimize,
-    });
-
-    const mod_exe = b.createModule(.{
-        .root_source_file = b.path("src/main.zig"),
-        .target = target,
-        .optimize = optimize,
-    });
-    configure(mod_exe, dep_mods, true);
-
+    // TODO: Embed the current git version in the code. We can do this
+    // by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
+    // grab that commit, and use b.addOptions/exe.addOptions to generate the
+    // Options file. See https://github.com/ziglang/zig/issues/14979 for usage
+    // example.
+    //
+    // From there, I'm not sure what the generated file looks like or quite how
+    // to use, but that should be easy. It may also give some ideas on the
+    // code gen piece itself, though it might be nice to leave as a seperate
+    // executable
+    // TODO: This executable should not be built when importing as a package.
+    // It relies on code gen and is all fouled up when getting imported
     const exe = b.addExecutable(.{
         .name = "demo",
-        .root_module = mod_exe,
-        .use_llvm = !no_llvm,
+        .root_source_file = .{ .path = "src/main.zig" },
+        .target = target,
+        .optimize = optimize,
     });
+    const smithy_dep = b.dependency("smithy", .{
+        // These are the arguments to the dependency. It expects a target and optimization level.
+        .target = target,
+        .optimize = optimize,
+    });
+    const smithy_module = smithy_dep.module("smithy");
+    exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
+
+    // Expose module to others
+    _ = b.addModule("aws", .{
+        .root_source_file = .{ .path = "src/aws.zig" },
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+    });
+
+    // Expose module to others
+    _ = b.addModule("aws-signing", .{
+        .root_source_file = .{ .path = "src/aws_signing.zig" },
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+    });
+    // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
+    //
+    // We are working here with kind of a weird dependency though. So we can do this
+    // another way
+    //
+    // TODO: These target/optimize are not correct, as we need to run the thing
+    // const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
+    //     .target = target,
+    //     .optimize = optimize,
+    // });
+    // const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
+    // exe.step.dependOn(&codegen_cmd.step);
 
     const run_cmd = b.addRunArtifact(exe);
     run_cmd.step.dependOn(b.getInstallStep());
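Much of the churn in this hunk is one rename: Zig 0.12 introduced `std.Target.Query`, leaving `std.zig.CrossTarget` as a deprecated alias, and a query must now be resolved before it is attached to a compile step. A minimal sketch of the newer idiom (module contents are illustrative):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        // A Query describes a target abstractly (unset fields mean "native")...
        const query: std.Target.Query = .{ .cpu_arch = .aarch64, .os_tag = .linux };
        // ...and resolution fills in concrete CPU/OS details for the build graph.
        const mod = b.createModule(.{
            .root_source_file = b.path("src/aws.zig"),
            .target = b.resolveTargetQuery(query),
        });
        _ = mod; // attach to an executable or test step as needed
    }
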
@@ -66,81 +117,61 @@ pub fn build(b: *Builder) !void {
     const run_step = b.step("run", "Run the app");
     run_step.dependOn(&run_cmd.step);
 
-    const cg = b.step("gen", "Generate zig service code from smithy models");
-
-    const cg_mod = b.createModule(.{
-        .root_source_file = b.path("codegen/src/main.zig"),
-        // We need this generated for the host, not the real target
-        .target = b.graph.host,
-        .optimize = if (b.verbose) .Debug else .ReleaseSafe,
-    });
-    configure(cg_mod, dep_mods, false);
-
-    const cg_exe = b.addExecutable(.{
-        .name = "codegen",
-        .root_module = cg_mod,
-    });
-    var cg_cmd = b.addRunArtifact(cg_exe);
-    cg_cmd.addArg("--models");
-    cg_cmd.addArg(try std.fs.path.join(
-        b.allocator,
-        &[_][]const u8{
-            try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
-            models_subdir,
-        },
-    ));
-    cg_cmd.addArg("--output");
-    const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
-    if (b.verbose) {
-        cg_cmd.addArg("--verbose");
-    }
-    if (!no_bin) {
-        b.installArtifact(cg_exe);
-    }
-    // cg_cmd.step.dependOn(&fetch_step.step);
-    // TODO: this should use zig_exe from std.Build
-    // codegen should store a hash in a comment
-    // this would be hash of the exe that created the file
-    // concatenated with hash of input json. this would
-    // allow skipping generated files. May not include hash
-    // of contents of output file as maybe we want to tweak
-    // manually??
-    //
-    // All the hashes can be in service_manifest.zig, which
-    // could be fun to just parse and go nuts. Top of
-    // file, generator exe hash. Each import has comment
-    // with both input and output hash and we can decide
-    // later about warning on manual changes...
-
-    cg.dependOn(&cg_cmd.step);
-
-    exe.step.dependOn(cg);
-
-    // This allows us to have each module depend on the
-    // generated service manifest.
-    const service_manifest_module = b.createModule(.{
-        .root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
-        .target = target,
-        .optimize = optimize,
-    });
-    configure(service_manifest_module, dep_mods, true);
-
-    mod_exe.addImport("service_manifest", service_manifest_module);
-
-    // Expose module to others
-    const mod_aws = b.addModule("aws", .{
-        .root_source_file = b.path("src/aws.zig"),
-        .target = target,
-        .optimize = optimize,
-    });
-    mod_aws.addImport("service_manifest", service_manifest_module);
-    configure(mod_aws, dep_mods, true);
-
-    // Expose module to others
-    const mod_aws_signing = b.addModule("aws-signing", .{
-        .root_source_file = b.path("src/aws_signing.zig"),
-    });
-    configure(mod_aws_signing, dep_mods, false);
+    const gen_step = blk: {
+        const cg = b.step("gen", "Generate zig service code from smithy models");
+
+        const cg_exe = b.addExecutable(.{
+            .name = "codegen",
+            .root_source_file = .{ .path = "codegen/src/main.zig" },
+            // We need this generated for the host, not the real target
+            .target = b.host,
+            .optimize = if (b.verbose) .Debug else .ReleaseSafe,
+        });
+        cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
+        var cg_cmd = b.addRunArtifact(cg_exe);
+        cg_cmd.addArg("--models");
+        const hash = hash_blk: {
+            for (b.available_deps) |dep| {
+                const dep_name = dep.@"0";
+                const dep_hash = dep.@"1";
+                if (std.mem.eql(u8, dep_name, "models"))
+                    break :hash_blk dep_hash;
+            }
+            return error.DependencyNamedModelsNotFoundInBuildZigZon;
+        };
+        cg_cmd.addArg(try std.fs.path.join(
+            b.allocator,
+            &[_][]const u8{
+                b.graph.global_cache_root.path.?,
+                "p",
+                hash,
+                models_subdir,
+            },
+        ));
+        cg_cmd.addArg("--output");
+        cg_cmd.addDirectoryArg(b.path("src/models"));
+        if (b.verbose)
+            cg_cmd.addArg("--verbose");
+        // cg_cmd.step.dependOn(&fetch_step.step);
+        // TODO: this should use zig_exe from std.Build
+        // codegen should store a hash in a comment
+        // this would be hash of the exe that created the file
+        // concatenated with hash of input json. this would
+        // allow skipping generated files. May not include hash
+        // of contents of output file as maybe we want to tweak
+        // manually??
+        //
+        // All the hashes can be in service_manifest.zig, which
+        // could be fun to just parse and go nuts. Top of
+        // file, generator exe hash. Each import has comment
+        // with both input and output hash and we can decide
+        // later about warning on manual changes...
+
+        cg.dependOn(&cg_cmd.step);
+        break :blk cg;
+    };
+
+    exe.step.dependOn(gen_step);
 
     // Similar to creating the run step earlier, this exposes a `test` step to
     // the `zig build --help` menu, providing a way for the user to request
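The most consequential change on the master side of this hunk is routing codegen output through `addOutputDirectoryArg`, which returns a cache-managed LazyPath instead of writing into the source tree. A sketch of just that pattern; `codegen_exe` stands in for the generator executable built earlier:

    // codegen_exe: the code-generation executable (assumed built above).
    const gen = b.addRunArtifact(codegen_exe);
    gen.addArg("--output");
    // The build system allocates a cache directory and tracks it as a LazyPath,
    // so the generation step re-runs only when its inputs change.
    const out_dir = gen.addOutputDirectoryArg("src/models");

    // Any file inside the generated directory can then seed a module.
    const manifest_mod = b.createModule(.{
        .root_source_file = out_dir.path(b, "service_manifest.zig"),
    });
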
@@ -163,91 +194,20 @@ pub fn build(b: *Builder) !void {
     // test_step.dependOn(&run_unit_tests.step);
     for (test_targets) |t| {
         if (broken_windows and t.os_tag == .windows) continue;
 
-        const mod_unit_tests = b.createModule(.{
-            .root_source_file = b.path("src/aws.zig"),
-            .target = b.resolveTargetQuery(t),
-            .optimize = optimize,
-        });
-        mod_unit_tests.addImport("service_manifest", service_manifest_module);
-        configure(mod_unit_tests, dep_mods, true);
-
         // Creates a step for unit testing. This only builds the test executable
         // but does not run it.
         const unit_tests = b.addTest(.{
-            .root_module = mod_unit_tests,
-            .filters = test_filters,
+            .root_source_file = .{ .path = "src/aws.zig" },
+            .target = b.resolveTargetQuery(t),
+            .optimize = optimize,
         });
-        unit_tests.step.dependOn(cg);
-        unit_tests.use_llvm = !no_llvm;
+        unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
+        unit_tests.step.dependOn(gen_step);
 
         const run_unit_tests = b.addRunArtifact(unit_tests);
         run_unit_tests.skip_foreign_checks = true;
 
         test_step.dependOn(&run_unit_tests.step);
     }
-    const check = b.step("check", "Check compilation errors");
-    check.dependOn(&exe.step);
-
-    // Similar to creating the run step earlier, this exposes a `test` step to
-    // the `zig build --help` menu, providing a way for the user to request
-    // running the unit tests.
-    const smoke_test_step = b.step("smoke-test", "Run unit tests");
-
-    // Creates a step for unit testing. This only builds the test executable
-    // but does not run it.
-    const smoke_test = b.addTest(.{
-        .root_module = mod_aws,
-        .filters = test_filters,
-    });
-    smoke_test.use_llvm = !no_llvm;
-    smoke_test.step.dependOn(cg);
-
-    const run_smoke_test = b.addRunArtifact(smoke_test);
-
-    smoke_test_step.dependOn(&run_smoke_test.step);
-    if (no_bin) {
-        b.getInstallStep().dependOn(&exe.step);
-    } else {
-        b.installArtifact(exe);
-    }
-}
-
-fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.Module), include_time: bool) void {
-    compile.addImport("smithy", modules.get("smithy").?);
-    compile.addImport("date", modules.get("date").?);
-    compile.addImport("json", modules.get("json").?);
-    compile.addImport("case", modules.get("case").?);
-    if (include_time) compile.addImport("zeit", modules.get("zeit").?);
-}
-
-fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Build.Module) {
-    var result = std.StringHashMap(*std.Build.Module).init(b.allocator);
-
-    // External dependencies
-    const dep_smithy = b.dependency("smithy", args);
-    const mod_smithy = dep_smithy.module("smithy");
-    try result.putNoClobber("smithy", mod_smithy);
-
-    const dep_zeit = b.dependency("zeit", args);
-    const mod_zeit = dep_zeit.module("zeit");
-    try result.putNoClobber("zeit", mod_zeit);
-
-    const dep_case = b.dependency("case", args);
-    const mod_case = dep_case.module("case");
-    try result.putNoClobber("case", mod_case);
-    // End External dependencies
-
-    // Private modules/dependencies
-    const dep_json = b.dependency("json", args);
-    const mod_json = dep_json.module("json");
-    try result.putNoClobber("json", mod_json);
-
-    const dep_date = b.dependency("date", args);
-    const mod_date = dep_date.module("date");
-    try result.putNoClobber("date", mod_date);
-    // End private modules/dependencies
-
-    return result;
-}
+    b.installArtifact(exe);
 }
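The master side's `check` step and `no-bin` option implement a common fast-feedback pattern (useful for editor integrations such as ZLS build-on-save): compile to surface errors without emitting or installing a binary. A sketch, assuming `exe` from the surrounding build function:

    // A "check" step that type-checks the executable without installing it.
    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;

    const check = b.step("check", "Check compilation errors");
    check.dependOn(&exe.step);

    if (no_bin) {
        // Depend on the compile step only; nothing is copied to zig-out.
        b.getInstallStep().dependOn(&exe.step);
    } else {
        b.installArtifact(exe);
    }
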
build.zig.zon (40 changes)
@@ -1,40 +1,20 @@
 .{
-    .name = .aws,
+    .name = "aws",
     .version = "0.0.1",
-    .fingerprint = 0x1f26b7b27005bb49,
     .paths = .{
         "build.zig",
         "build.zig.zon",
         "src",
-        "codegen",
-        "lib",
-        "README.md",
-        "LICENSE",
     },
-    .minimum_zig_version = "0.15.1",
 
     .dependencies = .{
         .smithy = .{
-            .url = "git+https://git.lerch.org/lobo/smithy.git#09c0a618877ebaf8e15fbfc505983876f4e063d5",
-            .hash = "smithy-1.0.0-uAyBgTnTAgBp2v6vypGcK5-YOCtxs2iEqR-4LfC5FTlS",
+            .url = "https://git.lerch.org/lobo/smithy/archive/1e534201c4df5ea4f615faeedc69d414adbec0b1.tar.gz",
+            .hash = "1220af63ae0498010004af79936cedf3fe6702f516daab77ebbd97a274eba1b42aad",
         },
         .models = .{
-            .url = "https://github.com/aws/aws-sdk-go-v2/archive/refs/tags/release-2025-05-05.tar.gz",
-            .hash = "N-V-__8AAKWdeiawujEcrfukQbb8lLAiQIRT0uG5gCcm4b7W",
-        },
-        .zeit = .{
-            .url = "git+https://github.com/rockorager/zeit?ref=zig-0.15#ed2ca60db118414bda2b12df2039e33bad3b0b88",
-            .hash = "zeit-0.6.0-5I6bk0J9AgCVa0nnyL0lNY9Xa9F68hHq-ZarhuXNV-Jb",
-        },
-        .date = .{
-            .path = "lib/date",
-        },
-        .json = .{
-            .path = "lib/json",
-        },
-        .case = .{
-            .url = "git+https://github.com/travisstaloch/case.git#f8003fe5f93b65f673d10d41323e347225e8cb87",
-            .hash = "case-0.0.1-chGYqx_EAADaGJjmoln5M1iMBDTrMdd8to5wdEVpfXm4",
+            .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
+            .hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
         },
     },
 }

@@ -1,19 +1,11 @@
 .{
-    .name = .codegen,
+    .name = "aws-zig-codegen",
     .version = "0.0.1",
-    .paths = .{
-        "build.zig",
-        "build.zig.zon",
-        "src",
-        "README.md",
-        "LICENSE",
-    },
-    .fingerprint = 0x41c2ec2d551fe279,
 
     .dependencies = .{
         .smithy = .{
-            .url = "git+https://git.lerch.org/lobo/smithy.git#09c0a618877ebaf8e15fbfc505983876f4e063d5",
-            .hash = "smithy-1.0.0-uAyBgTnTAgBp2v6vypGcK5-YOCtxs2iEqR-4LfC5FTlS",
+            .url = "https://git.lerch.org/lobo/smithy/archive/41b61745d25a65817209dd5dddbb5f9b66896a99.tar.gz",
+            .hash = "122087deb0ae309b2258d59b40d82fe5921fdfc35b420bb59033244851f7f276fa34",
         },
     },
 }

@@ -1,10 +0,0 @@
-const std = @import("std");
-const smithy = @import("smithy");
-
-const FileGenerationState = @This();
-
-protocol: smithy.AwsProtocol,
-shapes: std.StringHashMap(smithy.ShapeInfo),
-shape_references: std.StringHashMap(u64),
-additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
-additional_types_generated: *std.StringHashMap(void),

@@ -1,21 +0,0 @@
-const std = @import("std");
-const case = @import("case");
-
-const GenerateTypeOptions = @This();
-
-end_structure: bool,
-key_case: case.Case,
-
-pub fn endStructure(self: @This(), value: bool) GenerateTypeOptions {
-    return .{
-        .end_structure = value,
-        .key_case = self.key_case,
-    };
-}
-
-pub fn keyCase(self: @This(), value: case.Case) GenerateTypeOptions {
-    return .{
-        .end_structure = self.end_structure,
-        .key_case = value,
-    };
-}

@@ -1,53 +0,0 @@
-const std = @import("std");
-const smithy = @import("smithy");
-
-const FileGenerationState = @import("FileGenerationState.zig");
-
-const GenerationState = @This();
-
-type_stack: *std.ArrayList(*const smithy.ShapeInfo),
-file_state: FileGenerationState,
-// we will need some sort of "type decls needed" for recursive structures
-allocator: std.mem.Allocator,
-indent_level: u64,
-
-pub fn appendToTypeStack(self: @This(), shape_info: *const smithy.ShapeInfo) !void {
-    try self.type_stack.append(self.allocator, shape_info);
-}
-
-pub fn popFromTypeStack(self: @This()) void {
-    _ = self.type_stack.pop();
-}
-
-pub fn getTypeRecurrenceCount(self: @This(), id: []const u8) u8 {
-    var self_occurences: u8 = 0;
-
-    for (self.type_stack.items) |i| {
-        if (std.mem.eql(u8, i.id, id)) {
-            self_occurences += 1;
-        }
-    }
-
-    return self_occurences;
-}
-
-pub fn indent(self: @This()) GenerationState {
-    var new_state = self.clone();
-    new_state.indent_level += 1;
-    return new_state;
-}
-
-pub fn deindent(self: @This()) GenerationState {
-    var new_state = self.clone();
-    new_state.indent_level = @max(0, new_state.indent_level - 1);
-    return new_state;
-}
-
-pub fn clone(self: @This()) GenerationState {
-    return GenerationState{
-        .type_stack = self.type_stack,
-        .file_state = self.file_state,
-        .allocator = self.allocator,
-        .indent_level = self.indent_level,
-    };
-}

@ -20,6 +20,7 @@ const multihash_len = 1 + 1 + Hash.digest_length;
|
||||||
pub const hex_multihash_len = 2 * multihash_len;
|
pub const hex_multihash_len = 2 * multihash_len;
|
||||||
pub const digest_len = Hash.digest_length;
|
pub const digest_len = Hash.digest_length;
|
||||||
|
|
||||||
|
const MultiHashHexDigest = [hex_multihash_len]u8;
|
||||||
const MultihashFunction = enum(u16) {
|
const MultihashFunction = enum(u16) {
|
||||||
identity = 0x00,
|
identity = 0x00,
|
||||||
sha1 = 0x11,
|
sha1 = 0x11,
|
||||||
|
@@ -69,7 +70,7 @@ pub fn hex64(x: u64) [16]u8 {
     var result: [16]u8 = undefined;
     var i: usize = 0;
     while (i < 8) : (i += 1) {
-        const byte: u8 = @truncate(x >> @as(u6, @intCast(8 * i)));
+        const byte = @as(u8, @truncate(x >> @as(u6, @intCast(8 * i))));
         result[i * 2 + 0] = hex_charset[byte >> 4];
         result[i * 2 + 1] = hex_charset[byte & 15];
     }
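Both sides of this change compute the same value; only the cast syntax differs. Byte i is (x >> 8*i), so the low byte prints first. A small check of that ordering, assuming hex64 and a lowercase hex_charset are in scope as in this file:

const std = @import("std");

test "hex64 emits bytes least-significant first" {
    const out = hex64(0x0123456789abcdef);
    try std.testing.expectEqualStrings("efcdab8967452301", &out);
}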
@@ -107,9 +108,8 @@ pub fn computeDirectoryHash(
     const arena = arena_instance.allocator();

     // Collect all files, recursively, then sort.
-    // Normally we're looking at around 300 model files
-    var all_files = try std.ArrayList(*HashedFile).initCapacity(gpa, 300);
-    defer all_files.deinit(gpa);
+    var all_files = std.ArrayList(*HashedFile).init(gpa);
+    defer all_files.deinit();

     var walker = try dir.walk(gpa);
     defer walker.deinit();
@@ -140,7 +140,7 @@ pub fn computeDirectoryHash(
             wait_group.start();
             try thread_pool.spawn(workerHashFile, .{ dir, hashed_file, &wait_group });

-            try all_files.append(gpa, hashed_file);
+            try all_files.append(hashed_file);
         }
     }
@@ -156,7 +156,7 @@ pub fn computeDirectoryHash(
         hasher.update(&hashed_file.hash);
     }
     if (any_failures) return error.DirectoryHashUnavailable;
-    if (options.needFileHashes) options.fileHashes = try all_files.toOwnedSlice(gpa);
+    if (options.needFileHashes) options.fileHashes = try all_files.toOwnedSlice();
     return hasher.finalResult();
 }
 fn workerHashFile(dir: std.fs.Dir, hashed_file: *HashedFile, wg: *std.Thread.WaitGroup) void {
124  codegen/src/json.zig  (new file)
@@ -0,0 +1,124 @@
const std = @import("std");

// options is a json.Options, but since we're using our hacked json.zig we don't want to
// specifically call this out
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    if (map.len == 0) return true;
    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
    var child_options = options;
    if (child_options.whitespace) |*child_ws|
        child_ws.indent_level += 1;

    try out_stream.writeByte('"');
    try out_stream.writeAll(key);
    _ = try out_stream.write("\":");
    if (options.whitespace) |ws| {
        if (ws.separator) {
            try out_stream.writeByte(' ');
        }
    }
    try out_stream.writeByte('{');
    if (options.whitespace) |_|
        try out_stream.writeByte('\n');
    for (map, 0..) |tag, i| {
        if (tag.key == null or tag.value == null) continue;
        // TODO: Deal with escaping and general "json.stringify" the values...
        if (child_options.whitespace) |ws|
            try ws.outputIndent(out_stream);
        try out_stream.writeByte('"');
        try jsonEscape(tag.key.?, child_options, out_stream);
        _ = try out_stream.write("\":");
        if (child_options.whitespace) |ws| {
            if (ws.separator) {
                try out_stream.writeByte(' ');
            }
        }
        try out_stream.writeByte('"');
        try jsonEscape(tag.value.?, child_options, out_stream);
        try out_stream.writeByte('"');
        if (i < map.len - 1) {
            try out_stream.writeByte(',');
        }
        if (child_options.whitespace) |_|
            try out_stream.writeByte('\n');
    }
    if (options.whitespace) |ws|
        try ws.outputIndent(out_stream);
    try out_stream.writeByte('}');
    return true;
}
// code within jsonEscape lifted from json.zig in stdlib
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
    var i: usize = 0;
    while (i < value.len) : (i += 1) {
        switch (value[i]) {
            // normal ascii character
            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
            // only 2 characters that *must* be escaped
            '\\' => try out_stream.writeAll("\\\\"),
            '\"' => try out_stream.writeAll("\\\""),
            // solidus is optional to escape
            '/' => {
                if (options.string.String.escape_solidus) {
                    try out_stream.writeAll("\\/");
                } else {
                    try out_stream.writeByte('/');
                }
            },
            // control characters with short escapes
            // TODO: option to switch between unicode and 'short' forms?
            0x8 => try out_stream.writeAll("\\b"),
            0xC => try out_stream.writeAll("\\f"),
            '\n' => try out_stream.writeAll("\\n"),
            '\r' => try out_stream.writeAll("\\r"),
            '\t' => try out_stream.writeAll("\\t"),
            else => {
                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                if (ulen == 1 or options.string.String.escape_unicode) {
                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                    try outputUnicodeEscape(codepoint, out_stream);
                } else {
                    try out_stream.writeAll(value[i .. i + ulen]);
                }
                i += ulen - 1;
            },
        }
    }
}
// outputUnicodeEscape and assert lifted from json.zig in stdlib
fn outputUnicodeEscape(
    codepoint: u21,
    out_stream: anytype,
) !void {
    if (codepoint <= 0xFFFF) {
        // If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
        // then it may be represented as a six-character sequence: a reverse solidus, followed
        // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    } else {
        assert(codepoint <= 0x10FFFF);
        // To escape an extended character that is not in the Basic Multilingual Plane,
        // the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
        const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
        const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
        try out_stream.writeAll("\\u");
        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    }
}

/// This function invokes undefined behavior when `ok` is `false`.
/// In Debug and ReleaseSafe modes, calls to this function are always
/// generated, and the `unreachable` statement triggers a panic.
/// In ReleaseFast and ReleaseSmall modes, calls to this function are
/// optimized away, and in fact the optimizer is able to use the assertion
/// in its heuristics.
/// Inside a test block, it is best to use the `std.testing` module rather
/// than this function, because this function may not detect a test failure
/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
/// function is the correct function to use.
pub fn assert(ok: bool) void {
    if (!ok) unreachable; // assertion failure
}
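The surrogate-pair arithmetic in outputUnicodeEscape can be checked in isolation. A small self-contained test, using the well-known UTF-16 encoding of U+1F600 (D83D DE00):

const std = @import("std");

test "UTF-16 surrogate pair arithmetic" {
    const codepoint: u21 = 0x1F600;
    // Same formulas as outputUnicodeEscape above.
    const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
    const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
    try std.testing.expectEqual(@as(u16, 0xD83D), high);
    try std.testing.expectEqual(@as(u16, 0xDE00), low);
}

Note that the low half masks the raw codepoint rather than codepoint - 0x10000; that works because subtracting 0x10000 never changes the bottom ten bits.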
File diff suppressed because it is too large
@@ -1 +0,0 @@
pub const json = @import("serialization/json.zig");
@@ -1,392 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");
const smithy_tools = @import("../smithy_tools.zig");
const support = @import("../support.zig");

const GenerationState = @import("../GenerationState.zig");
const GenerateTypeOptions = @import("../GenerateTypeOptions.zig");
const Allocator = std.mem.Allocator;

const Shape = smithy_tools.Shape;

const JsonMember = struct {
    field_name: []const u8,
    json_key: []const u8,
    target: []const u8,
    type_member: smithy.TypeMember,
    shape_info: smithy.ShapeInfo,
};

pub fn generateToJsonFunction(shape_id: []const u8, writer: *std.Io.Writer, state: GenerationState, comptime options: GenerateTypeOptions) !void {
    _ = options;
    const allocator = state.allocator;

    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            try writer.writeAll("pub fn jsonStringify(self: @This(), jw: anytype) !void {\n");
            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, "self", member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");
            try writer.writeAll("}\n\n");
        }
    }
}

fn getJsonMembers(allocator: Allocator, shape: Shape, state: GenerationState) !?std.ArrayListUnmanaged(JsonMember) {
    const is_json_shape = switch (state.file_state.protocol) {
        .json_1_0, .json_1_1, .rest_json_1 => true,
        else => false,
    };

    if (!is_json_shape) {
        return null;
    }

    var hash_map = std.StringHashMapUnmanaged(smithy.TypeMember){};

    const shape_members = smithy_tools.getShapeMembers(shape);
    for (shape_members) |member| {
        try hash_map.putNoClobber(state.allocator, member.name, member);
    }

    for (shape_members) |member| {
        for (member.traits) |trait| {
            switch (trait) {
                .http_header, .http_query => {
                    std.debug.assert(hash_map.remove(member.name));
                    break;
                },
                else => continue,
            }
        }
    }

    if (hash_map.count() == 0) {
        return null;
    }

    var json_members = std.ArrayListUnmanaged(JsonMember){};

    var iter = hash_map.iterator();
    while (iter.next()) |kvp| {
        const member = kvp.value_ptr.*;

        const key = blk: {
            if (smithy_tools.findTrait(.json_name, member.traits)) |trait| {
                break :blk trait.json_name;
            }

            break :blk member.name;
        };

        try json_members.append(allocator, .{
            .field_name = try support.constantName(allocator, member.name, .snake),
            .json_key = key,
            .target = member.target,
            .type_member = member,
            .shape_info = try smithy_tools.getShapeInfo(member.target, state.file_state.shapes),
        });
    }

    return json_members;
}

fn getMemberValueJson(allocator: std.mem.Allocator, source: []const u8, member: JsonMember) ![]const u8 {
    const member_value = try std.fmt.allocPrint(allocator, "@field({s}, \"{s}\")", .{ source, member.field_name });
    defer allocator.free(member_value);

    var output_block = std.Io.Writer.Allocating.init(allocator);
    defer output_block.deinit();

    try writeMemberValue(
        &output_block.writer,
        member_value,
    );

    return output_block.toOwnedSlice();
}

fn getShapeJsonValueType(shape: Shape) []const u8 {
    return switch (shape) {
        .string, .@"enum", .blob, .document, .timestamp => ".string",
        .boolean => ".bool",
        .integer, .bigInteger, .short, .long => ".integer",
        .float, .double, .bigDecimal => ".float",
        else => std.debug.panic("Unexpected shape: {}", .{shape}),
    };
}

fn writeMemberValue(
    writer: *std.Io.Writer,
    member_value: []const u8,
) !void {
    try writer.writeAll(member_value);
}

const WriteMemberJsonParams = struct {
    shape_id: []const u8,
    field_name: []const u8,
    field_value: []const u8,
    state: GenerationState,
    member: smithy.TypeMember,
};

fn writeStructureJson(params: WriteMemberJsonParams, writer: *std.Io.Writer) !void {
    const shape_type = "structure";
    const allocator = params.state.allocator;
    const state = params.state;

    const shape_info = try smithy_tools.getShapeInfo(params.shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    const structure_name = try std.fmt.allocPrint(params.state.allocator, "{s}_{s}_{d}", .{ params.field_name, shape_type, state.indent_level });
    defer params.state.allocator.free(structure_name);

    const object_value_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{structure_name});
    defer allocator.free(object_value_capture);

    try writer.print("\n// start {s}: {s}\n", .{ shape_type, structure_name });
    defer writer.print("// end {s}: {s}\n", .{ shape_type, structure_name }) catch std.debug.panic("Unreachable", .{});

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            const is_optional = smithy_tools.shapeIsOptional(params.member.traits);

            var object_value = params.field_value;

            if (is_optional) {
                object_value = object_value_capture;

                try writer.print("if ({s}) |{s}|", .{ params.field_value, object_value_capture });
                try writer.writeAll("{\n");
            }

            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            // this is a workaround in case a child structure doesn't have any fields
            // and therefore doesn't use the structure variable so we capture it here.
            // the compiler should optimize this away
            try writer.print("const unused_capture_{s} = {s};\n", .{ structure_name, object_value });
            try writer.print("_ = unused_capture_{s};\n", .{structure_name});

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, object_value, member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");

            if (is_optional) {
                try writer.writeAll("} else {\n");
                try writer.writeAll("try jw.write(null);\n");
                try writer.writeAll("}\n");
            }
        }
    }
}

fn writeListJson(list: smithy_tools.ListShape, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const state = params.state;
    const allocator = state.allocator;

    const list_name = try std.fmt.allocPrint(allocator, "{s}_list_{d}", .{ params.field_name, state.indent_level });
    defer state.allocator.free(list_name);

    try writer.print("\n// start list: {s}\n", .{list_name});
    defer writer.print("// end list: {s}\n", .{list_name}) catch std.debug.panic("Unreachable", .{});

    const list_each_value = try std.fmt.allocPrint(allocator, "{s}_value", .{list_name});
    defer allocator.free(list_each_value);

    const list_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{list_name});
    defer allocator.free(list_capture);

    {
        const list_is_optional = smithy_tools.shapeIsOptional(list.traits);

        var list_value = params.field_value;

        if (list_is_optional) {
            list_value = list_capture;

            try writer.print("if ({s}) |{s}| ", .{
                params.field_value,
                list_capture,
            });
            try writer.writeAll("{\n");
        }

        // start loop
        try writer.writeAll("try jw.beginArray();\n");
        try writer.print("for ({s}) |{s}|", .{ list_value, list_each_value });
        try writer.writeAll("{\n");
        try writer.writeAll("try jw.write(");
        try writeMemberValue(
            writer,
            list_each_value,
        );
        try writer.writeAll(");\n");
        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endArray();\n");
        // end loop

        if (list_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeMapJson(map: smithy_tools.MapShape, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const state = params.state;
    const name = params.field_name;
    const value = params.field_value;
    const allocator = state.allocator;

    const map_name = try std.fmt.allocPrint(allocator, "{s}_object_map_{d}", .{ name, state.indent_level });
    defer allocator.free(map_name);

    try writer.print("\n// start map: {s}\n", .{map_name});
    defer writer.print("// end map: {s}\n", .{map_name}) catch std.debug.panic("Unreachable", .{});

    const map_value_capture = try std.fmt.allocPrint(allocator, "{s}_kvp", .{map_name});
    defer allocator.free(map_value_capture);

    const map_capture_key = try std.fmt.allocPrint(allocator, "{s}.key", .{map_value_capture});
    defer allocator.free(map_capture_key);

    const map_capture_value = try std.fmt.allocPrint(allocator, "{s}.value", .{map_value_capture});
    defer allocator.free(map_capture_value);

    const value_shape_info = try smithy_tools.getShapeInfo(map.value, state.file_state.shapes);

    const value_member = smithy.TypeMember{
        .name = "value",
        .target = map.value,
        .traits = smithy_tools.getShapeTraits(value_shape_info.shape),
    };

    const map_capture = try std.fmt.allocPrint(state.allocator, "{s}_capture", .{map_name});

    {
        const map_member = params.member;
        const map_is_optional = !smithy_tools.hasTrait(.required, map_member.traits);

        var map_value = value;

        if (map_is_optional) {
            map_value = map_capture;

            try writer.print("if ({s}) |{s}| ", .{
                value,
                map_capture,
            });
            try writer.writeAll("{\n");
        }

        try writer.writeAll("try jw.beginObject();\n");
        try writer.writeAll("{\n");

        // start loop
        try writer.print("for ({s}) |{s}|", .{ map_value, map_value_capture });
        try writer.writeAll("{\n");
        try writer.print("try jw.objectField({s});\n", .{map_capture_key});

        try writeMemberJson(.{
            .shape_id = map.value,
            .field_name = "value",
            .field_value = map_capture_value,
            .state = state.indent(),
            .member = value_member,
        }, writer);

        try writer.writeAll("}\n");
        // end loop

        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endObject();\n");

        if (map_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeScalarJson(comment: []const u8, params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    try writer.print("try jw.write({s}); // {s}\n\n", .{ params.field_value, comment });
}

fn writeMemberJson(params: WriteMemberJsonParams, writer: *std.Io.Writer) anyerror!void {
    const shape_id = params.shape_id;
    const state = params.state;
    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (state.getTypeRecurrenceCount(shape_id) > 2) {
        return;
    }

    try state.appendToTypeStack(&shape_info);
    defer state.popFromTypeStack();

    switch (shape) {
        .structure, .uniontype => try writeStructureJson(params, writer),
        .list => |l| try writeListJson(l, params, writer),
        .map => |m| try writeMapJson(m, params, writer),
        .timestamp => try writeScalarJson("timestamp", params, writer),
        .string => try writeScalarJson("string", params, writer),
        .@"enum" => try writeScalarJson("enum", params, writer),
        .document => try writeScalarJson("document", params, writer),
        .blob => try writeScalarJson("blob", params, writer),
        .boolean => try writeScalarJson("bool", params, writer),
        .float => try writeScalarJson("float", params, writer),
        .integer => try writeScalarJson("integer", params, writer),
        .long => try writeScalarJson("long", params, writer),
        .double => try writeScalarJson("double", params, writer),
        .bigDecimal => try writeScalarJson("bigDecimal", params, writer),
        .bigInteger => try writeScalarJson("bigInteger", params, writer),
        .unit => try writeScalarJson("unit", params, writer),
        .byte => try writeScalarJson("byte", params, writer),
        .short => try writeScalarJson("short", params, writer),
        .service, .resource, .operation, .member, .set => std.debug.panic("Shape type not supported: {}", .{shape}),
    }
}
@@ -1,67 +0,0 @@
const std = @import("std");
const smithy = @import("smithy");

pub const Shape = @FieldType(smithy.ShapeInfo, "shape");
pub const ServiceShape = @TypeOf((Shape{ .service = undefined }).service);
pub const ListShape = @TypeOf((Shape{ .list = undefined }).list);
pub const MapShape = @TypeOf((Shape{ .map = undefined }).map);

pub fn getShapeInfo(id: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo)) !smithy.ShapeInfo {
    return shapes.get(id) orelse {
        std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
        return error.InvalidType;
    };
}

pub fn getShapeTraits(shape: Shape) []smithy.Trait {
    return switch (shape) {
        .service, .operation, .resource => std.debug.panic("Unexpected shape type: {}", .{shape}),
        inline else => |s| s.traits,
    };
}

pub fn getShapeMembers(shape: Shape) []smithy.TypeMember {
    return switch (shape) {
        inline .structure, .uniontype => |s| s.members,
        else => std.debug.panic("Unexpected shape type: {}", .{shape}),
    };
}

pub fn shapeIsLeaf(shape: Shape) bool {
    return switch (shape) {
        .@"enum",
        .bigDecimal,
        .bigInteger,
        .blob,
        .boolean,
        .byte,
        .document,
        .double,
        .float,
        .integer,
        .long,
        .short,
        .string,
        .timestamp,
        => true,
        else => false,
    };
}

pub fn shapeIsOptional(traits: []smithy.Trait) bool {
    return !hasTrait(.required, traits);
}

pub fn findTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) ?smithy.Trait {
    for (traits) |trait| {
        if (trait == trait_type) {
            return trait;
        }
    }

    return null;
}

pub fn hasTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) bool {
    return findTrait(trait_type, traits) != null;
}
157  codegen/src/snake.zig  (new file)
@@ -0,0 +1,157 @@
const std = @import("std");
const expectEqualStrings = std.testing.expectEqualStrings;

pub fn fromPascalCase(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    const rc = try allocator.alloc(u8, name.len * 2); // This is overkill, but is > the maximum length possibly needed
    errdefer allocator.free(rc);
    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
    var target_inx: u64 = 0;
    var curr_char = (try isAscii(utf8_name.nextCodepoint())).?;
    target_inx = setNext(lowercase(curr_char), rc, target_inx);
    var prev_char = curr_char;
    if (try isAscii(utf8_name.nextCodepoint())) |ch| {
        curr_char = ch;
    } else {
        // Single character only - we're done here
        _ = setNext(0, rc, target_inx);
        return rc[0..target_inx];
    }
    while (try isAscii(utf8_name.nextCodepoint())) |next_char| {
        if (next_char == ' ') {
            // a space shouldn't be happening. But if it does, it clues us
            // in pretty well:
            //
            // MyStuff Is Awesome
            //        |^
            //        |next_char
            //        ^
            //        prev_codepoint/ascii_prev_char (and target_inx)
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            target_inx = setNext('_', rc, target_inx);
            var maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                std.log.err("Error on fromPascalCase processing name '{s}'", .{name});
            }
            curr_char = maybe_curr_char.?;
            maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
            if (maybe_curr_char == null) {
                // We have reached the end of the string (e.g. "Resource Explorer 2")
                // We need to do this check before we setNext, so that we don't
                // end up duplicating the last character
                break;
                // std.log.err("Error on fromPascalCase processing name '{s}', curr_char = '{}'", .{ name, curr_char });
            }
            target_inx = setNext(lowercase(curr_char), rc, target_inx);
            prev_char = curr_char;
            curr_char = maybe_curr_char.?;
            continue;
        }
        if (between(curr_char, 'A', 'Z')) {
            if (isAcronym(curr_char, next_char)) {
                // We could be in an acronym at the start of a word. This
                // is the only case where we actually need to look back at the
                // previous character, and if that's the case, throw in an
                // underscore
                // "SAMLMySAMLAcronymThing");
                if (between(prev_char, 'a', 'z'))
                    target_inx = setNext('_', rc, target_inx);

                //we are in an acronym - don't snake, just lower
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            } else {
                target_inx = setNext('_', rc, target_inx);
                target_inx = setNext(lowercase(curr_char), rc, target_inx);
            }
        } else {
            target_inx = setNext(curr_char, rc, target_inx);
        }
        prev_char = curr_char;
        curr_char = next_char;
    }
    // work in the last codepoint - force lowercase
    target_inx = setNext(lowercase(curr_char), rc, target_inx);

    rc[target_inx] = 0;
    _ = allocator.resize(rc, target_inx);
    return rc[0..target_inx];
}

fn isAcronym(char1: u8, char2: u8) bool {
    return isAcronymChar(char1) and isAcronymChar(char2);
}
fn isAcronymChar(char: u8) bool {
    return between(char, 'A', 'Z') or between(char, '0', '9');
}
fn isAscii(codepoint: ?u21) !?u8 {
    if (codepoint) |cp| {
        if (cp > 0xff) return error.UnicodeNotSupported;
        return @as(u8, @truncate(cp));
    }
    return null;
}

fn setNext(ascii: u8, slice: []u8, inx: u64) u64 {
    slice[inx] = ascii;
    return inx + 1;
}

fn lowercase(ascii: u8) u8 {
    var lowercase_char = ascii;
    if (between(ascii, 'A', 'Z'))
        lowercase_char = ascii + ('a' - 'A');
    return lowercase_char;
}

fn between(char: u8, from: u8, to: u8) bool {
    return char >= from and char <= to;
}

test "converts from PascalCase to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "MyPascalCaseThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("my_pascal_case_thing", snake_case);
}
test "handles from PascalCase acronyms to snake_case" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "SAMLMySAMLAcronymThing");
    defer allocator.free(snake_case);
    try expectEqualStrings("saml_my_saml_acronym_thing", snake_case);
}
test "spaces in the name" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "API Gateway");
    defer allocator.free(snake_case);
    try expectEqualStrings("api_gateway", snake_case);
}

test "S3" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "S3");
    defer allocator.free(snake_case);
    try expectEqualStrings("s3", snake_case);
}

test "ec2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "EC2");
    defer allocator.free(snake_case);
    try expectEqualStrings("ec2", snake_case);
}

test "IoT 1Click Devices Service" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "IoT 1Click Devices Service");
    defer allocator.free(snake_case);
    // NOTE: There is some debate among humans about what this should
    // turn into. Should it be iot_1click_... or iot_1_click...?
    try expectEqualStrings("iot_1_click_devices_service", snake_case);
}
test "Resource Explorer 2" {
    const allocator = std.testing.allocator;
    const snake_case = try fromPascalCase(allocator, "Resource Explorer 2");
    defer allocator.free(snake_case);
    // NOTE: There is some debate among humans about what this should
    // turn into. Should it be iot_1click_... or iot_1_click...?
    try expectEqualStrings("resource_explorer_2", snake_case);
}
@@ -1,33 +0,0 @@
const std = @import("std");
const case = @import("case");

const Allocator = std.mem.Allocator;

pub fn constantName(allocator: Allocator, id: []const u8, comptime to_case: case.Case) ![]const u8 {
    // There are some ids that don't follow consistent rules, so we'll
    // look for the exceptions and, if not found, revert to the snake case
    // algorithm

    var buf = std.mem.zeroes([256]u8);
    @memcpy(buf[0..id.len], id);

    var name = try allocator.dupe(u8, id);

    const simple_replacements = &.{
        &.{ "DevOps", "Devops" },
        &.{ "IoT", "Iot" },
        &.{ "FSx", "Fsx" },
        &.{ "CloudFront", "Cloudfront" },
    };

    inline for (simple_replacements) |rep| {
        if (std.mem.indexOf(u8, name, rep[0])) |idx| @memcpy(name[idx .. idx + rep[0].len], rep[1]);
    }

    if (to_case == .snake) {
        if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
        if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});
    }

    return try case.allocTo(allocator, to_case, name);
}
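The pre-replacement pass exists so the case library sees, say, "Devops" as one word instead of splitting "DevOps" into dev_ops. A hypothetical test sketching that effect (the expected output assumes the case library's usual snake_case behavior):

const std = @import("std");
const support = @import("support.zig");

test "constantName applies the DevOps exception" {
    const allocator = std.testing.allocator;
    const name = try support.constantName(allocator, "DevOpsGuru", .snake);
    defer allocator.free(name);
    // "DevOps" is normalized to "Devops" first, so the result is
    // devops_guru rather than dev_ops_guru.
    try std.testing.expectEqualStrings("devops_guru", name);
}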
@@ -15,18 +15,22 @@ pub fn build(b: *std.Build) void {
     // set a preferred release mode, allowing the user to decide how to optimize.
     const optimize = b.standardOptimizeOption(.{});

-    const mod_exe = b.createModule(.{
-        .root_source_file = b.path("src/main.zig"),
-        .target = target,
-        .optimize = optimize,
-    });
-
-    const exe = b.addExecutable(.{
-        .name = "tmp",
-        .root_module = mod_exe,
-    });
-
-    const aws_dep = b.dependency("aws", .{
+    const exe = b.addExecutable(.{
+        .name = "tmp",
+        // In this case the main source file is merely a path, however, in more
+        // complicated build scripts, this could be a generated file.
+        .root_source_file = .{ .path = "src/main.zig" },
+        .target = target,
+        .optimize = optimize,
+    });
+
+    // const smithy_dep = b.dependency("smithy", .{
+    //     // These are the two arguments to the dependency. It expects a target and optimization level.
+    //     .target = target,
+    //     .optimize = optimize,
+    // });
+    // exe.addModule("smithy", smithy_dep.module("smithy"));
+    const aws_dep = b.dependency("aws-zig", .{
         // These are the two arguments to the dependency. It expects a target and optimization level.
         .target = target,
         .optimize = optimize,
@@ -61,15 +65,12 @@ pub fn build(b: *std.Build) void {
     const run_step = b.step("run", "Run the app");
     run_step.dependOn(&run_cmd.step);

-    const mod_unit_tests = b.createModule(.{
-        .root_source_file = b.path("src/main.zig"),
-        .target = target,
-        .optimize = optimize,
-    });
     // Creates a step for unit testing. This only builds the test executable
     // but does not run it.
     const unit_tests = b.addTest(.{
-        .root_module = mod_unit_tests,
+        .root_source_file = .{ .path = "src/main.zig" },
+        .target = target,
+        .optimize = optimize,
     });

     const run_unit_tests = b.addRunArtifact(unit_tests);
@@ -1,13 +1,16 @@
 .{
-    .name = .myapp,
+    .name = "myapp",
     .version = "0.0.1",
-    .fingerprint = 0x8798022a511224c5,
     .paths = .{""},

     .dependencies = .{
-        .aws = .{
-            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e41f98b389539c8bc6b1a231d25e2980318e5ef4/e41f98b389539c8bc6b1a231d25e2980318e5ef4-with-models.tar.gz",
-            .hash = "aws-0.0.1-SbsFcI0RCgBdf1nak95gi1kAtI6sv3Ntb7BPETH30fpS",
+        .smithy = .{
+            .url = "https://git.lerch.org/lobo/smithy/archive/1e534201c4df5ea4f615faeedc69d414adbec0b1.tar.gz",
+            .hash = "1220af63ae0498010004af79936cedf3fe6702f516daab77ebbd97a274eba1b42aad",
+        },
+        .@"aws-zig" = .{
+            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/a0773971f2f52182c8a5235582500d36afda2e81/a0773971f2f52182c8a5235582500d36afda2e81-with-models.tar.gz",
+            .hash = "1220198f7b734c1cc6a683ad13246439a59be934156a2df3a734bcaf15433b33eead",
         },
     },
 }
@@ -15,10 +15,10 @@ pub fn main() anyerror!void {
     var gpa = std.heap.GeneralPurposeAllocator(.{}){};
     defer _ = gpa.deinit();
     const allocator = gpa.allocator();
-    var stdout_buffer: [1024]u8 = undefined;
-    var stdout_raw = std.fs.File.stdout().writer(&stdout_buffer);
-    const stdout = &stdout_raw.interface;
-    defer stdout.flush() catch unreachable;
+    const stdout_raw = std.io.getStdOut().writer();
+    var bw = std.io.bufferedWriter(stdout_raw);
+    defer bw.flush() catch unreachable;
+    const stdout = bw.writer();

     // To use a proxy, uncomment the following with your own configuration
     // const proxy = std.http.Proxy{
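Both sides buffer stdout and flush once on exit; only the API differs between Zig versions. The zig-0.12.0 pattern in isolation, as a minimal standalone sketch:

const std = @import("std");

pub fn main() !void {
    const stdout_raw = std.io.getStdOut().writer();
    var bw = std.io.bufferedWriter(stdout_raw);
    defer bw.flush() catch {}; // write everything out once, at exit
    const stdout = bw.writer();
    try stdout.print("hello from a buffered writer\n", .{});
}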
@@ -36,8 +36,10 @@ pub fn main() anyerror!void {
         .client = client,
     };

+    // As of 2023-08-28, only ECS from this list supports TLS v1.3
+    // AWS commitment is to enable all services by 2023-12-31
     const services = aws.Services(.{ .sts, .kms }){};
-    try stdout.print("Calling KMS ListKeys\n", .{});
+    try stdout.print("Calling KMS ListKeys, a TLS 1.3 enabled service\n", .{});
     try stdout.print("You likely have at least some AWS-generated keys in your account,\n", .{});
     try stdout.print("but if the account has not had many services used, this may return 0 keys\n\n", .{});
     const call_kms = try aws.Request(services.kms.list_keys).call(.{}, options);
@@ -49,7 +51,8 @@ pub fn main() anyerror!void {
     }
     defer call_kms.deinit();

-    try stdout.print("\n\n\nCalling STS GetCallerIdentity\n", .{});
+    try stdout.print("\n\n\nCalling STS GetCallerIdentity. This does not have TLS 1.3 in September 2023\n", .{});
+    try stdout.print("A failure may occur\n\n", .{});
     const call = try aws.Request(services.sts.get_caller_identity).call(.{}, options);
     defer call.deinit();
     try stdout.print("\tarn: {s}\n", .{call.response.arn.?});
@@ -1,41 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const lib_mod = b.addModule("date", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .optimize = optimize,
    });

    const lib = b.addLibrary(.{
        .linkage = .static,
        .name = "date",
        .root_module = lib_mod,
    });

    b.installArtifact(lib);

    const lib_unit_tests = b.addTest(.{
        .root_module = lib_mod,
    });

    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_lib_unit_tests.step);

    const dep_zeit = b.dependency("zeit", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("zeit", dep_zeit.module("zeit"));

    const dep_json = b.dependency("json", .{
        .target = target,
        .optimize = optimize,
    });
    lib_mod.addImport("json", dep_json.module("json"));
}
@@ -1,20 +0,0 @@
.{
    .name = .date,
    .version = "0.0.0",
    .fingerprint = 0xaa9e377a226d739e, // Changing this has security and trust implications.
    .minimum_zig_version = "0.14.0",
    .dependencies = .{
        .zeit = .{
            .url = "git+https://github.com/rockorager/zeit?ref=zig-0.15#ed2ca60db118414bda2b12df2039e33bad3b0b88",
            .hash = "zeit-0.6.0-5I6bk0J9AgCVa0nnyL0lNY9Xa9F68hHq-ZarhuXNV-Jb",
        },
        .json = .{
            .path = "../json",
        },
    },
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}
@@ -1,118 +0,0 @@
const std = @import("std");
const log = std.log.scoped(.date);
const zeit = @import("zeit");

pub const DateTime = struct {
    day: u8,
    month: u8,
    year: u16,
    hour: u8,
    minute: u8,
    second: u8,

    pub fn fromInstant(val: zeit.Instant) DateTime {
        return fromTime(val.time());
    }

    pub fn fromTime(val: zeit.Time) DateTime {
        return DateTime{
            .day = val.day,
            .month = @intFromEnum(val.month),
            .year = @intCast(val.year),
            .hour = val.hour,
            .minute = val.minute,
            .second = val.second,
        };
    }

    pub fn time(self: DateTime) zeit.Time {
        return zeit.Time{
            .day = @intCast(self.day),
            .month = @enumFromInt(self.month),
            .year = self.year,
            .hour = @intCast(self.hour),
            .minute = @intCast(self.minute),
            .second = @intCast(self.second),
        };
    }

    pub fn instant(self: DateTime) !zeit.Instant {
        return try zeit.instant(.{ .source = .{ .time = self.time() } });
    }
};

pub fn timestampToDateTime(timestamp: zeit.Seconds) DateTime {
    const ins = zeit.instant(.{ .source = .{ .unix_timestamp = timestamp } }) catch @panic("Failed to create instant from timestamp");
    return DateTime.fromInstant(ins);
}

pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
}

/// Converts a string to a timestamp value. May not handle dates before the
/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .rfc1123 = data } });
    return DateTime.fromInstant(ins);
}

pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
}

/// Converts a string to a timestamp value. May not handle dates before the
/// epoch
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    const ins = try zeit.instant(.{ .source = .{ .iso8601 = data } });
    return DateTime.fromInstant(ins);
}

pub fn dateTimeToTimestamp(datetime: DateTime) !zeit.Seconds {
    return (try datetime.instant()).unixTimestamp();
}

fn printDateTime(dt: DateTime) void {
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}

pub fn printNowUtc() void {
    printDateTime(timestampToDateTime(std.time.timestamp()));
}

test "Convert timestamp to datetime" {
    printDateTime(timestampToDateTime(std.time.timestamp()));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));

    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
    // Get time for date: https://wtools.io/convert-date-time-to-unix-time
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}

test "Convert datetime to timestamp" {
    try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
    try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
    try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}

test "Convert ISO8601 string to timestamp" {
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}

test "Convert whatever AWS is sending us to timestamp" {
    const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
    try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}
@@ -1,20 +0,0 @@
const std = @import("std");
const testing = std.testing;

const parsing = @import("parsing.zig");
pub const DateTime = parsing.DateTime;
pub const timestampToDateTime = parsing.timestampToDateTime;
pub const parseEnglishToTimestamp = parsing.parseEnglishToTimestamp;
pub const parseEnglishToDateTime = parsing.parseEnglishToDateTime;
pub const parseIso8601ToTimestamp = parsing.parseIso8601ToTimestamp;
pub const parseIso8601ToDateTime = parsing.parseIso8601ToDateTime;
pub const dateTimeToTimestamp = parsing.dateTimeToTimestamp;
pub const printNowUtc = parsing.printNowUtc;

const timestamp = @import("timestamp.zig");
pub const DateFormat = timestamp.DateFormat;
pub const Timestamp = timestamp.Timestamp;

test {
    testing.refAllDeclsRecursive(@This());
}
@@ -1,69 +0,0 @@
const std = @import("std");
const zeit = @import("zeit");

pub const DateFormat = enum {
    rfc1123,
    iso8601,
};

pub const Timestamp = enum(zeit.Nanoseconds) {
    _,

    pub fn jsonStringify(value: Timestamp, jw: anytype) !void {
        const instant = zeit.instant(.{
            .source = .{
                .unix_nano = @intFromEnum(value),
            },
        }) catch std.debug.panic("Failed to parse timestamp to instant: {d}", .{value});

        const fmt = "Mon, 02 Jan 2006 15:04:05 GMT";
        var buf: [fmt.len]u8 = undefined;

        var fbs = std.Io.Writer.fixed(&buf);
        instant.time().gofmt(&fbs, fmt) catch std.debug.panic("Failed to format instant: {d}", .{instant.timestamp});

        try jw.write(&buf);
    }

    pub fn parse(val: []const u8) !Timestamp {
        const date_format = blk: {
            if (std.ascii.isDigit(val[0])) {
                break :blk DateFormat.iso8601;
            } else {
                break :blk DateFormat.rfc1123;
            }
        };

        const ins = try zeit.instant(.{
            .source = switch (date_format) {
                DateFormat.iso8601 => .{
                    .iso8601 = val,
                },
                DateFormat.rfc1123 => .{
                    .rfc1123 = val,
                },
            },
        });

        return @enumFromInt(ins.timestamp);
    }
};

test Timestamp {
    const in_date = "Wed, 23 Apr 2025 11:23:45 GMT";

    const expected_ts: Timestamp = @enumFromInt(1745407425000000000);
    const actual_ts = try Timestamp.parse(in_date);

    try std.testing.expectEqual(expected_ts, actual_ts);

    var buf: [100]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    var counting_writer = std.io.countingWriter(fbs.writer());
    try Timestamp.jsonStringify(expected_ts, .{}, counting_writer.writer());

    const expected_json = "\"" ++ in_date ++ "\"";
    const actual_json = buf[0..counting_writer.bytes_written];

    try std.testing.expectEqualStrings(expected_json, actual_json);
}
@@ -1,29 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const lib_mod = b.addModule("json", .{
        .root_source_file = b.path("src/json.zig"),
        .target = target,
        .optimize = optimize,
    });

    const lib = b.addLibrary(.{
        .linkage = .static,
        .name = "json",
        .root_module = lib_mod,
    });

    b.installArtifact(lib);

    const lib_unit_tests = b.addTest(.{
        .root_module = lib_mod,
    });

    const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_lib_unit_tests.step);
}
@@ -1,12 +0,0 @@
.{
    .name = .json,
    .version = "0.0.0",
    .fingerprint = 0x6b0725452065211c, // Changing this has security and trust implications.
    .minimum_zig_version = "0.14.0",
    .dependencies = .{},
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}
1630  src/aws.zig
File diff suppressed because it is too large
@@ -25,7 +25,7 @@ pub const Credentials = struct {
         };
     }
     pub fn deinit(self: Self) void {
-        std.crypto.secureZero(u8, self.secret_key);
+        std.crypto.utils.secureZero(u8, self.secret_key);
         self.allocator.free(self.secret_key);
         self.allocator.free(self.access_key);
         if (self.session_token) |t| self.allocator.free(t);
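The only change above is the namespace: zig-0.12.0 has secureZero under std.crypto.utils, while master calls std.crypto.secureZero directly. The pattern itself, sketched standalone against the 0.12 API:

const std = @import("std");

test "zero a secret before freeing" {
    const allocator = std.testing.allocator;
    const secret = try allocator.dupe(u8, "hunter2");
    defer allocator.free(secret);
    // Overwrite so the secret does not linger in freed memory
    // (std.crypto.secureZero on newer Zig).
    std.crypto.utils.secureZero(u8, secret);
    for (secret) |b| try std.testing.expectEqual(@as(u8, 0), b);
}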
@@ -11,56 +11,7 @@ const std = @import("std");
 const builtin = @import("builtin");
 const auth = @import("aws_authentication.zig");

-const scoped_log = std.log.scoped(.aws_credentials);
-/// Specifies logging level. This should not be touched unless the normal
-/// zig logging capabilities are inaccessible (e.g. during a build)
-pub var log_level: std.log.Level = .debug;
-
-/// Turn off logging completely
-pub var logs_off: bool = false;
-const log = struct {
-    /// Log an error message. This log level is intended to be used
-    /// when something has gone wrong. This might be recoverable or might
-    /// be followed by the program exiting.
-    pub fn err(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.err) <= @intFromEnum(log_level))
-            scoped_log.err(format, args);
-    }
-
-    /// Log a warning message. This log level is intended to be used if
-    /// it is uncertain whether something has gone wrong or not, but the
-    /// circumstances would be worth investigating.
-    pub fn warn(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.warn) <= @intFromEnum(log_level))
-            scoped_log.warn(format, args);
-    }
-
-    /// Log an info message. This log level is intended to be used for
-    /// general messages about the state of the program.
-    pub fn info(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.info) <= @intFromEnum(log_level))
-            scoped_log.info(format, args);
-    }
-
-    /// Log a debug message. This log level is intended to be used for
-    /// messages which are only useful for debugging.
-    pub fn debug(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.debug) <= @intFromEnum(log_level))
-            scoped_log.debug(format, args);
-    }
-};
+const log = std.log.scoped(.aws_credentials);

 pub const Profile = struct {
     /// Credential file. Defaults to AWS_SHARED_CREDENTIALS_FILE or ~/.aws/credentials
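Note: the block deleted above is a hand-rolled logging shim: it lets callers silence or re-level SDK logging at runtime in contexts where the normal std_options-based filtering is unavailable (e.g. inside a build). The same shim is removed from aws_http.zig and aws_signing.zig below. A minimal sketch of the gate, lifted straight from the deleted code (the scope name is illustrative):

const std = @import("std");

pub var log_level: std.log.Level = .debug;
pub var logs_off: bool = false;
const scoped_log = std.log.scoped(.example);

// Every wrapper in the deleted shim applies exactly this check before
// delegating to the real scoped logger.
fn debug(comptime format: []const u8, args: anytype) void {
    if (!logs_off and @intFromEnum(std.log.Level.debug) <= @intFromEnum(log_level))
        scoped_log.debug(format, args);
}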
@@ -173,12 +124,11 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {

     var cl = std.http.Client{ .allocator = allocator };
     defer cl.deinit(); // I don't belive connection pooling would help much here as it's non-ssl and local
-    var aw: std.Io.Writer.Allocating = .init(allocator);
-    defer aw.deinit();
-    const response_payload = &aw.writer;
+    var resp_payload = std.ArrayList(u8).init(allocator);
+    defer resp_payload.deinit();
     const req = try cl.fetch(.{
         .location = .{ .url = container_uri },
-        .response_writer = response_payload,
+        .response_storage = .{ .dynamic = &resp_payload },
     });
     if (req.status != .ok and req.status != .not_found) {
         log.warn("Bad status code received from container credentials endpoint: {}", .{@intFromEnum(req.status)});
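Note: this is the recurring mechanical change in this file: master collects the response through a std.Io.Writer.Allocating (.response_writer), while zig-0.12.0 collects it into an ArrayList (.response_storage). A self-contained sketch of the 0.12-side call, with a placeholder URL:

const std = @import("std");

fn fetchBody(allocator: std.mem.Allocator, url: []const u8) ![]u8 {
    var cl = std.http.Client{ .allocator = allocator };
    defer cl.deinit();
    var body = std.ArrayList(u8).init(allocator);
    defer body.deinit();
    const res = try cl.fetch(.{
        .location = .{ .url = url },
        .response_storage = .{ .dynamic = &body }, // client appends the body here
    });
    if (res.status != .ok) return error.UnexpectedStatus;
    return body.toOwnedSlice();
}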
@@ -186,8 +136,8 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {
     }
     if (req.status == .not_found) return null;

-    log.debug("Read {d} bytes from container credentials endpoint", .{aw.written().len});
-    if (aw.written().len == 0) return null;
+    log.debug("Read {d} bytes from container credentials endpoint", .{resp_payload.items.len});
+    if (resp_payload.items.len == 0) return null;

     const CredsResponse = struct {
         AccessKeyId: []const u8,
@@ -197,8 +147,8 @@ fn getContainerCredentials(allocator: std.mem.Allocator) !?auth.Credentials {
         Token: []const u8,
     };
     const creds_response = blk: {
-        const res = std.json.parseFromSlice(CredsResponse, allocator, aw.written(), .{}) catch |e| {
-            log.err("Unexpected Json response from container credentials endpoint: {s}", .{aw.written()});
+        const res = std.json.parseFromSlice(CredsResponse, allocator, resp_payload.items, .{}) catch |e| {
+            log.err("Unexpected Json response from container credentials endpoint: {s}", .{resp_payload.items});
             log.err("Error parsing json: {}", .{e});
             if (@errorReturnTrace()) |trace| {
                 std.debug.dumpStackTrace(trace.*);
@@ -225,27 +175,26 @@ fn getImdsv2Credentials(allocator: std.mem.Allocator) !?auth.Credentials {
     defer cl.deinit(); // I don't belive connection pooling would help much here as it's non-ssl and local
     // Get token
     {
-        var aw: std.Io.Writer.Allocating = .init(allocator);
-        defer aw.deinit();
-        const response_payload = &aw.writer;
+        var resp_payload = std.ArrayList(u8).init(allocator);
+        defer resp_payload.deinit();
         const req = try cl.fetch(.{
             .method = .PUT,
             .location = .{ .url = "http://169.254.169.254/latest/api/token" },
             .extra_headers = &[_]std.http.Header{
                 .{ .name = "X-aws-ec2-metadata-token-ttl-seconds", .value = "21600" },
             },
-            .response_writer = response_payload,
+            .response_storage = .{ .dynamic = &resp_payload },
         });
         if (req.status != .ok) {
             log.warn("Bad status code received from IMDS v2: {}", .{@intFromEnum(req.status)});
             return null;
         }
-        if (aw.written().len == 0) {
+        if (resp_payload.items.len == 0) {
             log.warn("Unexpected zero response from IMDS v2", .{});
             return null;
         }

-        token = try aw.toOwnedSlice();
+        token = try resp_payload.toOwnedSlice();
         errdefer if (token) |t| allocator.free(t);
     }
     std.debug.assert(token != null);
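Note: the hunk above is the first half of the IMDSv2 handshake: a PUT to /latest/api/token with a TTL header mints a session token, and every later metadata request must echo it back (the role-name and credentials hunks below do exactly that). A sketch of the token step in the 0.12 API:

const std = @import("std");

fn imdsToken(allocator: std.mem.Allocator, cl: *std.http.Client) ![]u8 {
    var buf = std.ArrayList(u8).init(allocator);
    defer buf.deinit();
    const res = try cl.fetch(.{
        .method = .PUT,
        .location = .{ .url = "http://169.254.169.254/latest/api/token" },
        .extra_headers = &[_]std.http.Header{
            .{ .name = "X-aws-ec2-metadata-token-ttl-seconds", .value = "21600" },
        },
        .response_storage = .{ .dynamic = &buf },
    });
    if (res.status != .ok) return error.TokenUnavailable;
    return buf.toOwnedSlice(); // send back as X-aws-ec2-metadata-token
}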
@@ -267,16 +216,15 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
     // "InstanceProfileArn" : "arn:aws:iam::550620852718:instance-profile/ec2-dev",
     // "InstanceProfileId" : "AIPAYAM4POHXCFNKZ7HU2"
     // }
-    var aw: std.Io.Writer.Allocating = .init(allocator);
-    defer aw.deinit();
-    const response_payload = &aw.writer;
+    var resp_payload = std.ArrayList(u8).init(allocator);
+    defer resp_payload.deinit();
     const req = try client.fetch(.{
         .method = .GET,
         .location = .{ .url = "http://169.254.169.254/latest/meta-data/iam/info" },
         .extra_headers = &[_]std.http.Header{
             .{ .name = "X-aws-ec2-metadata-token", .value = imds_token },
         },
-        .response_writer = response_payload,
+        .response_storage = .{ .dynamic = &resp_payload },
     });

     if (req.status != .ok and req.status != .not_found) {
@@ -284,7 +232,7 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
         return null;
     }
     if (req.status == .not_found) return null;
-    if (aw.written().len == 0) {
+    if (resp_payload.items.len == 0) {
         log.warn("Unexpected empty response from IMDS endpoint post token", .{});
         return null;
     }
@@ -295,8 +243,8 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
         InstanceProfileArn: []const u8,
         InstanceProfileId: []const u8,
     };
-    const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, aw.written(), .{}) catch |e| {
-        log.err("Unexpected Json response from IMDS endpoint: {s}", .{aw.written()});
+    const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, resp_payload.items, .{}) catch |e| {
+        log.err("Unexpected Json response from IMDS endpoint: {s}", .{resp_payload.items});
         log.err("Error parsing json: {}", .{e});
         if (@errorReturnTrace()) |trace| {
             std.debug.dumpStackTrace(trace.*);
@@ -318,16 +266,15 @@ fn getImdsRoleName(allocator: std.mem.Allocator, client: *std.http.Client, imds_
 fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, role_name: []const u8, imds_token: []u8) !?auth.Credentials {
     const url = try std.fmt.allocPrint(allocator, "http://169.254.169.254/latest/meta-data/iam/security-credentials/{s}/", .{role_name});
     defer allocator.free(url);
-    var aw: std.Io.Writer.Allocating = .init(allocator);
-    defer aw.deinit();
-    const response_payload = &aw.writer;
+    var resp_payload = std.ArrayList(u8).init(allocator);
+    defer resp_payload.deinit();
     const req = try client.fetch(.{
         .method = .GET,
         .location = .{ .url = url },
         .extra_headers = &[_]std.http.Header{
             .{ .name = "X-aws-ec2-metadata-token", .value = imds_token },
         },
-        .response_writer = response_payload,
+        .response_storage = .{ .dynamic = &resp_payload },
     });

     if (req.status != .ok and req.status != .not_found) {
@@ -335,7 +282,7 @@ fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, ro
         return null;
     }
     if (req.status == .not_found) return null;
-    if (aw.written().len == 0) {
+    if (resp_payload.items.len == 0) {
         log.warn("Unexpected empty response from IMDS role endpoint", .{});
         return null;
     }
@@ -350,8 +297,8 @@ fn getImdsCredentials(allocator: std.mem.Allocator, client: *std.http.Client, ro
         Token: []const u8,
         Expiration: []const u8,
     };
-    const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, aw.written(), .{}) catch |e| {
-        log.err("Unexpected Json response from IMDS endpoint: {s}", .{aw.written()});
+    const imds_response = std.json.parseFromSlice(ImdsResponse, allocator, resp_payload.items, .{}) catch |e| {
+        log.err("Unexpected Json response from IMDS endpoint: {s}", .{resp_payload.items});
         log.err("Error parsing json: {}", .{e});
         if (@errorReturnTrace()) |trace| {
             std.debug.dumpStackTrace(trace.*);
@@ -463,10 +410,7 @@ const PartialCredentials = struct {
 };
 fn credsForFile(allocator: std.mem.Allocator, file: ?std.fs.File, profile: []const u8) !PartialCredentials {
     if (file == null) return PartialCredentials{};
-    var fbuf: [1024]u8 = undefined;
-    var freader = file.?.reader(&fbuf);
-    var reader = &freader.interface;
-    const text = try reader.allocRemaining(allocator, .unlimited);
+    const text = try file.?.readToEndAlloc(allocator, std.math.maxInt(usize));
     defer allocator.free(text);
     const partial_creds = try credsForText(text, profile);
     var ak: ?[]const u8 = null;
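Note: same API split as the HTTP hunks, but for files: master builds a buffered std.Io.Reader (file.reader(&buf), then freader.interface, then allocRemaining), while zig-0.12.0 slurps the file in one call. Sketch of the 0.12 side:

const std = @import("std");

fn slurp(allocator: std.mem.Allocator, file: std.fs.File) ![]u8 {
    // readToEndAlloc caps the allocation; maxInt(usize) means "no practical
    // cap", matching the call in the hunk above.
    return file.readToEndAlloc(allocator, std.math.maxInt(usize));
}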
298  src/aws_http.zig
@@ -17,57 +17,7 @@ const CN_NORTHWEST_1_HASH = std.hash_map.hashString("cn-northwest-1");
 const US_ISO_EAST_1_HASH = std.hash_map.hashString("us-iso-east-1");
 const US_ISOB_EAST_1_HASH = std.hash_map.hashString("us-isob-east-1");

-const scoped_log = std.log.scoped(.awshttp);
+const log = std.log.scoped(.awshttp);

-/// Specifies logging level. This should not be touched unless the normal
-/// zig logging capabilities are inaccessible (e.g. during a build)
-pub var log_level: std.log.Level = .debug;
-
-/// Turn off logging completely
-pub var logs_off: bool = false;
-const log = struct {
-    /// Log an error message. This log level is intended to be used
-    /// when something has gone wrong. This might be recoverable or might
-    /// be followed by the program exiting.
-    pub fn err(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.err) <= @intFromEnum(log_level))
-            scoped_log.err(format, args);
-    }
-
-    /// Log a warning message. This log level is intended to be used if
-    /// it is uncertain whether something has gone wrong or not, but the
-    /// circumstances would be worth investigating.
-    pub fn warn(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.warn) <= @intFromEnum(log_level))
-            scoped_log.warn(format, args);
-    }
-
-    /// Log an info message. This log level is intended to be used for
-    /// general messages about the state of the program.
-    pub fn info(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.info) <= @intFromEnum(log_level))
-            scoped_log.info(format, args);
-    }
-
-    /// Log a debug message. This log level is intended to be used for
-    /// messages which are only useful for debugging.
-    pub fn debug(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.debug) <= @intFromEnum(log_level))
-            scoped_log.debug(format, args);
-    }
-};

 pub const AwsError = error{
     AddHeaderError,
@@ -90,37 +40,8 @@ pub const Options = struct {
     dualstack: bool = false,
     sigv4_service_name: ?[]const u8 = null,

-    mock: ?Mock = null,
-};
-
-/// mocking methods for isolated testing
-pub const Mock = struct {
-    /// Used to provide consistent signing
-    signing_time: ?i64,
-    /// context is desiged to be type-erased pointer (@intFromPtr)
-    context: usize = 0,
-    request_fn: *const fn (
-        usize,
-        std.http.Method,
-        std.Uri,
-        std.http.Client.RequestOptions,
-    ) std.http.Client.RequestError!std.http.Client.Request,
-    send_body_complete: *const fn (usize, []u8) std.Io.Writer.Error!void,
-    receive_head: *const fn (usize) std.http.Client.Request.ReceiveHeadError!std.http.Client.Response,
-    reader_decompressing: *const fn (usize) *std.Io.Reader,
-
-    fn request(m: Mock, method: std.http.Method, uri: std.Uri, options: std.http.Client.RequestOptions) std.http.Client.RequestError!std.http.Client.Request {
-        return m.request_fn(m.context, method, uri, options);
-    }
-    fn sendBodyComplete(m: Mock, body: []u8) std.Io.Writer.Error!void {
-        return m.send_body_complete(m.context, body);
-    }
-    fn receiveHead(m: Mock) std.http.Client.Request.ReceiveHeadError!std.http.Client.Response {
-        return m.receive_head(m.context);
-    }
-    fn readerDecompressing(m: Mock) *std.Io.Reader {
-        return m.reader_decompressing(m.context);
-    }
-};
+    /// Used for testing to provide consistent signing. If null, will use current time
+    signing_time: ?i64 = null,
+};

 pub const Header = std.http.Header;
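Note: the deleted Mock is a small hand-built vtable: a type-erased usize context plus plain function pointers, so tests can intercept request/send/receive without generics leaking into Options. A sketch of just the erasure trick (names are illustrative, not from the SDK):

const std = @import("std");

const Harness = struct {
    calls: usize = 0,
    fn onRequest(ctx: usize) void {
        const self: *Harness = @ptrFromInt(ctx); // recover the concrete pointer
        self.calls += 1;
    }
};

test "type-erased dispatch" {
    var h = Harness{};
    const request_fn: *const fn (usize) void = Harness.onRequest;
    request_fn(@intFromPtr(&h)); // what Mock.request does with m.context
    try std.testing.expectEqual(@as(usize, 1), h.calls);
}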
@@ -192,9 +113,9 @@ pub const AwsHttp = struct {
             .region = getRegion(service, options.region),
             .service = options.sigv4_service_name orelse service,
             .credentials = creds,
-            .signing_time = if (options.mock) |m| m.signing_time else null,
+            .signing_time = options.signing_time,
         };
-        return try self.makeRequest(endpoint, request, signing_config, options);
+        return try self.makeRequest(endpoint, request, signing_config);
     }

     /// makeRequest is a low level http/https function that can be used inside
@@ -213,13 +134,7 @@ pub const AwsHttp = struct {
     /// Content-Length: (length of body)
     ///
     /// Return value is an HttpResult, which will need the caller to deinit().
-    pub fn makeRequest(
-        self: Self,
-        endpoint: EndPoint,
-        request: HttpRequest,
-        signing_config: ?signing.Config,
-        options: Options,
-    ) !HttpResult {
+    pub fn makeRequest(self: Self, endpoint: EndPoint, request: HttpRequest, signing_config: ?signing.Config) !HttpResult {
         var request_cp = request;

         log.debug("Request Path: {s}", .{request_cp.path});
@@ -234,8 +149,8 @@ pub const AwsHttp = struct {
         // We will use endpoint instead
         request_cp.path = endpoint.path;

-        var request_headers = std.ArrayList(std.http.Header){};
-        defer request_headers.deinit(self.allocator);
+        var request_headers = std.ArrayList(std.http.Header).init(self.allocator);
+        defer request_headers.deinit();

         const len = try addHeaders(self.allocator, &request_headers, endpoint.host, request_cp.body, request_cp.content_type, request_cp.headers);
         defer if (len) |l| self.allocator.free(l);
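Note: this hunk is the clearest instance of the ArrayList change that repeats through the rest of the diff: master's default ArrayList is unmanaged (every call takes the allocator), zig-0.12.0's is managed (allocator captured at init). A master-style sketch; the 0.12 equivalents are in the comments:

const std = @import("std");

test "unmanaged list, master style" {
    const allocator = std.testing.allocator;
    var list = std.ArrayList(u8){}; // 0.12: std.ArrayList(u8).init(allocator)
    defer list.deinit(allocator); // 0.12: list.deinit()
    try list.append(allocator, 'x'); // 0.12: list.append('x')
    try std.testing.expectEqual(@as(usize, 1), list.items.len);
}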
@@ -248,10 +163,10 @@ pub const AwsHttp = struct {
             }
         }

-        var headers = std.ArrayList(std.http.Header){};
-        defer headers.deinit(self.allocator);
+        var headers = std.ArrayList(std.http.Header).init(self.allocator);
+        defer headers.deinit();
         for (request_cp.headers) |header|
-            try headers.append(self.allocator, .{ .name = header.name, .value = header.value });
+            try headers.append(.{ .name = header.name, .value = header.value });
         log.debug("All Request Headers:", .{});
         for (headers.items) |h| {
             log.debug("\t{s}: {s}", .{ h.name, h.value });
@@ -263,30 +178,20 @@ pub const AwsHttp = struct {
         // TODO: Fix this proxy stuff. This is all a kludge just to compile, but std.http.Client has it all built in now
         var cl = std.http.Client{ .allocator = self.allocator, .https_proxy = if (self.proxy) |*p| @constCast(p) else null };
         defer cl.deinit(); // TODO: Connection pooling

         const method = std.meta.stringToEnum(std.http.Method, request_cp.method).?;
-        // Fetch API in 0.15.1 is insufficient as it does not provide
-        // server headers. We'll construct and send the request ourselves
-        const uri = try std.Uri.parse(url);
-        const req_options: std.http.Client.RequestOptions = .{
-            // we need full control over most headers. I wish libraries would do a
-            // better job of having default headers as an opt-in...
-            .headers = .{
-                .host = .omit,
-                .authorization = .omit,
-                .user_agent = .omit,
-                .connection = .default, // we can let the client manage this...it has no impact to us
-                .accept_encoding = .default, // accept encoding (gzip, deflate) *should* be ok
-                .content_type = .omit,
-            },
-            .extra_headers = headers.items,
-        };
-
-        var req = if (options.mock) |m|
-            try m.request(method, uri, req_options) // This will call the test harness
-        else
-            try cl.request(method, uri, req_options);
-        defer req.deinit();
+        var server_header_buffer: [16 * 1024]u8 = undefined;
+        var resp_payload = std.ArrayList(u8).init(self.allocator);
+        defer resp_payload.deinit();
+        const req = try cl.fetch(.{
+            .server_header_buffer = &server_header_buffer,
+            .method = method,
+            .payload = if (request_cp.body.len > 0) request_cp.body else null,
+            .response_storage = .{ .dynamic = &resp_payload },
+            .raw_uri = true,
+            .location = .{ .url = url },
+            .extra_headers = headers.items,
+        });
         // TODO: Need to test for payloads > 2^14. I believe one of our tests does this, but not sure
         // if (request_cp.body.len > 0) {
         //     // Workaround for https://github.com/ziglang/zig/issues/15626
@@ -301,69 +206,33 @@ pub const AwsHttp = struct {
         //     }
         // }
         // try req.wait();

-        if (request_cp.body.len > 0) {
-            // This seems a bit silly, but we can't have a []const u8 here
-            // because when it sends, it's using a writer, and this becomes
-            // the buffer of the writer. It's conceivable that something
-            // in the chain then does actually modify the body of the request
-            // so we'll need to duplicate it here
-            const req_body = try self.allocator.dupe(u8, request_cp.body);
-            defer self.allocator.free(req_body); // docs for sendBodyComplete say it flushes, so no need to outlive this
-            if (options.mock) |m|
-                try m.sendBodyComplete(req_body)
-            else
-                try req.sendBodyComplete(req_body);
-        } else if (options.mock == null) try req.sendBodiless();
-
-        // if (options.mock == null) log.err("Request sent. Body len {d}, uri {f}", .{ request_cp.body.len, uri });
-        var response = if (options.mock) |m| try m.receiveHead() else try req.receiveHead(&.{});
-
         // TODO: Timeout - is this now above us?
         log.debug(
             "Request Complete. Response code {d}: {?s}",
-            .{ @intFromEnum(response.head.status), response.head.status.phrase() },
+            .{ @intFromEnum(req.status), req.status.phrase() },
         );
         log.debug("Response headers:", .{});
-        var resp_headers = std.ArrayList(Header){};
-        defer resp_headers.deinit(self.allocator);
-        var it = response.head.iterateHeaders();
+        var resp_headers = std.ArrayList(Header).init(
+            self.allocator,
+        );
+        defer resp_headers.deinit();
+        var it = std.http.HeaderIterator.init(server_header_buffer[0..]);
         while (it.next()) |h| { // even though we don't expect to fill the buffer,
             // we don't get a length, but looks via stdlib source
             // it should be ok to call next on the undefined memory
             log.debug(" {s}: {s}", .{ h.name, h.value });
-            try resp_headers.append(self.allocator, .{
+            try resp_headers.append(.{
                 .name = try (self.allocator.dupe(u8, h.name)),
                 .value = try (self.allocator.dupe(u8, h.value)),
             });
         }
-        // This is directly lifted from fetch, as there is no function in
-        // 0.15.1 client to negotiate decompression
-        const decompress_buffer: []u8 = switch (response.head.content_encoding) {
-            .identity => &.{},
-            .zstd => try self.allocator.alloc(u8, std.compress.zstd.default_window_len),
-            .deflate, .gzip => try self.allocator.alloc(u8, std.compress.flate.max_window_len),
-            .compress => return error.UnsupportedCompressionMethod,
-        };
-        defer self.allocator.free(decompress_buffer);
-
-        var transfer_buffer: [64]u8 = undefined;
-        var decompress: std.http.Decompress = undefined;
-        const reader = response.readerDecompressing(&transfer_buffer, &decompress, decompress_buffer);
-
-        // Not sure on optimal size here, but should definitely be > 0
-        var aw = try std.Io.Writer.Allocating.initCapacity(self.allocator, 128);
-        defer aw.deinit();
-        const response_writer = &aw.writer;
-        _ = reader.streamRemaining(response_writer) catch |err| switch (err) {
-            error.ReadFailed => return response.bodyErr().?,
-            else => |e| return e,
-        };
-        log.debug("raw response body:\n{s}", .{aw.written()});
+        log.debug("raw response body:\n{s}", .{resp_payload.items});

         const rc = HttpResult{
-            .response_code = @intFromEnum(response.head.status),
-            .body = try aw.toOwnedSlice(),
-            .headers = try resp_headers.toOwnedSlice(self.allocator),
+            .response_code = @intFromEnum(req.status),
+            .body = try resp_payload.toOwnedSlice(),
+            .headers = try resp_headers.toOwnedSlice(),
             .allocator = self.allocator,
         };
         return rc;
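Note: the 0.12 side above relies on fetch writing raw response headers into the caller-supplied server_header_buffer, which std.http.HeaderIterator then walks after the call; master instead gets them from response.head.iterateHeaders(). Sketch of the 0.12 recovery step:

const std = @import("std");

fn logHeaders(server_header_buffer: []u8) void {
    var it = std.http.HeaderIterator.init(server_header_buffer);
    while (it.next()) |h| {
        std.log.debug("{s}: {s}", .{ h.name, h.value });
    }
}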
@@ -372,25 +241,18 @@ pub const AwsHttp = struct {

 fn getRegion(service: []const u8, region: []const u8) []const u8 {
     if (std.mem.eql(u8, service, "cloudfront")) return "us-east-1";
-    if (std.mem.eql(u8, service, "iam")) return "us-east-1";
     return region;
 }

-fn addHeaders(
-    allocator: std.mem.Allocator,
-    headers: *std.ArrayList(std.http.Header),
-    host: []const u8,
-    body: []const u8,
-    content_type: []const u8,
-    additional_headers: []const Header,
-) !?[]const u8 {
-    // We don't need body because they were to add a Content-Length header. But
-    // that is being added by the client send() function, so we don't want it
-    // on the request twice. But I also feel pretty strongly that send() should
-    // be providing us control, because I think if we don't add it here, it
-    // won't get signed, and we would really prefer it to be signed. So, we
-    // will wait and watch for this situation to change in stdlib
-
+fn addHeaders(allocator: std.mem.Allocator, headers: *std.ArrayList(std.http.Header), host: []const u8, body: []const u8, content_type: []const u8, additional_headers: []const Header) !?[]const u8 {
+    // We don't need allocator and body because they were to add a
+    // Content-Length header. But that is being added by the client send()
+    // function, so we don't want it on the request twice. But I also feel
+    // pretty strongly that send() should be providing us control, because
+    // I think if we don't add it here, it won't get signed, and we would
+    // really prefer it to be signed. So, we will wait and watch for this
+    // situation to change in stdlib
+    _ = allocator;
     _ = body;
     var has_content_type = false;
     for (additional_headers) |h| {
@@ -399,12 +261,12 @@ fn addHeaders(
             break;
         }
     }
-    try headers.append(allocator, .{ .name = "Accept", .value = "application/json" });
-    try headers.append(allocator, .{ .name = "Host", .value = host });
-    try headers.append(allocator, .{ .name = "User-Agent", .value = "zig-aws 1.0" });
+    try headers.append(.{ .name = "Accept", .value = "application/json" });
+    try headers.append(.{ .name = "Host", .value = host });
+    try headers.append(.{ .name = "User-Agent", .value = "zig-aws 1.0" });
     if (!has_content_type)
-        try headers.append(allocator, .{ .name = "Content-Type", .value = content_type });
-    try headers.appendSlice(allocator, additional_headers);
+        try headers.append(.{ .name = "Content-Type", .value = content_type });
+    try headers.appendSlice(additional_headers);
     return null;
 }

@@ -466,26 +328,6 @@ fn endpointException(
     dualstack: []const u8,
     domain: []const u8,
 ) !?EndPoint {
-    // Global endpoints (https://docs.aws.amazon.com/general/latest/gr/rande.html#global-endpoints):
-    // ✓ Amazon CloudFront
-    // AWS Global Accelerator
-    // ✓ AWS Identity and Access Management (IAM)
-    // AWS Network Manager
-    // AWS Organizations
-    // Amazon Route 53
-    // AWS Shield Advanced
-    // AWS WAF Classic
-
-    if (std.mem.eql(u8, service, "iam")) {
-        return EndPoint{
-            .uri = try allocator.dupe(u8, "https://iam.amazonaws.com"),
-            .host = try allocator.dupe(u8, "iam.amazonaws.com"),
-            .scheme = "https",
-            .port = 443,
-            .allocator = allocator,
-            .path = try allocator.dupe(u8, request.path),
-        };
-    }
     if (std.mem.eql(u8, service, "cloudfront")) {
         return EndPoint{
             .uri = try allocator.dupe(u8, "https://cloudfront.amazonaws.com"),
@@ -540,19 +382,41 @@ fn s3BucketFromPath(path: []const u8) []const u8 {
 /// allocator: Will be used only to construct the EndPoint struct
 /// uri: string constructed in such a way that deallocation is needed
 fn endPointFromUri(allocator: std.mem.Allocator, uri: []const u8, path: []const u8) !EndPoint {
-    const parsed_uri = try std.Uri.parse(uri);
-    const scheme = parsed_uri.scheme;
-    const host = try allocator.dupe(u8, parsed_uri.host.?.percent_encoded);
-    const port: u16 = blk: {
-        if (parsed_uri.port) |port| break :blk port;
-        if (std.mem.eql(u8, scheme, "http")) break :blk 80;
-        if (std.mem.eql(u8, scheme, "https")) break :blk 443;
-        break :blk 0;
-    };
+    var scheme: []const u8 = "";
+    var host: []const u8 = "";
+    var port: u16 = 443;
+    var host_start: usize = 0;
+    var host_end: usize = 0;
+    for (uri, 0..) |ch, i| {
+        switch (ch) {
+            ':' => {
+                if (!std.mem.eql(u8, scheme, "")) {
+                    // here to end is port - this is likely a bug if ipv6 address used
+                    const rest_of_uri = uri[i + 1 ..];
+                    port = try std.fmt.parseUnsigned(u16, rest_of_uri, 10);
+                    host_end = i;
+                }
+            },
+            '/' => {
+                if (host_start == 0) {
+                    host_start = i + 2;
+                    scheme = uri[0 .. i - 1];
+                    if (std.mem.eql(u8, scheme, "http")) {
+                        port = 80;
+                    } else {
+                        port = 443;
+                    }
+                }
+            },
+            else => continue,
+        }
+    }
+    if (host_end == 0) {
+        host_end = uri.len;
+    }
+    host = try allocator.dupe(u8, uri[host_start..host_end]);

     log.debug("host: {s}, scheme: {s}, port: {}", .{ host, scheme, port });

     return EndPoint{
         .uri = uri,
         .host = host,
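Note: zig-0.12.0 scans the URI by hand, and its own comment flags the ':' case as a likely bug for IPv6 literals; master delegates to std.Uri.parse and only fills in scheme-default ports. A sketch of the master approach (the unknown-scheme fallback here is simplified to 443):

const std = @import("std");

fn defaultPort(uri_text: []const u8) !u16 {
    const parsed = try std.Uri.parse(uri_text);
    if (parsed.port) |p| return p;
    return if (std.mem.eql(u8, parsed.scheme, "http")) 80 else 443;
}

test "scheme default ports" {
    try std.testing.expectEqual(@as(u16, 8080), try defaultPort("http://localhost:8080/path"));
    try std.testing.expectEqual(@as(u16, 443), try defaultPort("https://example.com/path"));
}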
@@ -1,5 +1,6 @@
 //! This module provides base data structures for aws http requests
 const std = @import("std");
+const log = std.log.scoped(.aws_base);
 pub const Request = struct {
     path: []const u8 = "/",
     query: []const u8 = "",
@@ -21,7 +22,7 @@ pub const Result = struct {
             self.allocator.free(h.value);
         }
         self.allocator.free(self.headers);
-        //log.debug("http result deinit complete", .{});
+        log.debug("http result deinit complete", .{});
         return;
     }
 };
@@ -1,59 +1,10 @@
 const std = @import("std");
 const base = @import("aws_http_base.zig");
 const auth = @import("aws_authentication.zig");
-const date = @import("date");
+const date = @import("date.zig");

-const scoped_log = std.log.scoped(.aws_signing);
+const log = std.log.scoped(.aws_signing);

-/// Specifies logging level. This should not be touched unless the normal
-/// zig logging capabilities are inaccessible (e.g. during a build)
-pub var log_level: std.log.Level = .debug;
-
-/// Turn off logging completely
-pub var logs_off: bool = false;
-const log = struct {
-    /// Log an error message. This log level is intended to be used
-    /// when something has gone wrong. This might be recoverable or might
-    /// be followed by the program exiting.
-    pub fn err(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.err) <= @intFromEnum(log_level))
-            scoped_log.err(format, args);
-    }
-
-    /// Log a warning message. This log level is intended to be used if
-    /// it is uncertain whether something has gone wrong or not, but the
-    /// circumstances would be worth investigating.
-    pub fn warn(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.warn) <= @intFromEnum(log_level))
-            scoped_log.warn(format, args);
-    }
-
-    /// Log an info message. This log level is intended to be used for
-    /// general messages about the state of the program.
-    pub fn info(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.info) <= @intFromEnum(log_level))
-            scoped_log.info(format, args);
-    }
-
-    /// Log a debug message. This log level is intended to be used for
-    /// messages which are only useful for debugging.
-    pub fn debug(
-        comptime format: []const u8,
-        args: anytype,
-    ) void {
-        if (!logs_off and @intFromEnum(std.log.Level.debug) <= @intFromEnum(log_level))
-            scoped_log.debug(format, args);
-    }
-};

 // TODO: Remove this?! This is an aws_signing, so we should know a thing
 // or two about aws. So perhaps the right level of abstraction here
 // is to have our service signing idiosyncracies dealt with in this
@@ -157,7 +108,7 @@ pub const SigningError = error{
     XAmzExpiresHeaderInRequest,
     /// Used if the request headers already includes x-amz-region-set
     XAmzRegionSetHeaderInRequest,
-} || error{OutOfMemory};
+} || std.fmt.AllocPrintError;

 const forbidden_headers = .{
     .{ .name = "x-amz-content-sha256", .err = SigningError.XAmzContentSha256HeaderInRequest },
@@ -240,10 +191,6 @@ pub fn signRequest(allocator: std.mem.Allocator, request: base.Request, config:
     // regardless of whether we're sticking the header on the request
     std.debug.assert(config.signed_body_header == .none or
         config.signed_body_header == .sha256);
-    log.debug(
-        "Request body len: {d}. First 5 bytes (max): {s}",
-        .{ request.body.len, request.body[0..@min(request.body.len, 5)] },
-    );
     const payload_hash = try hash(allocator, request.body, .sha256);
     if (config.signed_body_header == .sha256) {
         // From the AWS nitro enclaves SDK, it appears that there is no reason
@@ -316,12 +263,12 @@ pub fn signRequest(allocator: std.mem.Allocator, request: base.Request, config:
         .name = "Authorization",
         .value = try std.fmt.allocPrint(
             allocator,
-            "AWS4-HMAC-SHA256 Credential={s}/{s}, SignedHeaders={s}, Signature={x}",
+            "AWS4-HMAC-SHA256 Credential={s}/{s}, SignedHeaders={s}, Signature={s}",
             .{
                 config.credentials.access_key,
                 scope,
                 canonical_request.headers.signed_headers,
-                signature,
+                std.fmt.fmtSliceHexLower(signature),
             },
         ),
     };
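Note: only the hex rendering of the signature changes here: newer std.fmt prints a byte slice as hex with "{x}" directly, while 0.12 wraps the slice in std.fmt.fmtSliceHexLower and prints it with "{s}". A 0.12-style sketch:

const std = @import("std");

test "lowercase hex signature rendering (0.12 style)" {
    const sig = [_]u8{ 0xde, 0xad, 0xbe, 0xef };
    const rendered = try std.fmt.allocPrint(
        std.testing.allocator,
        "Signature={s}",
        .{std.fmt.fmtSliceHexLower(&sig)},
    );
    defer std.testing.allocator.free(rendered);
    try std.testing.expectEqualStrings("Signature=deadbeef", rendered);
}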
@@ -352,7 +299,7 @@ pub fn freeSignedRequest(allocator: std.mem.Allocator, request: *base.Request, c

 pub const credentialsFn = *const fn ([]const u8) ?Credentials;

-pub fn verifyServerRequest(allocator: std.mem.Allocator, request: *std.http.Server.Request, request_body_reader: *std.Io.Reader, credentials_fn: credentialsFn) !bool {
+pub fn verifyServerRequest(allocator: std.mem.Allocator, request: *std.http.Server.Request, request_body_reader: anytype, credentials_fn: credentialsFn) !bool {
     var unverified_request = try UnverifiedRequest.init(allocator, request);
     defer unverified_request.deinit();
     return verify(allocator, unverified_request, request_body_reader, credentials_fn);
@@ -363,19 +310,17 @@ pub const UnverifiedRequest = struct {
     target: []const u8,
     method: std.http.Method,
     allocator: std.mem.Allocator,
-    raw: *std.http.Server.Request,

     pub fn init(allocator: std.mem.Allocator, request: *std.http.Server.Request) !UnverifiedRequest {
-        var al = std.ArrayList(std.http.Header){};
-        defer al.deinit(allocator);
+        var al = std.ArrayList(std.http.Header).init(allocator);
+        defer al.deinit();
         var it = request.iterateHeaders();
-        while (it.next()) |h| try al.append(allocator, h);
+        while (it.next()) |h| try al.append(h);
         return .{
             .target = request.head.target,
             .method = request.head.method,
-            .headers = try al.toOwnedSlice(allocator),
+            .headers = try al.toOwnedSlice(),
             .allocator = allocator,
-            .raw = request,
         };
     }

@@ -393,7 +338,7 @@ pub const UnverifiedRequest = struct {
     }
 };

-pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_body_reader: *std.Io.Reader, credentials_fn: credentialsFn) !bool {
+pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_body_reader: anytype, credentials_fn: credentialsFn) !bool {
     var arena = std.heap.ArenaAllocator.init(allocator);
     defer arena.deinit();
     const aa = arena.allocator();
@@ -426,10 +371,10 @@ pub fn verify(allocator: std.mem.Allocator, request: UnverifiedRequest, request_
     return verifyParsedAuthorization(
         aa,
         request,
+        request_body_reader,
         credential.?,
         signed_headers.?,
         signature.?,
-        request_body_reader,
         credentials_fn,
     );
 }
@@ -437,17 +382,17 @@ fn verifyParsedAuthorization(
 fn verifyParsedAuthorization(
     allocator: std.mem.Allocator,
     request: UnverifiedRequest,
+    request_body_reader: anytype,
     credential: []const u8,
     signed_headers: []const u8,
     signature: []const u8,
-    request_body_reader: *std.Io.Reader,
     credentials_fn: credentialsFn,
 ) !bool {
     // AWS4-HMAC-SHA256
     // Credential=ACCESS/20230908/us-west-2/s3/aws4_request
     // SignedHeaders=accept;content-length;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class
     // Signature=fcc43ce73a34c9bd1ddf17e8a435f46a859812822f944f9eeb2aabcd64b03523
-    var credential_iterator = std.mem.splitScalar(u8, credential, '/');
+    var credential_iterator = std.mem.split(u8, credential, "/");
     const access_key = credential_iterator.next().?;
     const credentials = credentials_fn(access_key) orelse return error.CredentialsNotFound;
     // TODO: https://stackoverflow.com/questions/29276609/aws-authentication-requires-a-valid-date-or-x-amz-date-header-curl
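Note: std.mem.split (0.12, delimiter given as a string) was later split into splitScalar and splitSequence, which is all this hunk reflects. Parsing the Credential scope either way yields the same iterator; 0.12 spelling shown:

const std = @import("std");

test "credential scope fields" {
    var it = std.mem.split(u8, "ACCESS/20230908/us-west-2/s3/aws4_request", "/");
    try std.testing.expectEqualStrings("ACCESS", it.next().?);
    try std.testing.expectEqualStrings("20230908", it.next().?);
    try std.testing.expectEqualStrings("us-west-2", it.next().?);
}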
@@ -500,7 +445,7 @@ fn verifyParsedAuthorization(
         .content_type = request.getFirstHeaderValue("content-type").?,
     };
     signed_request.query = request.target[signed_request.path.len..]; // TODO: should this be +1? query here would include '?'
-    signed_request.body = try request_body_reader.allocRemaining(allocator, .unlimited);
+    signed_request.body = try request_body_reader.readAllAlloc(allocator, std.math.maxInt(usize));
     defer allocator.free(signed_request.body);
     signed_request = try signRequest(allocator, signed_request, config);
     defer freeSignedRequest(allocator, &signed_request, config);
@@ -551,7 +496,7 @@ fn getSigningKey(allocator: std.mem.Allocator, signing_date: []const u8, config:
     defer {
         // secureZero avoids compiler optimizations that may say
        // "WTF are you doing this thing? Looks like nothing to me. It's silly and we will remove it"
-        std.crypto.secureZero(u8, secret); // zero our copy of secret
+        std.crypto.utils.secureZero(u8, secret); // zero our copy of secret
         allocator.free(secret);
     }
     // log.debug("secret: {s}", .{secret});
@@ -668,18 +613,18 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
     }
     defer allocator.free(encoded_once);
     var encoded_twice = try encodeUri(allocator, encoded_once);
-    defer allocator.free(encoded_twice);
     log.debug("encoded path (2): {s}", .{encoded_twice});
     if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
-        return try allocator.dupe(u8, encoded_twice[0..i]);
+        _ = allocator.resize(encoded_twice, i);
+        return encoded_twice[0..i];
     }
-    return try allocator.dupe(u8, encoded_twice);
+    return encoded_twice;
 }

 fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
     const unreserved_marks = "-_.!~*'()";
     var encoded = try std.ArrayList(u8).initCapacity(allocator, path.len);
-    defer encoded.deinit(allocator);
+    defer encoded.deinit();
     for (path) |c| {
         var should_encode = true;
         for (unreserved_marks) |r|
@@ -691,16 +636,16 @@ fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
             should_encode = false;

         if (!should_encode) {
-            try encoded.append(allocator, c);
+            try encoded.append(c);
             continue;
         }
         // Whatever remains, encode it
-        try encoded.append(allocator, '%');
-        const hex = try std.fmt.allocPrint(allocator, "{X}", .{&[_]u8{c}});
+        try encoded.append('%');
+        const hex = try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexUpper(&[_]u8{c})});
         defer allocator.free(hex);
-        try encoded.appendSlice(allocator, hex);
+        try encoded.appendSlice(hex);
     }
-    return encoded.toOwnedSlice(allocator);
+    return encoded.toOwnedSlice();
 }

 // URI encode every byte except the unreserved characters:
@@ -721,7 +666,7 @@ fn encodeUri(allocator: std.mem.Allocator, path: []const u8) ![]u8 {
     const reserved_characters = ";,/?:@&=+$#";
     const unreserved_marks = "-_.!~*'()";
     var encoded = try std.ArrayList(u8).initCapacity(allocator, path.len);
-    defer encoded.deinit(allocator);
+    defer encoded.deinit();
     // if (std.mem.startsWith(u8, path, "/2017-03-31/tags/arn")) {
     //     try encoded.appendSlice("/2017-03-31/tags/arn%25253Aaws%25253Alambda%25253Aus-west-2%25253A550620852718%25253Afunction%25253Aawsome-lambda-LambdaStackawsomeLambda");
     //     return encoded.toOwnedSlice();
@@ -744,16 +689,16 @@ fn encodeUri(allocator: std.mem.Allocator, path: []const u8) ![]u8 {
             should_encode = false;

         if (!should_encode) {
-            try encoded.append(allocator, c);
+            try encoded.append(c);
             continue;
         }
         // Whatever remains, encode it
-        try encoded.append(allocator, '%');
-        const hex = try std.fmt.allocPrint(allocator, "{X}", .{&[_]u8{c}});
+        try encoded.append('%');
+        const hex = try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexUpper(&[_]u8{c})});
         defer allocator.free(hex);
-        try encoded.appendSlice(allocator, hex);
+        try encoded.appendSlice(hex);
     }
-    return encoded.toOwnedSlice(allocator);
+    return encoded.toOwnedSlice();
 }

 fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
|
@ -805,26 +750,26 @@ fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const
|
||||||
const query = path[first_question.? + 1 ..];
|
const query = path[first_question.? + 1 ..];
|
||||||
|
|
||||||
// Split this by component
|
// Split this by component
|
||||||
var portions = std.mem.splitScalar(u8, query, '&');
|
var portions = std.mem.split(u8, query, "&");
|
||||||
var sort_me = std.ArrayList([]const u8){};
|
var sort_me = std.ArrayList([]const u8).init(allocator);
|
||||||
defer sort_me.deinit(allocator);
|
defer sort_me.deinit();
|
||||||
while (portions.next()) |item|
|
while (portions.next()) |item|
|
||||||
try sort_me.append(allocator, item);
|
try sort_me.append(item);
|
||||||
std.sort.pdq([]const u8, sort_me.items, {}, lessThanBinary);
|
std.sort.pdq([]const u8, sort_me.items, {}, lessThanBinary);
|
||||||
|
|
||||||
var normalized = try std.ArrayList(u8).initCapacity(allocator, path.len);
|
var normalized = try std.ArrayList(u8).initCapacity(allocator, path.len);
|
||||||
defer normalized.deinit(allocator);
|
defer normalized.deinit();
|
||||||
var first = true;
|
var first = true;
|
||||||
for (sort_me.items) |i| {
|
for (sort_me.items) |i| {
|
||||||
if (!first) try normalized.append(allocator, '&');
|
if (!first) try normalized.append('&');
|
||||||
first = false;
|
first = false;
|
||||||
const first_equals = std.mem.indexOf(u8, i, "=");
|
const first_equals = std.mem.indexOf(u8, i, "=");
|
||||||
if (first_equals == null) {
|
if (first_equals == null) {
|
||||||
// Rare. This is "foo="
|
// Rare. This is "foo="
|
||||||
const normed_item = try encodeUri(allocator, i);
|
const normed_item = try encodeUri(allocator, i);
|
||||||
defer allocator.free(normed_item);
|
defer allocator.free(normed_item);
|
||||||
try normalized.appendSlice(allocator, i); // This should be encoded
|
try normalized.appendSlice(i); // This should be encoded
|
||||||
try normalized.append(allocator, '=');
|
try normalized.append('=');
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -837,12 +782,12 @@ fn canonicalQueryString(allocator: std.mem.Allocator, path: []const u8) ![]const
         // Double-encode any = in the value. But not anything else?
         const weird_equals_in_value_thing = try replace(allocator, value, "%3D", "%253D");
         defer allocator.free(weird_equals_in_value_thing);
-        try normalized.appendSlice(allocator, key);
-        try normalized.append(allocator, '=');
-        try normalized.appendSlice(allocator, weird_equals_in_value_thing);
+        try normalized.appendSlice(key);
+        try normalized.append('=');
+        try normalized.appendSlice(weird_equals_in_value_thing);
     }

-    return normalized.toOwnedSlice(allocator);
+    return normalized.toOwnedSlice();
 }

 fn replace(allocator: std.mem.Allocator, haystack: []const u8, needle: []const u8, replacement_value: []const u8) ![]const u8 {
@ -881,7 +826,7 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
|
||||||
allocator.free(h.name);
|
allocator.free(h.name);
|
||||||
allocator.free(h.value);
|
allocator.free(h.value);
|
||||||
}
|
}
|
||||||
dest.deinit(allocator);
|
dest.deinit();
|
||||||
}
|
}
|
||||||
var total_len: usize = 0;
|
var total_len: usize = 0;
|
||||||
var total_name_len: usize = 0;
|
var total_name_len: usize = 0;
|
||||||
|
@ -911,15 +856,15 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
|
||||||
defer allocator.free(value);
|
defer allocator.free(value);
|
||||||
const n = try std.ascii.allocLowerString(allocator, h.name);
|
const n = try std.ascii.allocLowerString(allocator, h.name);
|
||||||
const v = try std.fmt.allocPrint(allocator, "{s}", .{value});
|
const v = try std.fmt.allocPrint(allocator, "{s}", .{value});
|
||||||
try dest.append(allocator, .{ .name = n, .value = v });
|
try dest.append(.{ .name = n, .value = v });
|
||||||
}
|
}
|
||||||
|
|
||||||
std.sort.pdq(std.http.Header, dest.items, {}, lessThan);
|
std.sort.pdq(std.http.Header, dest.items, {}, lessThan);
|
||||||
|
|
||||||
var dest_str = try std.ArrayList(u8).initCapacity(allocator, total_len);
|
var dest_str = try std.ArrayList(u8).initCapacity(allocator, total_len);
|
||||||
defer dest_str.deinit(allocator);
|
defer dest_str.deinit();
|
||||||
var signed_headers = try std.ArrayList(u8).initCapacity(allocator, total_name_len);
|
var signed_headers = try std.ArrayList(u8).initCapacity(allocator, total_name_len);
|
||||||
defer signed_headers.deinit(allocator);
|
defer signed_headers.deinit();
|
||||||
var first = true;
|
var first = true;
|
||||||
for (dest.items) |h| {
|
for (dest.items) |h| {
|
||||||
dest_str.appendSliceAssumeCapacity(h.name);
|
dest_str.appendSliceAssumeCapacity(h.name);
|
||||||
|
@ -932,8 +877,8 @@ fn canonicalHeaders(allocator: std.mem.Allocator, headers: []const std.http.Head
|
||||||
signed_headers.appendSliceAssumeCapacity(h.name);
|
signed_headers.appendSliceAssumeCapacity(h.name);
|
||||||
}
|
}
|
||||||
return CanonicalHeaders{
|
return CanonicalHeaders{
|
||||||
.str = try dest_str.toOwnedSlice(allocator),
|
.str = try dest_str.toOwnedSlice(),
|
||||||
.signed_headers = try signed_headers.toOwnedSlice(allocator),
|
.signed_headers = try signed_headers.toOwnedSlice(),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -942,7 +887,6 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
|
||||||
const in_quote = false;
|
const in_quote = false;
|
||||||
var start: usize = 0;
|
var start: usize = 0;
|
||||||
const rc = try allocator.alloc(u8, value.len);
|
const rc = try allocator.alloc(u8, value.len);
|
||||||
defer allocator.free(rc);
|
|
||||||
var rc_inx: usize = 0;
|
var rc_inx: usize = 0;
|
||||||
for (value, 0..) |c, i| {
|
for (value, 0..) |c, i| {
|
||||||
if (!started and !std.ascii.isWhitespace(c)) {
|
if (!started and !std.ascii.isWhitespace(c)) {
|
||||||
|
@ -960,7 +904,8 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
|
||||||
// Trim end
|
// Trim end
|
||||||
while (std.ascii.isWhitespace(rc[rc_inx - 1]))
|
while (std.ascii.isWhitespace(rc[rc_inx - 1]))
|
||||||
rc_inx -= 1;
|
rc_inx -= 1;
|
||||||
return try allocator.dupe(u8, rc[0..rc_inx]);
|
_ = allocator.resize(rc, rc_inx);
|
||||||
|
return rc[0..rc_inx];
|
||||||
}
|
}
|
||||||
fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
|
fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
|
||||||
_ = context;
|
_ = context;
|
||||||
|
@ -978,7 +923,7 @@ fn hash(allocator: std.mem.Allocator, payload: []const u8, sig_type: SignatureTy
|
||||||
};
|
};
|
||||||
var out: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined;
|
var out: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined;
|
||||||
std.crypto.hash.sha2.Sha256.hash(to_hash, &out, .{});
|
std.crypto.hash.sha2.Sha256.hash(to_hash, &out, .{});
|
||||||
return try std.fmt.allocPrint(allocator, "{x}", .{out});
|
return try std.fmt.allocPrint(allocator, "{s}", .{std.fmt.fmtSliceHexLower(&out)});
|
||||||
}
|
}
|
||||||
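A note on the hash hunk above: newer Zig removed std.fmt.fmtSliceHexLower, and "{x}" applied to a byte array now hex-encodes it directly, so the two sides differ only in how the digest is formatted. A minimal sketch of the master-side idiom (assumes a 0.14+/0.15-era std; the empty-string digest prefix checked here is the well-known SHA-256 constant):

const std = @import("std");

test "hex-encode a SHA-256 digest with {x}" {
    var out: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined;
    std.crypto.hash.sha2.Sha256.hash("", &out, .{});
    // "{x}" formats each byte as lowercase hex, replacing fmtSliceHexLower.
    const hex = try std.fmt.allocPrint(std.testing.allocator, "{x}", .{out});
    defer std.testing.allocator.free(hex);
    try std.testing.expect(std.mem.startsWith(u8, hex, "e3b0c44298fc1c14"));
}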
 // SignedHeaders + '\n' +
 // HexEncode(Hash(RequestPayload))
 
@@ -992,7 +937,6 @@ test "canonical uri" {
     const path = "/documents and settings/?foo=bar";
     const expected = "/documents%2520and%2520settings/";
     const actual = try canonicalUri(allocator, path, true);
 
     defer allocator.free(actual);
     try std.testing.expectEqualStrings(expected, actual);
 
@@ -1016,13 +960,13 @@ test "canonical query" {
 test "canonical headers" {
     const allocator = std.testing.allocator;
     var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
-    defer headers.deinit(allocator);
+    defer headers.deinit();
-    try headers.append(allocator, .{ .name = "Host", .value = "iam.amazonaws.com" });
+    try headers.append(.{ .name = "Host", .value = "iam.amazonaws.com" });
-    try headers.append(allocator, .{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
+    try headers.append(.{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
-    try headers.append(allocator, .{ .name = "User-Agent", .value = "This header should be skipped" });
+    try headers.append(.{ .name = "User-Agent", .value = "This header should be skipped" });
-    try headers.append(allocator, .{ .name = "My-header1", .value = " a b c " });
+    try headers.append(.{ .name = "My-header1", .value = " a b c " });
-    try headers.append(allocator, .{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
+    try headers.append(.{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
-    try headers.append(allocator, .{ .name = "My-header2", .value = " \"a b c\" " });
+    try headers.append(.{ .name = "My-header2", .value = " \"a b c\" " });
     const expected =
         \\content-type:application/x-www-form-urlencoded; charset=utf-8
         \\host:iam.amazonaws.com
 
@@ -1041,12 +985,12 @@ test "canonical headers" {
 test "canonical request" {
     const allocator = std.testing.allocator;
     var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
-    defer headers.deinit(allocator);
+    defer headers.deinit();
-    try headers.append(allocator, .{ .name = "User-agent", .value = "c sdk v1.0" });
+    try headers.append(.{ .name = "User-agent", .value = "c sdk v1.0" });
     // In contrast to AWS CRT (aws-c-auth), we add the date as part of the
     // signing operation. They add it as part of the canonicalization
-    try headers.append(allocator, .{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
+    try headers.append(.{ .name = "X-Amz-Date", .value = "20150830T123600Z" });
-    try headers.append(allocator, .{ .name = "Host", .value = "example.amazonaws.com" });
+    try headers.append(.{ .name = "Host", .value = "example.amazonaws.com" });
     const req = base.Request{
         .path = "/",
         .method = "GET",
 
@@ -1101,10 +1045,10 @@ test "can sign" {
 
     const allocator = std.testing.allocator;
     var headers = try std.ArrayList(std.http.Header).initCapacity(allocator, 5);
-    defer headers.deinit(allocator);
+    defer headers.deinit();
-    try headers.append(allocator, .{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
+    try headers.append(.{ .name = "Content-Type", .value = "application/x-www-form-urlencoded; charset=utf-8" });
-    try headers.append(allocator, .{ .name = "Content-Length", .value = "13" });
+    try headers.append(.{ .name = "Content-Length", .value = "13" });
-    try headers.append(allocator, .{ .name = "Host", .value = "example.amazonaws.com" });
+    try headers.append(.{ .name = "Host", .value = "example.amazonaws.com" });
     const req = base.Request{
         .path = "/",
         .query = "",
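The pattern repeated throughout these test hunks is Zig's ArrayList API shift: on the zig-0.12.0 branch the list is managed and stores its allocator at init, while on master the default list is unmanaged and takes the allocator at each call. A minimal sketch of the two idioms side by side (written against Zig 0.12, where std.ArrayListUnmanaged spells the master-side convention):

const std = @import("std");

test "managed vs unmanaged ArrayList idioms" {
    const allocator = std.testing.allocator;

    // zig-0.12.0 branch style: the list owns its allocator.
    var managed = std.ArrayList(u8).init(allocator);
    defer managed.deinit();
    try managed.append('a');

    // master branch style: the allocator rides along on every call.
    var unmanaged: std.ArrayListUnmanaged(u8) = .{};
    defer unmanaged.deinit(allocator);
    try unmanaged.append(allocator, 'a');
}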
@@ -1171,27 +1115,25 @@ test "can verify server request" {
         "X-Amz-Date: 20230908T170252Z\r\n" ++
         "x-amz-content-sha256: fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9\r\n" ++
         "Authorization: AWS4-HMAC-SHA256 Credential=ACCESS/20230908/us-west-2/s3/aws4_request, SignedHeaders=accept;content-length;content-type;host;x-amz-content-sha256;x-amz-date;x-amz-storage-class, Signature=fcc43ce73a34c9bd1ddf17e8a435f46a859812822f944f9eeb2aabcd64b03523\r\n\r\nbar";
-    var reader = std.Io.Reader.fixed(req);
+    var read_buffer: [1024]u8 = undefined;
-    var body_reader = std.Io.Reader.fixed("bar");
+    @memcpy(read_buffer[0..req.len], req);
     var server: std.http.Server = .{
-        .out = undefined, // We're not sending a response here
+        .connection = undefined,
-        .reader = .{
+        .state = .ready,
-            .in = &reader,
+        .read_buffer = &read_buffer,
-            .interface = undefined,
+        .read_buffer_len = req.len,
-            .state = .received_head,
+        .next_request_start = 0,
-            .max_head_len = req.len,
-        },
     };
     var request: std.http.Server.Request = .{
         .server = &server,
-        .head = try std.http.Server.Request.Head.parse(req),
+        .head_end = req.len - 3,
-        .head_buffer = req,
+        .head = try std.http.Server.Request.Head.parse(read_buffer[0 .. req.len - 3]),
+        .reader_state = undefined,
     };
 
-    // const old_level = std.testing.log_level;
     // std.testing.log_level = .debug;
-    // defer std.testing.log_level = old_level;
+    var fbs = std.io.fixedBufferStream("bar");
-    try std.testing.expect(try verifyServerRequest(allocator, &request, &body_reader, struct {
+    try std.testing.expect(try verifyServerRequest(allocator, &request, fbs.reader(), struct {
         cred: Credentials,
 
         const Self = @This();
 
@@ -1229,25 +1171,22 @@ test "can verify server request without x-amz-content-sha256" {
     const req_data = head ++ body;
     var read_buffer: [2048]u8 = undefined;
     @memcpy(read_buffer[0..req_data.len], req_data);
-    var reader = std.Io.Reader.fixed(&read_buffer);
-    var body_reader = std.Io.Reader.fixed(body);
     var server: std.http.Server = .{
-        .out = undefined, // We're not sending a response here
+        .connection = undefined,
-        .reader = .{
+        .state = .ready,
-            .interface = undefined,
+        .read_buffer = &read_buffer,
-            .in = &reader,
+        .read_buffer_len = req_data.len,
-            .state = .received_head,
+        .next_request_start = 0,
-            .max_head_len = 1024,
-        },
     };
     var request: std.http.Server.Request = .{
         .server = &server,
-        .head = try std.http.Server.Request.Head.parse(head),
+        .head_end = head.len,
-        .head_buffer = head,
+        .head = try std.http.Server.Request.Head.parse(read_buffer[0..head.len]),
+        .reader_state = undefined,
     };
     {
-        var h = try std.ArrayList(std.http.Header).initCapacity(allocator, 4);
+        var h = std.ArrayList(std.http.Header).init(allocator);
-        defer h.deinit(allocator);
+        defer h.deinit();
         const signed_headers = &[_][]const u8{ "content-type", "host", "x-amz-date", "x-amz-target" };
         var it = request.iterateHeaders();
         while (it.next()) |source| {
 
@@ -1256,7 +1195,7 @@ test "can verify server request without x-amz-content-sha256" {
                 match = std.ascii.eqlIgnoreCase(s, source.name);
                 if (match) break;
             }
-            if (match) try h.append(allocator, .{ .name = source.name, .value = source.value });
+            if (match) try h.append(.{ .name = source.name, .value = source.value });
         }
         const req = base.Request{
             .path = "/",
 
@@ -1293,7 +1232,9 @@ test "can verify server request without x-amz-content-sha256" {
     }
 
     { // verification
-        try std.testing.expect(try verifyServerRequest(allocator, &request, &body_reader, struct {
+        var fis = std.io.fixedBufferStream(body[0..]);
 
+        try std.testing.expect(try verifyServerRequest(allocator, &request, fis.reader(), struct {
            cred: Credentials,
 
            const Self = @This();
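These two tests also show how the request body is fed to verifyServerRequest: the 0.12 branch wraps a buffer with std.io.fixedBufferStream and passes fbs.reader(), while master hands over a std.Io.Reader.fixed directly. A small sketch of the 0.12-side reader (the master-side line is shown only for contrast and will not compile under 0.12):

const std = @import("std");

test "fixed-buffer body reader, 0.12 style" {
    var fbs = std.io.fixedBufferStream("bar");
    var buf: [3]u8 = undefined;
    try std.testing.expectEqual(@as(usize, 3), try fbs.reader().read(&buf));
    try std.testing.expectEqualStrings("bar", &buf);
    // master style: var body_reader = std.Io.Reader.fixed("bar");
}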
src/aws_test.zig: 1446 changes (file diff suppressed because it is too large)

src/case.zig: new file, 47 lines
@@ -0,0 +1,47 @@
+const std = @import("std");
+const expectEqualStrings = std.testing.expectEqualStrings;
+
+pub fn snakeToCamel(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
+    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
+    var target_inx: usize = 0;
+    var previous_ascii: u8 = 0;
+    var rc = try allocator.alloc(u8, name.len);
+    while (utf8_name.nextCodepoint()) |cp| {
+        if (cp > 0xff) return error.UnicodeNotSupported;
+        const ascii_char = @as(u8, @truncate(cp));
+        if (ascii_char != '_') {
+            if (previous_ascii == '_' and ascii_char >= 'a' and ascii_char <= 'z') {
+                const uppercase_char = ascii_char - ('a' - 'A');
+                rc[target_inx] = uppercase_char;
+            } else {
+                rc[target_inx] = ascii_char;
+            }
+            target_inx = target_inx + 1;
+        }
+        previous_ascii = ascii_char;
+    }
+    // Do we care if the allocator refuses resize?
+    _ = allocator.resize(rc, target_inx);
+    return rc[0..target_inx];
+}
+pub fn snakeToPascal(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
+    const rc = try snakeToCamel(allocator, name);
+    if (rc[0] >= 'a' and rc[0] <= 'z') {
+        const uppercase_char = rc[0] - ('a' - 'A');
+        rc[0] = uppercase_char;
+    }
+    return rc;
+}
+
+test "converts from snake to camelCase" {
+    const allocator = std.testing.allocator;
+    const camel = try snakeToCamel(allocator, "access_key_id");
+    defer allocator.free(camel);
+    try expectEqualStrings("accessKeyId", camel);
+}
+test "single word" {
+    const allocator = std.testing.allocator;
+    const camel = try snakeToCamel(allocator, "word");
+    defer allocator.free(camel);
+    try expectEqualStrings("word", camel);
+}
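The tests above only exercise snakeToCamel; a companion check for snakeToPascal would look like this (a hedged sketch: the import path and the "AccessKeyId" expectation are inferred from the code, not taken from the repo's test suite):

const std = @import("std");
const case = @import("case.zig"); // assumed relative import

test "converts from snake to PascalCase" {
    const allocator = std.testing.allocator;
    const pascal = try case.snakeToPascal(allocator, "access_key_id");
    defer allocator.free(pascal);
    try std.testing.expectEqualStrings("AccessKeyId", pascal);
}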
src/date.zig: new file, 414 lines

@@ -0,0 +1,414 @@
+// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
+// Translated from German. We don't need any local time for this use case, and conversion
+// really requires the TZ DB.
+
+const std = @import("std");
+
+const log = std.log.scoped(.date);
+
+pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };
+
+const SECONDS_PER_DAY = 86400; //* 24 * 60 * 60 */
+const DAYS_PER_YEAR = 365; //* Normal year (no leap year) */
+
+pub fn timestampToDateTime(timestamp: i64) DateTime {
+
+    // from https://de.wikipedia.org/wiki/Unixzeit
+    const unixtime = @as(u64, @intCast(timestamp));
+    const DAYS_IN_4_YEARS = 1461; //* 4*365 + 1 */
+    const DAYS_IN_100_YEARS = 36524; //* 100*365 + 25 - 1 */
+    const DAYS_IN_400_YEARS = 146097; //* 400*365 + 100 - 4 + 1 */
+    const DAY_NUMBER_ADJUSTED_1970_01_01 = 719468; //* Day number relates to March 1st */
+
+    var dayN: u64 = DAY_NUMBER_ADJUSTED_1970_01_01 + unixtime / SECONDS_PER_DAY;
+    const seconds_since_midnight: u64 = unixtime % SECONDS_PER_DAY;
+    var temp: u64 = 0;
+
+    // Leap year rules for Gregorian Calendars
+    // Any year divisible by 100 is not a leap year unless also divisible by 400
+    temp = 4 * (dayN + DAYS_IN_100_YEARS + 1) / DAYS_IN_400_YEARS - 1;
+    var year = @as(u16, @intCast(100 * temp));
+    dayN -= DAYS_IN_100_YEARS * temp + temp / 4;
+
+    // For Julian calendars, each year divisible by 4 is a leap year
+    temp = 4 * (dayN + DAYS_PER_YEAR + 1) / DAYS_IN_4_YEARS - 1;
+    year += @as(u16, @intCast(temp));
+    dayN -= DAYS_PER_YEAR * temp + temp / 4;
+
+    // dayN calculates the days of the year in relation to March 1
+    var month = @as(u8, @intCast((5 * dayN + 2) / 153));
+    const day = @as(u8, @intCast(dayN - (@as(u64, @intCast(month)) * 153 + 2) / 5 + 1));
+    // 153 = 31+30+31+30+31 Days for the 5 months from March through July
+    // 153 = 31+30+31+30+31 Days for the 5 months from August through December
+    // 31+28 Days for January and February (see below)
+    // +2: Rounding adjustment
+    // +1: The first day in March is March 1st (not March 0)
+
+    month += 3; // Convert from the day that starts on March 1st, to a human year */
+    if (month > 12) { // months 13 and 14 become 1 (January) and 2 (February) of the next year
+        month -= 12;
+        year += 1;
+    }
+
+    const hours = @as(u8, @intCast(seconds_since_midnight / 3600));
+    const minutes = @as(u8, @intCast(seconds_since_midnight % 3600 / 60));
+    const seconds = @as(u8, @intCast(seconds_since_midnight % 60));
+
+    return DateTime{ .day = day, .month = month, .year = year, .hour = hours, .minute = minutes, .second = seconds };
+}
+
+pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
+    return try dateTimeToTimestamp(try parseEnglishToDateTime(data));
+}
+
+const EnglishParsingState = enum { Start, Day, Month, Year, Hour, Minute, Second, End };
+/// Converts a string to a timestamp value. May not handle dates before the
+/// epoch. Dates should look like "Fri, 03 Jun 2022 18:12:36 GMT"
+pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
+    // Fri, 03 Jun 2022 18:12:36 GMT
+    if (!std.mem.endsWith(u8, data, "GMT")) return error.InvalidFormat;
+
+    var start: usize = 0;
+    var state = EnglishParsingState.Start;
+    // Anything not explicitly set by our string would be 0
+    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
+    for (data, 0..) |ch, i| {
+        switch (ch) {
+            ',' => {},
+            ' ', ':' => {
+                // State transition
+
+                // We're going to coerce and this might not go well, but we
+                // want the compiler to create checks, so we'll turn on
+                // runtime safety for this block, forcing checks in ReleaseSafe
+                // and ReleaseFast modes.
+                const next_state = try endEnglishState(state, &rc, data[start..i]);
+                state = next_state;
+                start = i + 1;
+            },
+            else => {}, // We need to be pretty trusting on this format...
+        }
+    }
+    return rc;
+}
+
+fn endEnglishState(current_state: EnglishParsingState, date: *DateTime, prev_data: []const u8) !EnglishParsingState {
+    var next_state: EnglishParsingState = undefined;
+    log.debug("endEnglishState. Current state '{}', data: {s}", .{ current_state, prev_data });
+
+    // Using two switches is slightly less efficient, but more readable
+    switch (current_state) {
+        .End => return error.IllegalStateTransition,
+        .Start => next_state = .Day,
+        .Day => next_state = .Month,
+        .Month => next_state = .Year,
+        .Year => next_state = .Hour,
+        .Hour => next_state = .Minute,
+        .Minute => next_state = .Second,
+        .Second => next_state = .End,
+    }
+
+    switch (current_state) {
+        .Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
+        .Month => date.month = try parseEnglishMonth(prev_data),
+        .Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Start => {},
+        .End => return error.InvalidState,
+    }
+    return next_state;
+}
+
+fn parseEnglishMonth(data: []const u8) !u8 {
+    if (std.ascii.startsWithIgnoreCase(data, "Jan")) return 1;
+    if (std.ascii.startsWithIgnoreCase(data, "Feb")) return 2;
+    if (std.ascii.startsWithIgnoreCase(data, "Mar")) return 3;
+    if (std.ascii.startsWithIgnoreCase(data, "Apr")) return 4;
+    if (std.ascii.startsWithIgnoreCase(data, "May")) return 5;
+    if (std.ascii.startsWithIgnoreCase(data, "Jun")) return 6;
+    if (std.ascii.startsWithIgnoreCase(data, "Jul")) return 7;
+    if (std.ascii.startsWithIgnoreCase(data, "Aug")) return 8;
+    if (std.ascii.startsWithIgnoreCase(data, "Sep")) return 9;
+    if (std.ascii.startsWithIgnoreCase(data, "Oct")) return 10;
+    if (std.ascii.startsWithIgnoreCase(data, "Nov")) return 11;
+    if (std.ascii.startsWithIgnoreCase(data, "Dec")) return 12;
+    return error.InvalidMonth;
+}
+pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
+    return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
+}
+
+const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
+/// Converts a string to a timestamp value. May not handle dates before the
+/// epoch
+pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
+    // Basic format YYYYMMDDThhmmss
+    if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
+        return try parseIso8601BasicFormatToDateTime(data);
+    if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
+        return try parseIso8601BasicFormatToDateTime(data);
+
+    var start: usize = 0;
+    var state = IsoParsingState.Start;
+    // Anything not explicitly set by our string would be 0
+    var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
+    var zulu_time = false;
+    for (data, 0..) |ch, i| {
+        switch (ch) {
+            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
+                if (state == .Start) state = .Year;
+            },
+            '?', '~', '%' => {
+                // These characters all specify the type of time (approximate, etc)
+                // and we will ignore them
+            },
+            '.', '-', ':', 'T' => {
+                // State transition
+
+                // We're going to coerce and this might not go well, but we
+                // want the compiler to create checks, so we'll turn on
+                // runtime safety for this block, forcing checks in ReleaseSafe
+                // and ReleaseFast modes.
+                const next_state = try endIsoState(state, &rc, data[start..i]);
+                state = next_state;
+                start = i + 1;
+            },
+            'Z' => zulu_time = true,
+            else => {
+                log.err("Invalid character: {c}", .{ch});
+                return error.InvalidCharacter;
+            },
+        }
+    }
+    if (!zulu_time) return error.LocalTimeNotSupported;
+    // We know we have a Z at the end of this, so let's grab the last bit
+    // of the string, minus the 'Z', and fly, eagles, fly!
+    _ = try endIsoState(state, &rc, data[start .. data.len - 1]);
+    return rc;
+}
+
+fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
+    return DateTime{
+        .year = try std.fmt.parseUnsigned(u16, data[0..4], 10),
+        .month = try std.fmt.parseUnsigned(u8, data[4..6], 10),
+        .day = try std.fmt.parseUnsigned(u8, data[6..8], 10),
+        .hour = try std.fmt.parseUnsigned(u8, data[9..11], 10),
+        .minute = try std.fmt.parseUnsigned(u8, data[11..13], 10),
+        .second = try std.fmt.parseUnsigned(u8, data[13..15], 10),
+    };
+}
+
+fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
+    var next_state: IsoParsingState = undefined;
+    log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
+
+    // Using two switches is slightly less efficient, but more readable
+    switch (current_state) {
+        .Start, .End => return error.IllegalStateTransition,
+        .Year => next_state = .Month,
+        .Month => next_state = .Day,
+        .Day => next_state = .Hour,
+        .Hour => next_state = .Minute,
+        .Minute => next_state = .Second,
+        .Second => next_state = .Millisecond,
+        .Millisecond => next_state = .End,
+    }
+
+    // TODO: This won't handle signed, which Iso supports. For now, let's fail
+    // explicitly
+    switch (current_state) {
+        .Year => date.year = try std.fmt.parseUnsigned(u16, prev_data, 10),
+        .Month => date.month = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Day => date.day = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Hour => date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Minute => date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Second => date.second = try std.fmt.parseUnsigned(u8, prev_data, 10),
+        .Millisecond => {}, // We'll throw that away - our granularity is 1 second
+        .Start, .End => return error.InvalidState,
+    }
+    return next_state;
+}
+pub fn dateTimeToTimestamp(datetime: DateTime) !i64 {
+    const epoch = DateTime{
+        .year = 1970,
+        .month = 1,
+        .day = 1,
+        .hour = 0,
+        .minute = 0,
+        .second = 0,
+    };
+    return secondsBetween(epoch, datetime);
+}
+
+const DateTimeToTimestampError = error{
+    DateTimeOutOfRange,
+};
+
+fn secondsBetween(start: DateTime, end: DateTime) DateTimeToTimestampError!i64 {
+    try validateDatetime(start);
+    try validateDatetime(end);
+    if (end.year < start.year) return -1 * try secondsBetween(end, start);
+    if (start.month != 1 or
+        start.day != 1 or
+        start.hour != 0 or
+        start.minute != 0 or
+        start.second != 0)
+    {
+        const seconds_into_start_year = secondsFromBeginningOfYear(
+            start.year,
+            start.month,
+            start.day,
+            start.hour,
+            start.minute,
+            start.second,
+        );
+        const new_start = DateTime{
+            .year = start.year,
+            .month = 1,
+            .day = 1,
+            .hour = 0,
+            .minute = 0,
+            .second = 0,
+        };
+        return (try secondsBetween(new_start, end)) - seconds_into_start_year;
+    }
+    const leap_years_between = leapYearsBetween(start.year, end.year);
+    const add_days: u1 = 0;
+    const years_diff = end.year - start.year;
+    // log.debug("Years from epoch: {d}, Leap years: {d}", .{ years_diff, leap_years_between });
+    const days_diff: i32 = (years_diff * DAYS_PER_YEAR) + leap_years_between + add_days;
+    // log.debug("Days with leap year, without month: {d}", .{days_diff});
+
+    const seconds_into_year = secondsFromBeginningOfYear(
+        end.year,
+        end.month,
+        end.day,
+        end.hour,
+        end.minute,
+        end.second,
+    );
+    return (days_diff * SECONDS_PER_DAY) + @as(i64, seconds_into_year);
+}
+
+fn validateDatetime(dt: DateTime) !void {
+    if (dt.month > 12 or
+        dt.day > 31 or
+        dt.hour >= 24 or
+        dt.minute >= 60 or
+        dt.second >= 60) return error.DateTimeOutOfRange;
+}
+
+fn secondsFromBeginningOfYear(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) u32 {
+    const current_year_is_leap_year = isLeapYear(year);
+    const leap_year_days_per_month: [12]u5 = .{ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
+    const normal_days_per_month: [12]u5 = .{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
+    const days_per_month = if (current_year_is_leap_year) leap_year_days_per_month else normal_days_per_month;
+    var current_month: usize = 1;
+    const end_month = month;
+    var days_diff: u32 = 0;
+    while (current_month != end_month) {
+        days_diff += days_per_month[current_month - 1]; // months are 1-based vs array is 0-based
+        current_month += 1;
+    }
+    // log.debug("Days with month, without day: {d}. Day of month {d}, will add {d} days", .{
+    //     days_diff,
+    //     day,
+    //     day - 1,
+    // });
+    // We need -1 because we're not actually including the ending day (that's up to hour/minute)
+    // In other words, days in the month are 1-based, while hours/minutes are zero based
+    days_diff += day - 1;
+    // log.debug("Total days diff: {d}", .{days_diff});
+    var seconds_diff: u32 = days_diff * SECONDS_PER_DAY;
+
+    // From here out, we want to get everything into seconds
+    seconds_diff += @as(u32, hour) * 60 * 60;
+    seconds_diff += @as(u32, minute) * 60;
+    seconds_diff += @as(u32, second);
+
+    return seconds_diff;
+}
+fn isLeapYear(year: u16) bool {
+    if (year % 4 != 0) return false;
+    if (year % 400 == 0) return true;
+    if (year % 100 == 0) return false;
+    return true;
+}
+
+fn leapYearsBetween(start_year_inclusive: u16, end_year_exclusive: u16) u16 {
+    const start = @min(start_year_inclusive, end_year_exclusive);
+    const end = @max(start_year_inclusive, end_year_exclusive);
+    var current = start;
+    // log.debug("Leap years starting from {d}, ending at {d}", .{ start, end });
+    while (current % 4 != 0 and current < end) {
+        current += 1;
+    }
+    if (current == end) return 0; // No leap years here. E.g. 1971-1973
+    // We're on a potential leap year, and now we can step by 4
+    var rc: u16 = 0;
+    while (current < end) {
+        if (current % 4 == 0) {
+            if (current % 100 != 0) {
+                // log.debug("Year {d} is leap year", .{current});
+                rc += 1;
+                current += 4;
+                continue;
+            }
+            // We're on a century, which is normally not a leap year, unless
+            // it's divisible by 400
+            if (current % 400 == 0) {
+                // log.debug("Year {d} is leap year", .{current});
+                rc += 1;
+            }
+        }
+        current += 4;
+    }
+    return rc;
+}
+
+fn printDateTime(dt: DateTime) void {
+    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0<2}Z", .{
+        dt.year,
+        dt.month,
+        dt.day,
+        dt.hour,
+        dt.minute,
+        dt.second,
+    });
+}
+
+pub fn printNowUtc() void {
+    printDateTime(timestampToDateTime(std.time.timestamp()));
+}
+
+test "Convert timestamp to datetime" {
+    printDateTime(timestampToDateTime(std.time.timestamp()));
+    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
+
+    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
+    // Get time for date: https://wtools.io/convert-date-time-to-unix-time
+    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
+}
+
+test "Convert datetime to timestamp" {
+    try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
+    try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
+    try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
+}
+
+test "Convert ISO8601 string to timestamp" {
+    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
+    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
+    try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
+    try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
+}
+test "Convert datetime to timestamp before 1970" {
+    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
+}
+
+test "Convert whatever AWS is sending us to timestamp" {
+    const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
+    try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
+}
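A quick cross-check of the two conversion directions above, using values that already appear in the file's own tests (a hedged sketch; the relative import path is assumed):

const std = @import("std");
const date = @import("date.zig"); // assumed relative import

test "round-trip: ISO8601 basic format to DateTime to timestamp" {
    const dt = try date.parseIso8601ToDateTime("20150830T123600");
    try std.testing.expectEqual(@as(i64, 1440938160), try date.dateTimeToTimestamp(dt));
}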
@@ -14,117 +14,8 @@ const testing = std.testing;
 const mem = std.mem;
 const maxInt = std.math.maxInt;
 
-pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !void {
+// pub const WriteStream = @import("json/write_stream.zig").WriteStream;
-    if (@typeInfo(@TypeOf(map)) == .optional) {
+// pub const writeStream = @import("json/write_stream.zig").writeStream;
-        if (map) |m| serializeMapInternal(m, key, options, out_stream);
-    } else {
-        serializeMapInternal(map, key, options, out_stream);
-    }
-}
-
-fn serializeMapKey(key: []const u8, options: anytype, out_stream: anytype) !void {
-    var child_options = options;
-    if (child_options.whitespace) |*child_ws|
-        child_ws.indent_level += 1;
-
-    try out_stream.writeByte('"');
-    try out_stream.writeAll(key);
-    _ = try out_stream.write("\":");
-    if (options.whitespace) |ws| {
-        if (ws.separator) {
-            try out_stream.writeByte(' ');
-        }
-    }
-}
-
-fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
-    if (map.len == 0) {
-        try out_stream.writeByte('{');
-        try out_stream.writeByte('}');
-        return;
-    }
-
-    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
-    var child_options = options;
-    if (child_options.whitespace) |*whitespace| {
-        whitespace.indent_level += 1;
-    }
-
-    try out_stream.writeByte('{');
-    if (options.whitespace) |_|
-        try out_stream.writeByte('\n');
-
-    for (map, 0..) |tag, i| {
-        // TODO: Deal with escaping and general "json.stringify" the values...
-        if (child_options.whitespace) |ws|
-            try ws.outputIndent(out_stream);
-        try out_stream.writeByte('"');
-        try jsonEscape(tag.key, child_options, out_stream);
-        _ = try out_stream.write("\":");
-        if (child_options.whitespace) |ws| {
-            if (ws.separator) {
-                try out_stream.writeByte(' ');
-            }
-        }
-        try out_stream.writeByte('"');
-        try jsonEscape(tag.value, child_options, out_stream);
-        try out_stream.writeByte('"');
-        if (i < map.len - 1) {
-            try out_stream.writeByte(',');
-        }
-        if (child_options.whitespace) |_|
-            try out_stream.writeByte('\n');
-    }
-
-    if (options.whitespace) |ws|
-        try ws.outputIndent(out_stream);
-    try out_stream.writeByte('}');
-}
-
-fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
-    try serializeMapKey(key, options, out_stream);
-    return try serializeMapAsObject(map, options, out_stream);
-}
-
-// code within jsonEscape lifted from json.zig in stdlib
-fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
-    var i: usize = 0;
-    while (i < value.len) : (i += 1) {
-        switch (value[i]) {
-            // normal ascii character
-            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
-            // only 2 characters that *must* be escaped
-            '\\' => try out_stream.writeAll("\\\\"),
-            '\"' => try out_stream.writeAll("\\\""),
-            // solidus is optional to escape
-            '/' => {
-                if (options.string.String.escape_solidus) {
-                    try out_stream.writeAll("\\/");
-                } else {
-                    try out_stream.writeByte('/');
-                }
-            },
-            // control characters with short escapes
-            // TODO: option to switch between unicode and 'short' forms?
-            0x8 => try out_stream.writeAll("\\b"),
-            0xC => try out_stream.writeAll("\\f"),
-            '\n' => try out_stream.writeAll("\\n"),
-            '\r' => try out_stream.writeAll("\\r"),
-            '\t' => try out_stream.writeAll("\\t"),
-            else => {
-                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
-                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
-                if (ulen == 1 or options.string.String.escape_unicode) {
-                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
-                    try outputUnicodeEscape(codepoint, out_stream);
-                } else {
-                    try out_stream.writeAll(value[i .. i + ulen]);
-                }
-                i += ulen - 1;
-            },
-        }
-    }
-}
-
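For orientation, the master-side serializeMap treats a "map" as a slice of key/value structs and writes it as a JSON object under the given key. A hedged sketch of calling it, written against the 0.12-era std.io API (the StringifyOptions default and the relative import are assumptions; under default options no whitespace is emitted):

const std = @import("std");
const json = @import("json.zig"); // assumed relative import

test "serializeMap writes a slice of key/value structs as an object" {
    const KV = struct { key: []const u8, value: []const u8 };
    const map = [_]KV{.{ .key = "a", .value = "b" }};
    var buf: [32]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try json.serializeMap(map[0..], "tags", json.StringifyOptions{}, fbs.writer());
    try std.testing.expectEqualStrings("\"tags\":{\"a\":\"b\"}", fbs.getWritten());
}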
 const StringEscapes = union(enum) {
     None,
 
@@ -1371,8 +1262,137 @@ pub const Value = union(enum) {
     String: []const u8,
     Array: Array,
     Object: ObjectMap,
 
+    pub fn jsonStringify(
+        value: @This(),
+        options: StringifyOptions,
+        out_stream: anytype,
+    ) @TypeOf(out_stream).Error!void {
+        switch (value) {
+            .Null => try stringify(null, options, out_stream),
+            .Bool => |inner| try stringify(inner, options, out_stream),
+            .Integer => |inner| try stringify(inner, options, out_stream),
+            .Float => |inner| try stringify(inner, options, out_stream),
+            .NumberString => |inner| try out_stream.writeAll(inner),
+            .String => |inner| try stringify(inner, options, out_stream),
+            .Array => |inner| try stringify(inner.items, options, out_stream),
+            .Object => |inner| {
+                try out_stream.writeByte('{');
+                var field_output = false;
+                var child_options = options;
+                if (child_options.whitespace) |*child_whitespace| {
+                    child_whitespace.indent_level += 1;
+                }
+                var it = inner.iterator();
+                while (it.next()) |entry| {
+                    if (!field_output) {
+                        field_output = true;
+                    } else {
+                        try out_stream.writeByte(',');
+                    }
+                    if (child_options.whitespace) |child_whitespace| {
+                        try out_stream.writeByte('\n');
+                        try child_whitespace.outputIndent(out_stream);
+                    }
+
+                    try stringify(entry.key_ptr, options, out_stream);
+                    try out_stream.writeByte(':');
+                    if (child_options.whitespace) |child_whitespace| {
+                        if (child_whitespace.separator) {
+                            try out_stream.writeByte(' ');
+                        }
+                    }
+                    try stringify(entry.value_ptr, child_options, out_stream);
+                }
+                if (field_output) {
+                    if (options.whitespace) |whitespace| {
+                        try out_stream.writeByte('\n');
+                        try whitespace.outputIndent(out_stream);
+                    }
+                }
+                try out_stream.writeByte('}');
+            },
+        }
+    }
+
+    pub fn dump(self: Value) void {
+        var held = std.debug.getStderrMutex().acquire();
+        defer held.release();
+
+        const stderr = std.io.getStdErr().writer();
+        stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
+    }
 };
 
+pub fn dump(value: anytype) void {
+    var held = std.debug.getStderrMutex().acquire();
+    defer held.release();
+
+    const stderr = std.io.getStdErr().writer();
+    stringify(value, StringifyOptions{ .whitespace = null }, stderr) catch return;
+}
+
+test "Value.jsonStringify" {
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e1");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        var vals = [_]Value{
+            .{ .Integer = 1 },
+            .{ .Integer = 2 },
+            .{ .NumberString = "3" },
+        };
+        try (Value{
+            .Array = Array.fromOwnedSlice(undefined, &vals),
+        }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
+    }
+    {
+        var buffer: [10]u8 = undefined;
+        var fbs = std.io.fixedBufferStream(&buffer);
+        var obj = ObjectMap.init(testing.allocator);
+        defer obj.deinit();
+        try obj.putNoClobber("a", .{ .String = "b" });
+        try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
+        try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
+    }
+}
+
 /// parse tokens from a stream, returning `false` if they do not decode to `value`
 fn parsesTo(comptime T: type, value: T, tokens: *TokenStream, options: ParseOptions) !bool {
     // TODO: should be able to write this function to not require an allocator
 
@@ -1540,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {
 
 fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .bool => {
+        .Bool => {
             return switch (token) {
                 .True => true,
                 .False => false,
                 else => error.UnexpectedToken,
             };
         },
-        .float, .comptime_float => {
+        .Float, .ComptimeFloat => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
             };
             return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
         },
-        .int, .comptime_int => {
+        .Int, .ComptimeInt => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
 
@@ -1567,32 +1587,22 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             if (std.math.round(float) != float) return error.InvalidNumber;
             return @as(T, @intFromFloat(float));
         },
-        .optional => |optionalInfo| {
+        .Optional => |optionalInfo| {
             if (token == .Null) {
                 return null;
             } else {
                 return try parseInternal(optionalInfo.child, token, tokens, options);
             }
         },
-        .@"enum" => |enumInfo| {
+        .Enum => |enumInfo| {
             switch (token) {
                 .Number => |numberToken| {
-                    if (!numberToken.is_integer) {
+                    if (!numberToken.is_integer) return error.UnexpectedToken;
-                        // probably is in scientific notation
-                        const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
-                        return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
-                    }
 
                     const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
                     return try std.meta.intToEnum(T, n);
                 },
                 .String => |stringToken| {
                     const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
 
-                    if (std.meta.hasFn(T, "parse")) {
-                        return try T.parse(source_slice);
-                    }
 
                     switch (stringToken.escapes) {
                         .None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
                         .Some => {
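The master side of the enum hunk above also honors a user-supplied parse declaration before falling back to stringToEnum. A hedged sketch of that hook from the consumer's side (the enum and its wire format are invented for illustration):

const std = @import("std");

const Color = enum {
    red,
    green,

    // If the target type declares parse(), master's parseInternal defers to it.
    pub fn parse(s: []const u8) !Color {
        if (std.ascii.eqlIgnoreCase(s, "RED")) return .red;
        if (std.ascii.eqlIgnoreCase(s, "GREEN")) return .green;
        return error.InvalidEnumTag;
    }
};

test "enum with a custom parse hook" {
    try std.testing.expectEqual(Color.red, try Color.parse("RED"));
}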
@ -1608,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
|
||||||
else => return error.UnexpectedToken,
|
else => return error.UnexpectedToken,
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
.@"union" => |unionInfo| {
|
.Union => |unionInfo| {
|
||||||
if (unionInfo.tag_type) |_| {
|
if (unionInfo.tag_type) |_| {
|
||||||
// try each of the union fields until we find one that matches
|
// try each of the union fields until we find one that matches
|
||||||
inline for (unionInfo.fields) |u_field| {
|
inline for (unionInfo.fields) |u_field| {
|
||||||
|
@ -1632,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
|
||||||
@compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
|
@compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
.@"struct" => |structInfo| {
|
.Struct => |structInfo| {
|
||||||
switch (token) {
|
switch (token) {
|
||||||
.ObjectBegin => {},
|
.ObjectBegin => {},
|
||||||
else => return error.UnexpectedToken,
|
else => return error.UnexpectedToken,
|
||||||
|
@ -1713,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
|
||||||
}
|
}
|
||||||
inline for (structInfo.fields, 0..) |field, i| {
|
inline for (structInfo.fields, 0..) |field, i| {
    if (!fields_seen[i]) {
-       if (field.default_value_ptr) |default_value_ptr| {
+       if (field.default_value) |default_value_ptr| {
            if (!field.is_comptime) {
                const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
                @field(r, field.name) = default_value;
@@ -1726,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
        }
        return r;
    },
-   .array => |arrayInfo| {
+   .Array => |arrayInfo| {
        switch (token) {
            .ArrayBegin => {
                var r: T = undefined;
@@ -1760,24 +1770,24 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
            else => return error.UnexpectedToken,
        }
    },
-   .pointer => |ptrInfo| {
+   .Pointer => |ptrInfo| {
        const allocator = options.allocator orelse return error.AllocatorRequired;
        switch (ptrInfo.size) {
-           .one => {
+           .One => {
                const r: T = try allocator.create(ptrInfo.child);
                errdefer allocator.destroy(r);
                r.* = try parseInternal(ptrInfo.child, token, tokens, options);
                return r;
            },
-           .slice => {
+           .Slice => {
                switch (token) {
                    .ArrayBegin => {
-                       var arraylist = std.ArrayList(ptrInfo.child){};
+                       var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                        errdefer {
-                           while (arraylist.pop()) |v| {
+                           while (arraylist.popOrNull()) |v| {
                                parseFree(ptrInfo.child, v, options);
                            }
-                           arraylist.deinit(allocator);
+                           arraylist.deinit();
                        }

                        while (true) {
@@ -1787,11 +1797,11 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                            else => {},
                        }

-                       try arraylist.ensureTotalCapacity(allocator, arraylist.items.len + 1);
+                       try arraylist.ensureTotalCapacity(arraylist.items.len + 1);
                        const v = try parseInternal(ptrInfo.child, tok, tokens, options);
                        arraylist.appendAssumeCapacity(v);
                        }
-                       return arraylist.toOwnedSlice(allocator);
+                       return arraylist.toOwnedSlice();
                    },
                    .String => |stringToken| {
                        if (ptrInfo.child != u8) return error.UnexpectedToken;
@@ -1817,12 +1827,12 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                        if (key_type == null) return error.UnexpectedToken;
                        const value_type = typeForField(ptrInfo.child, "value");
                        if (value_type == null) return error.UnexpectedToken;
-                       var arraylist = std.ArrayList(ptrInfo.child){};
+                       var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                        errdefer {
-                           while (arraylist.pop()) |v| {
+                           while (arraylist.popOrNull()) |v| {
                                parseFree(ptrInfo.child, v, options);
                            }
-                           arraylist.deinit(allocator);
+                           arraylist.deinit();
                        }
                        while (true) {
                            const key = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
@@ -1831,13 +1841,13 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                            else => {},
                        }

-                       try arraylist.ensureTotalCapacity(allocator, arraylist.items.len + 1);
+                       try arraylist.ensureTotalCapacity(arraylist.items.len + 1);
                        const key_val = try parseInternal(key_type.?, key, tokens, options);
                        const val = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
                        const val_val = try parseInternal(value_type.?, val, tokens, options);
                        arraylist.appendAssumeCapacity(.{ .key = key_val, .value = val_val });
                        }
-                       return arraylist.toOwnedSlice(allocator);
+                       return arraylist.toOwnedSlice();
                    },
                    else => return error.UnexpectedToken,
                }
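The recurring ArrayList edits in the hunks above are the managed-to-unmanaged migration: zig-0.12.0 stores the allocator inside the list (`init(allocator)`, `deinit()`, `popOrNull()`), while master builds the list as a bare value and passes the allocator to every mutating call. A minimal sketch of the master-side pattern (assumes a 0.15-era std; not code from this repository):

```zig
const std = @import("std");

test "unmanaged ArrayList: allocator passed per call" {
    const allocator = std.testing.allocator;
    var list = std.ArrayList(u32){}; // no stored allocator on the master side
    defer list.deinit(allocator);
    try list.ensureTotalCapacity(allocator, 2);
    list.appendAssumeCapacity(1);
    list.appendAssumeCapacity(2);
    // pop() now returns an optional; 0.12 spelled this popOrNull()
    while (list.pop()) |_| {}
    const slice = try list.toOwnedSlice(allocator); // empty but valid
    allocator.free(slice);
}
```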
@@ -1853,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
    const ti = @typeInfo(T);
    switch (ti) {
-       .@"struct" => {
-           inline for (ti.@"struct".fields) |field| {
+       .Struct => {
+           inline for (ti.Struct.fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.type;
            }
@@ -1868,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
    // We should be getting a type that is a pointer to a slice.
    // Let's just double check before proceeding
    const ti = @typeInfo(T);
-   if (ti != .pointer) return false;
-   if (ti.pointer.size != .slice) return false;
-   const ti_child = @typeInfo(ti.pointer.child);
-   if (ti_child != .@"struct") return false;
-   if (ti_child.@"struct".fields.len != 2) return false;
+   if (ti != .Pointer) return false;
+   if (ti.Pointer.size != .Slice) return false;
+   const ti_child = @typeInfo(ti.Pointer.child);
+   if (ti_child != .Struct) return false;
+   if (ti_child.Struct.fields.len != 2) return false;
    var key_found = false;
    var value_found = false;
-   inline for (ti_child.@"struct".fields) |field| {
+   inline for (ti_child.Struct.fields) |field| {
        if (std.mem.eql(u8, "key", field.name))
            key_found = true;
        if (std.mem.eql(u8, "value", field.name))
@@ -1885,7 +1895,6 @@ fn isMapPattern(comptime T: type) bool {
}

pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
-   // std.log.debug("parsing {s} into type {s}", .{ tokens.slice, @typeName(T) });
    const token = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
    return parseInternal(T, token, tokens, options);
}
@@ -1894,13 +1903,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
/// Should be called with the same type and `ParseOptions` that were passed to `parse`
pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
    switch (@typeInfo(T)) {
-       .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
-       .optional => {
+       .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
+       .Optional => {
            if (value) |v| {
                return parseFree(@TypeOf(v), v, options);
            }
        },
-       .@"union" => |unionInfo| {
+       .Union => |unionInfo| {
            if (unionInfo.tag_type) |UnionTagType| {
                inline for (unionInfo.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
@@ -1912,24 +1921,24 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
                unreachable;
            }
        },
-       .@"struct" => |structInfo| {
+       .Struct => |structInfo| {
            inline for (structInfo.fields) |field| {
                parseFree(field.type, @field(value, field.name), options);
            }
        },
-       .array => |arrayInfo| {
+       .Array => |arrayInfo| {
            for (value) |v| {
                parseFree(arrayInfo.child, v, options);
            }
        },
-       .pointer => |ptrInfo| {
+       .Pointer => |ptrInfo| {
            const allocator = options.allocator orelse unreachable;
            switch (ptrInfo.size) {
-               .one => {
+               .One => {
                    parseFree(ptrInfo.child, value.*, options);
                    allocator.destroy(value);
                },
-               .slice => {
+               .Slice => {
                    for (value) |v| {
                        parseFree(ptrInfo.child, v, options);
                    }
@@ -2274,7 +2283,7 @@ pub const Parser = struct {
                return;
            }

-           var value = p.stack.pop().?;
+           var value = p.stack.pop();
            try p.pushToParent(&value);
        },
        .String => |s| {
@@ -2340,7 +2349,7 @@ pub const Parser = struct {
                return;
            }

-           var value = p.stack.pop().?;
+           var value = p.stack.pop();
            try p.pushToParent(&value);
        },
        .ObjectBegin => {
@@ -2501,7 +2510,7 @@ pub fn unescapeValidString(output: []u8, input: []const u8) !void {
                mem.nativeToLittle(u16, firstCodeUnit),
                mem.nativeToLittle(u16, secondCodeUnit),
            };
-           if (std.unicode.utf16LeToUtf8(output[outIndex..], &utf16le_seq)) |byteCount| {
+           if (std.unicode.utf16leToUtf8(output[outIndex..], &utf16le_seq)) |byteCount| {
                outIndex += byteCount;
                inIndex += 12;
            } else |_| {
@@ -2747,10 +2756,6 @@ pub const StringifyOptions = struct {
    }
};

-   emit_null: bool = true,
-
-   exclude_fields: ?[][]const u8 = null,
-
    /// Controls the whitespace emitted
    whitespace: ?Whitespace = null,

@@ -2794,3 +2799,385 @@ fn outputUnicodeEscape(
        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    }
}

+ pub fn stringify(
+     value: anytype,
+     options: StringifyOptions,
+     out_stream: anytype,
+ ) !void {
+     const T = @TypeOf(value);
+     switch (@typeInfo(T)) {
+         .Float, .ComptimeFloat => {
+             return std.fmt.format(out_stream, "{e}", .{value});
+         },
+         .Int, .ComptimeInt => {
+             return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
+         },
+         .Bool => {
+             return out_stream.writeAll(if (value) "true" else "false");
+         },
+         .Null => {
+             return out_stream.writeAll("null");
+         },
+         .Optional => {
+             if (value) |payload| {
+                 return try stringify(payload, options, out_stream);
+             } else {
+                 return try stringify(null, options, out_stream);
+             }
+         },
+         .Enum => {
+             if (comptime std.meta.hasFn(T, "jsonStringify")) {
+                 return value.jsonStringify(options, out_stream);
+             }
+
+             @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
+         },
+         .Union => {
+             if (comptime std.meta.hasFn(T, "jsonStringify")) {
+                 return value.jsonStringify(options, out_stream);
+             }
+
+             const info = @typeInfo(T).Union;
+             if (info.tag_type) |UnionTagType| {
+                 inline for (info.fields) |u_field| {
+                     if (value == @field(UnionTagType, u_field.name)) {
+                         return try stringify(@field(value, u_field.name), options, out_stream);
+                     }
+                 }
+             } else {
+                 @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
+             }
+         },
+         .Struct => |S| {
+             if (comptime std.meta.hasFn(T, "jsonStringify")) {
+                 return value.jsonStringify(options, out_stream);
+             }
+
+             try out_stream.writeByte('{');
+             comptime var field_output = false;
+             var child_options = options;
+             if (child_options.whitespace) |*child_whitespace| {
+                 child_whitespace.indent_level += 1;
+             }
+             inline for (S.fields) |Field| {
+                 // don't include void fields
+                 if (Field.type == void) continue;
+
+                 if (!field_output) {
+                     field_output = true;
+                 } else {
+                     try out_stream.writeByte(',');
+                 }
+                 if (child_options.whitespace) |child_whitespace| {
+                     try out_stream.writeByte('\n');
+                     try child_whitespace.outputIndent(out_stream);
+                 }
+                 var field_written = false;
+                 if (comptime std.meta.hasFn(T, "jsonStringifyField"))
+                     field_written = try value.jsonStringifyField(Field.name, child_options, out_stream);
+
+                 if (!field_written) {
+                     if (comptime std.meta.hasFn(T, "fieldNameFor")) {
+                         const name = value.fieldNameFor(Field.name);
+                         try stringify(name, options, out_stream);
+                     } else {
+                         try stringify(Field.name, options, out_stream);
+                     }
+
+                     try out_stream.writeByte(':');
+                     if (child_options.whitespace) |child_whitespace| {
+                         if (child_whitespace.separator) {
+                             try out_stream.writeByte(' ');
+                         }
+                     }
+                     try stringify(@field(value, Field.name), child_options, out_stream);
+                 }
+             }
+             if (field_output) {
+                 if (options.whitespace) |whitespace| {
+                     try out_stream.writeByte('\n');
+                     try whitespace.outputIndent(out_stream);
+                 }
+             }
+             try out_stream.writeByte('}');
+             return;
+         },
+         .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
+         .Pointer => |ptr_info| switch (ptr_info.size) {
+             .One => switch (@typeInfo(ptr_info.child)) {
+                 .Array => {
+                     const Slice = []const std.meta.Elem(ptr_info.child);
+                     return stringify(@as(Slice, value), options, out_stream);
+                 },
+                 else => {
+                     // TODO: avoid loops?
+                     return stringify(value.*, options, out_stream);
+                 },
+             },
+             // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
+             .Slice => {
+                 if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
+                     try out_stream.writeByte('\"');
+                     var i: usize = 0;
+                     while (i < value.len) : (i += 1) {
+                         switch (value[i]) {
+                             // normal ascii character
+                             0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
+                             // only 2 characters that *must* be escaped
+                             '\\' => try out_stream.writeAll("\\\\"),
+                             '\"' => try out_stream.writeAll("\\\""),
+                             // solidus is optional to escape
+                             '/' => {
+                                 if (options.string.String.escape_solidus) {
+                                     try out_stream.writeAll("\\/");
+                                 } else {
+                                     try out_stream.writeByte('/');
+                                 }
+                             },
+                             // control characters with short escapes
+                             // TODO: option to switch between unicode and 'short' forms?
+                             0x8 => try out_stream.writeAll("\\b"),
+                             0xC => try out_stream.writeAll("\\f"),
+                             '\n' => try out_stream.writeAll("\\n"),
+                             '\r' => try out_stream.writeAll("\\r"),
+                             '\t' => try out_stream.writeAll("\\t"),
+                             else => {
+                                 const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
+                                 // control characters (only things left with 1 byte length) should always be printed as unicode escapes
+                                 if (ulen == 1 or options.string.String.escape_unicode) {
+                                     const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
+                                     try outputUnicodeEscape(codepoint, out_stream);
+                                 } else {
+                                     try out_stream.writeAll(value[i .. i + ulen]);
+                                 }
+                                 i += ulen - 1;
+                             },
+                         }
+                     }
+                     try out_stream.writeByte('\"');
+                     return;
+                 }
+
+                 try out_stream.writeByte('[');
+                 var child_options = options;
+                 if (child_options.whitespace) |*whitespace| {
+                     whitespace.indent_level += 1;
+                 }
+                 for (value, 0..) |x, i| {
+                     if (i != 0) {
+                         try out_stream.writeByte(',');
+                     }
+                     if (child_options.whitespace) |child_whitespace| {
+                         try out_stream.writeByte('\n');
+                         try child_whitespace.outputIndent(out_stream);
+                     }
+                     try stringify(x, child_options, out_stream);
+                 }
+                 if (value.len != 0) {
+                     if (options.whitespace) |whitespace| {
+                         try out_stream.writeByte('\n');
+                         try whitespace.outputIndent(out_stream);
+                     }
+                 }
+                 try out_stream.writeByte(']');
+                 return;
+             },
+             else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
+         },
+         .Array => return stringify(&value, options, out_stream),
+         .Vector => |info| {
+             const array: [info.len]info.child = value;
+             return stringify(&array, options, out_stream);
+         },
+         else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
+     }
+     unreachable;
+ }
+
+ fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
+     const ValidationWriter = struct {
+         const Self = @This();
+         pub const Writer = std.io.Writer(*Self, Error, write);
+         pub const Error = error{
+             TooMuchData,
+             DifferentData,
+         };
+
+         expected_remaining: []const u8,
+
+         fn init(exp: []const u8) Self {
+             return .{ .expected_remaining = exp };
+         }
+
+         pub fn writer(self: *Self) Writer {
+             return .{ .context = self };
+         }
+
+         fn write(self: *Self, bytes: []const u8) Error!usize {
+             if (self.expected_remaining.len < bytes.len) {
+                 std.log.warn(
+                     \\====== expected this output: =========
+                     \\{s}
+                     \\======== instead found this: =========
+                     \\{s}
+                     \\======================================
+                 , .{
+                     self.expected_remaining,
+                     bytes,
+                 });
+                 return error.TooMuchData;
+             }
+             if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
+                 std.log.warn(
+                     \\====== expected this output: =========
+                     \\{s}
+                     \\======== instead found this: =========
+                     \\{s}
+                     \\======================================
+                 , .{
+                     self.expected_remaining[0..bytes.len],
+                     bytes,
+                 });
+                 return error.DifferentData;
+             }
+             self.expected_remaining = self.expected_remaining[bytes.len..];
+             return bytes.len;
+         }
+     };
+
+     var vos = ValidationWriter.init(expected);
+     try stringify(value, options, vos.writer());
+     if (vos.expected_remaining.len > 0) return error.NotEnoughData;
+ }
+
+ test "stringify basic types" {
+     try teststringify("false", false, StringifyOptions{});
+     try teststringify("true", true, StringifyOptions{});
+     try teststringify("null", @as(?u8, null), StringifyOptions{});
+     try teststringify("null", @as(?*u32, null), StringifyOptions{});
+     try teststringify("42", 42, StringifyOptions{});
+     try teststringify("4.2e1", 42.0, StringifyOptions{});
+     try teststringify("42", @as(u8, 42), StringifyOptions{});
+     try teststringify("42", @as(u128, 42), StringifyOptions{});
+     try teststringify("4.2e1", @as(f32, 42), StringifyOptions{});
+     try teststringify("4.2e1", @as(f64, 42), StringifyOptions{});
+     try teststringify("\"ItBroke\"", @as(anyerror, error.ItBroke), StringifyOptions{});
+ }
+
+ test "stringify string" {
+     try teststringify("\"hello\"", "hello", StringifyOptions{});
+     try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{});
+     try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{});
+     try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{80}\"", "with unicode\u{80}", StringifyOptions{});
+     try teststringify("\"with unicode\\u0080\"", "with unicode\u{80}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", StringifyOptions{});
+     try teststringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{100}\"", "with unicode\u{100}", StringifyOptions{});
+     try teststringify("\"with unicode\\u0100\"", "with unicode\u{100}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{800}\"", "with unicode\u{800}", StringifyOptions{});
+     try teststringify("\"with unicode\\u0800\"", "with unicode\u{800}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", StringifyOptions{});
+     try teststringify("\"with unicode\\u8000\"", "with unicode\u{8000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", StringifyOptions{});
+     try teststringify("\"with unicode\\ud799\"", "with unicode\u{D799}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", StringifyOptions{});
+     try teststringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", StringifyOptions{});
+     try teststringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
+     try teststringify("\"/\"", "/", StringifyOptions{});
+     try teststringify("\"\\/\"", "/", StringifyOptions{ .string = .{ .String = .{ .escape_solidus = true } } });
+ }
+
+ test "stringify tagged unions" {
+     try teststringify("42", union(enum) {
+         Foo: u32,
+         Bar: bool,
+     }{ .Foo = 42 }, StringifyOptions{});
+ }
+
+ test "stringify struct" {
+     try teststringify("{\"foo\":42}", struct {
+         foo: u32,
+     }{ .foo = 42 }, StringifyOptions{});
+ }
+
+ test "stringify struct with indentation" {
+     try teststringify(
+         \\{
+         \\ "foo": 42,
+         \\ "bar": [
+         \\ 1,
+         \\ 2,
+         \\ 3
+         \\ ]
+         \\}
+     ,
+         struct {
+             foo: u32,
+             bar: [3]u32,
+         }{
+             .foo = 42,
+             .bar = .{ 1, 2, 3 },
+         },
+         StringifyOptions{
+             .whitespace = .{},
+         },
+     );
+     try teststringify(
+         "{\n\t\"foo\":42,\n\t\"bar\":[\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
+         struct {
+             foo: u32,
+             bar: [3]u32,
+         }{
+             .foo = 42,
+             .bar = .{ 1, 2, 3 },
+         },
+         StringifyOptions{
+             .whitespace = .{
+                 .indent = .Tab,
+                 .separator = false,
+             },
+         },
+     );
+ }
+
+ test "stringify struct with void field" {
+     try teststringify("{\"foo\":42}", struct {
+         foo: u32,
+         bar: void = {},
+     }{ .foo = 42 }, StringifyOptions{});
+ }
+
+ test "stringify array of structs" {
+     const MyStruct = struct {
+         foo: u32,
+     };
+     try teststringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
+         MyStruct{ .foo = 42 },
+         MyStruct{ .foo = 100 },
+         MyStruct{ .foo = 1000 },
+     }, StringifyOptions{});
+ }
+
+ test "stringify struct with custom stringifier" {
+     try teststringify("[\"something special\",42]", struct {
+         foo: u32,
+         const Self = @This();
+         pub fn jsonStringify(
+             _: Self,
+             options: StringifyOptions,
+             out_stream: anytype,
+         ) !void {
+             try out_stream.writeAll("[\"something special\",");
+             try stringify(42, options, out_stream);
+             try out_stream.writeByte(']');
+         }
+     }{ .foo = 42 }, StringifyOptions{});
+ }
+
+ test "stringify vector" {
+     try teststringify("[1,1]", @as(@Vector(2, u32), @splat(@as(u32, 1))), StringifyOptions{});
+ }
41
src/main.zig
@@ -1,6 +1,6 @@
const std = @import("std");
const aws = @import("aws.zig");
- const json = @import("json");
+ const json = @import("json.zig");

var verbose: u8 = 0;

@@ -32,10 +32,9 @@ pub fn log(
    const prefix = "[" ++ @tagName(level) ++ "] " ++ scope_prefix;

    // Print the message to stderr, silently ignoring any errors
-   std.debug.lockStdErr();
-   defer std.debug.unlockStdErr();
-   var stderr_writer = std.fs.File.stderr().writer(&.{});
-   const stderr = &stderr_writer.interface;
+   std.debug.getStderrMutex().lock();
+   defer std.debug.getStderrMutex().unlock();
+   const stderr = std.io.getStdErr().writer();
    nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return;
}

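The log-handler hunk above captures the stderr API split: zig-0.12.0 exposes the mutex and an unbuffered writer directly, while master goes through `std.debug.lockStdErr()` and `std.fs.File.stderr()`. For reference, a minimal 0.12-style handler (assuming 0.12 std names; drop-in shape only, not the repository's exact code):

```zig
const std = @import("std");

pub fn log(
    comptime level: std.log.Level,
    comptime scope: @TypeOf(.EnumLiteral),
    comptime format: []const u8,
    args: anytype,
) void {
    _ = scope;
    const prefix = "[" ++ comptime level.asText() ++ "] ";
    // Serialize writers across threads, then write unbuffered to stderr.
    std.debug.getStderrMutex().lock();
    defer std.debug.getStderrMutex().unlock();
    const stderr = std.io.getStdErr().writer();
    nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return;
}
```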
@@ -63,14 +62,14 @@ pub fn main() anyerror!void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();
-   var tests = try std.ArrayList(Tests).initCapacity(allocator, @typeInfo(Tests).@"enum".fields.len);
-   defer tests.deinit(allocator);
+   var tests = std.ArrayList(Tests).init(allocator);
+   defer tests.deinit();
    var args = try std.process.argsWithAllocator(allocator);
    defer args.deinit();
-   var stdout_buf: [4096]u8 = undefined;
-   const stdout_raw = std.fs.File.stdout().writer(&stdout_buf);
-   var stdout = stdout_raw.interface;
-   defer stdout.flush() catch @panic("could not flush stdout");
+   const stdout_raw = std.io.getStdOut().writer();
+   var bw = std.io.bufferedWriter(stdout_raw);
+   defer bw.flush() catch unreachable;
+   const stdout = bw.writer();
    var arg0: ?[]const u8 = null;
    var proxy: ?std.http.Client.Proxy = null;
    while (args.next()) |arg| {
@@ -98,16 +97,16 @@ pub fn main() anyerror!void {
            }
            continue;
        }
-       inline for (@typeInfo(Tests).@"enum".fields) |f| {
+       inline for (@typeInfo(Tests).Enum.fields) |f| {
            if (std.mem.eql(u8, f.name, arg)) {
-               try tests.append(allocator, @field(Tests, f.name));
+               try tests.append(@field(Tests, f.name));
                break;
            }
        }
    }
    if (tests.items.len == 0) {
-       inline for (@typeInfo(Tests).@"enum".fields) |f|
-           try tests.append(allocator, @field(Tests, f.name));
+       inline for (@typeInfo(Tests).Enum.fields) |f|
+           try tests.append(@field(Tests, f.name));
    }

    std.log.info("Start\n", .{});
@@ -193,8 +192,8 @@ pub fn main() anyerror!void {
    const func = fns[0];
    const arn = func.function_arn.?;
    // This is a bit ugly. Maybe a helper function in the library would help?
-   var tags = try std.ArrayList(aws.services.lambda.TagKeyValue).initCapacity(allocator, 1);
-   defer tags.deinit(allocator);
+   var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+   defer tags.deinit();
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
    const addtag = try aws.Request(services.lambda.tag_resource).call(req, options);
@@ -263,7 +262,7 @@ pub fn main() anyerror!void {
    defer result.deinit();
    std.log.info("request id: {s}", .{result.response_metadata.request_id});
    const list = result.response.key_group_list.?;
-   std.log.info("key group list max: {d}", .{list.max_items});
+   std.log.info("key group list max: {?d}", .{list.max_items});
    std.log.info("key group quantity: {d}", .{list.quantity});
},
.rest_xml_work_with_s3 => {
@@ -372,7 +371,7 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
        rc.port = 443;
        rc.protocol = .tls;
    } else return error.InvalidScheme;
-   var split_iterator = std.mem.splitScalar(u8, remaining, ':');
+   var split_iterator = std.mem.split(u8, remaining, ":");
    rc.host = std.mem.trimRight(u8, split_iterator.first(), "/");
    if (split_iterator.next()) |port|
        rc.port = try std.fmt.parseInt(u16, port, 10);
@@ -381,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
    const ti = @typeInfo(T);
    switch (ti) {
-       .@"struct" => {
-           inline for (ti.@"struct".fields) |field| {
+       .Struct => {
+           inline for (ti.Struct.fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.type;
            }

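One smaller rename in this file: `std.mem.split` with a one-byte `[]const u8` delimiter (0.12) becomes `std.mem.splitScalar` with a `u8` delimiter on master. A quick sketch of the newer call (newer std assumed; the host:port string is illustrative):

```zig
const std = @import("std");

test "splitScalar over host:port" {
    var it = std.mem.splitScalar(u8, "proxy.example.com:8080", ':');
    try std.testing.expectEqualStrings("proxy.example.com", it.first());
    try std.testing.expectEqualStrings("8080", it.next().?);
    try std.testing.expectEqual(@as(?[]const u8, null), it.next());
}
```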
@@ -1,5 +1,5 @@
const std = @import("std");
- const service_list = @import("service_manifest");
+ const service_list = @import("models/service_manifest.zig");
const expectEqualStrings = std.testing.expectEqualStrings;

pub fn Services(comptime service_imports: anytype) type {
@@ -12,15 +12,15 @@ pub fn Services(comptime service_imports: anytype) type {
        item.* = .{
            .name = @tagName(service_imports[i]),
            .type = @TypeOf(import_field),
-           .default_value_ptr = &import_field,
+           .default_value = &import_field,
            .is_comptime = false,
-           .alignment = std.meta.alignment(@TypeOf(import_field)),
+           .alignment = 0,
        };
    }

    // finally, generate the type
    return @Type(.{
-       .@"struct" = .{
+       .Struct = .{
            .layout = .auto,
            .fields = &fields,
            .decls = &[_]std.builtin.Type.Declaration{},
@@ -39,7 +39,7 @@ fn serviceCount(desired_services: anytype) usize {
pub const services = service_list;

test "services includes sts" {
-   try expectEqualStrings("2011-06-15", services.sts.version.?);
+   try expectEqualStrings("2011-06-15", services.sts.version);
}
test "sts includes get_caller_identity" {
    try expectEqualStrings("GetCallerIdentity", services.sts.get_caller_identity.action_name);
@@ -47,9 +47,9 @@ test "sts includes get_caller_identity" {
test "can get service and action name from request" {
    // get request object. This call doesn't have parameters
    const metadata = services.sts.get_caller_identity.Request.metaInfo();
-   try expectEqualStrings("2011-06-15", metadata.service_metadata.version.?);
+   try expectEqualStrings("2011-06-15", metadata.service_metadata.version);
}
test "can filter services" {
    const filtered_services = Services(.{ .sts, .wafv2 }){};
-   try expectEqualStrings("2011-06-15", filtered_services.sts.version.?);
+   try expectEqualStrings("2011-06-15", filtered_services.sts.version);
}

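The hunks above hit both `std.builtin.Type` breaking changes that matter when synthesizing struct types: `default_value` became `default_value_ptr`, and the placeholder `alignment = 0` gives way to a computed alignment. A minimal sketch of the master-side shape (post-0.14 std assumed; on 0.12 substitute `.default_value`, `.Struct`, and `alignment = 0` — the `SingleField` helper is illustrative):

```zig
const std = @import("std");

fn SingleField(comptime name: [:0]const u8, comptime default: u32) type {
    const fields = [_]std.builtin.Type.StructField{.{
        .name = name,
        .type = u32,
        .default_value_ptr = &default, // was .default_value on 0.12
        .is_comptime = false,
        .alignment = @alignOf(u32), // 0 was accepted on 0.12
    }};
    return @Type(.{ .@"struct" = .{
        .layout = .auto,
        .fields = &fields,
        .decls = &.{},
        .is_tuple = false,
    } });
}

test "comptime-built struct gets its default" {
    const T = SingleField("answer", 42);
    const t = T{};
    try std.testing.expectEqual(@as(u32, 42), t.answer);
}
```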
177
src/url.zig
@@ -11,7 +11,7 @@ pub const EncodingOptions = struct {
    field_name_transformer: fieldNameTransformerFn = defaultTransformer,
};

- pub fn encode(allocator: std.mem.Allocator, obj: anytype, writer: *std.Io.Writer, comptime options: EncodingOptions) !void {
+ pub fn encode(allocator: std.mem.Allocator, obj: anytype, writer: anytype, comptime options: EncodingOptions) !void {
    _ = try encodeInternal(allocator, "", "", true, obj, writer, options);
}

@@ -20,15 +20,14 @@ fn encodeStruct(
    parent: []const u8,
    first: bool,
    obj: anytype,
-   writer: *std.Io.Writer,
+   writer: anytype,
    comptime options: EncodingOptions,
) !bool {
    var rc = first;
-   var arena = std.heap.ArenaAllocator.init(allocator);
-   defer arena.deinit();
-   const arena_alloc = arena.allocator();
-   inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
-       const field_name = try options.field_name_transformer(arena_alloc, field.name);
+   inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
+       const field_name = try options.field_name_transformer(allocator, field.name);
+       defer if (options.field_name_transformer.* != defaultTransformer)
+           allocator.free(field_name);
        // @compileLog(@typeInfo(field.field_type).Pointer);
        rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
    }
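The `encodeStruct` hunk swaps per-name bookkeeping for an arena: instead of conditionally freeing each transformed field name (the 0.12 side), master allocates names from an `ArenaAllocator` and releases them all with one `deinit`. The pattern in isolation (the `upperDupe` transformer is illustrative, not the repository's transformer):

```zig
const std = @import("std");

fn upperDupe(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
    const out = try allocator.dupe(u8, name);
    for (out) |*c| c.* = std.ascii.toUpper(c.*);
    return out;
}

test "arena frees every transformed field name at once" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit(); // one free for all names below
    const alloc = arena.allocator();
    const a = try upperDupe(alloc, "action");
    const b = try upperDupe(alloc, "version");
    try std.testing.expectEqualStrings("ACTION", a);
    try std.testing.expectEqualStrings("VERSION", b);
}
```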
@@ -41,37 +40,28 @@ pub fn encodeInternal(
    field_name: []const u8,
    first: bool,
    obj: anytype,
-   writer: *std.Io.Writer,
+   writer: anytype,
    comptime options: EncodingOptions,
) !bool {
    // @compileLog(@typeName(@TypeOf(obj)));
    // @compileLog(@typeInfo(@TypeOf(obj)));
    var rc = first;
    switch (@typeInfo(@TypeOf(obj))) {
-       .optional => if (obj) |o| {
+       .Optional => if (obj) |o| {
            rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
        },
-       .pointer => |ti| if (ti.size == .one) {
+       .Pointer => |ti| if (ti.size == .One) {
            rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
        } else {
            if (!first) _ = try writer.write("&");
            // @compileLog(@typeInfo(@TypeOf(obj)));
-           switch (ti.child) {
-               // TODO: not sure this first one is valid. How should [][]const u8 be serialized here?
-               []const u8 => {
-                   // if (true) @panic("panic at the disco!");
-                   std.log.warn(
-                       "encoding object of type [][]const u8...pretty sure this is wrong {s}{s}={any}",
-                       .{ parent, field_name, obj },
-                   );
-                   try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
-               },
-               u8 => try writer.print("{s}{s}={s}", .{ parent, field_name, obj }),
-               else => try writer.print("{s}{s}={any}", .{ parent, field_name, obj }),
-           }
+           if (ti.child == []const u8 or ti.child == u8)
+               try writer.print("{s}{s}={s}", .{ parent, field_name, obj })
+           else
+               try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
            rc = false;
        },
-       .@"struct" => if (std.mem.eql(u8, "", field_name)) {
+       .Struct => if (std.mem.eql(u8, "", field_name)) {
            rc = try encodeStruct(allocator, parent, first, obj, writer, options);
        } else {
            // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
@@ -83,12 +73,12 @@ pub fn encodeInternal(
            rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
            // try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options);
        },
-       .array => {
+       .Array => {
            if (!first) _ = try writer.write("&");
            try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
            rc = false;
        },
-       .int, .comptime_int, .float, .comptime_float => {
+       .Int, .ComptimeInt, .Float, .ComptimeFloat => {
            if (!first) _ = try writer.write("&");
            try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
            rc = false;
@@ -104,29 +94,78 @@ pub fn encodeInternal(
    return rc;
}

+ fn testencode(allocator: std.mem.Allocator, expected: []const u8, value: anytype, comptime options: EncodingOptions) !void {
+     const ValidationWriter = struct {
+         const Self = @This();
+         pub const Writer = std.io.Writer(*Self, Error, write);
+         pub const Error = error{
+             TooMuchData,
+             DifferentData,
+         };
+
+         expected_remaining: []const u8,
+
+         fn init(exp: []const u8) Self {
+             return .{ .expected_remaining = exp };
+         }
+
+         pub fn writer(self: *Self) Writer {
+             return .{ .context = self };
+         }
+
+         fn write(self: *Self, bytes: []const u8) Error!usize {
+             // std.debug.print("{s}\n", .{bytes});
+             if (self.expected_remaining.len < bytes.len) {
+                 std.log.warn(
+                     \\====== expected this output: =========
+                     \\{s}
+                     \\======== instead found this: =========
+                     \\{s}
+                     \\======================================
+                 , .{
+                     self.expected_remaining,
+                     bytes,
+                 });
+                 return error.TooMuchData;
+             }
+             if (!std.mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
+                 std.log.warn(
+                     \\====== expected this output: =========
+                     \\{s}
+                     \\======== instead found this: =========
+                     \\{s}
+                     \\======================================
+                 , .{
+                     self.expected_remaining[0..bytes.len],
+                     bytes,
+                 });
+                 return error.DifferentData;
+             }
+             self.expected_remaining = self.expected_remaining[bytes.len..];
+             return bytes.len;
+         }
+     };
+
+     var vos = ValidationWriter.init(expected);
+     try encode(allocator, value, vos.writer(), options);
+     if (vos.expected_remaining.len > 0) return error.NotEnoughData;
+ }
+
test "can urlencode an object" {
-   const expected = "Action=GetCallerIdentity&Version=2021-01-01";
-   var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
-   defer aw.deinit();
-   try encode(
+   try testencode(
        std.testing.allocator,
+       "Action=GetCallerIdentity&Version=2021-01-01",
        .{ .Action = "GetCallerIdentity", .Version = "2021-01-01" },
-       &aw.writer,
        .{},
    );
-   try std.testing.expectEqualStrings(expected, aw.written());
}
test "can urlencode an object with integer" {
-   const expected = "Action=GetCallerIdentity&Duration=32";
-   var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
-   defer aw.deinit();
-   try encode(
+   try testencode(
        std.testing.allocator,
+       "Action=GetCallerIdentity&Duration=32",
        .{ .Action = "GetCallerIdentity", .Duration = 32 },
-       &aw.writer,
        .{},
    );
-   try std.testing.expectEqualStrings(expected, aw.written());
}
const UnsetValues = struct {
    action: ?[]const u8 = null,
@@ -135,28 +174,30 @@ const UnsetValues = struct {
    val2: ?[]const u8 = null,
};
test "can urlencode an object with unset values" {
-   const expected = "action=GetCallerIdentity&duration=32";
-   var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
-   defer aw.deinit();
-   try encode(
+   // var buffer = std.ArrayList(u8).init(std.testing.allocator);
+   // defer buffer.deinit();
+   // const writer = buffer.writer();
+   // try encode(
+   //     std.testing.allocator,
+   //     UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
+   //     writer,
+   //     .{},
+   // );
+   // std.debug.print("\n\nEncoded as '{s}'\n", .{buffer.items});
+   try testencode(
        std.testing.allocator,
+       "action=GetCallerIdentity&duration=32",
        UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
-       &aw.writer,
        .{},
    );
-   try std.testing.expectEqualStrings(expected, aw.written());
}
test "can urlencode a complex object" {
-   const expected = "Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo";
-   var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
-   defer aw.deinit();
-   try encode(
+   try testencode(
        std.testing.allocator,
+       "Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo",
        .{ .Action = "GetCallerIdentity", .Version = "2021-01-01", .complex = .{ .innermember = "foo" } },
-       &aw.writer,
        .{},
    );
-   try std.testing.expectEqualStrings(expected, aw.written());
}

const Filter = struct {
@@ -179,28 +220,26 @@ const Request: type = struct {
    all_regions: ?bool = null,
};
test "can urlencode an EC2 Filter" {
-   // TODO: This is a strange test, mainly to document current behavior
-   // EC2 filters are supposed to be something like
-   // Filter.Name=foo&Filter.Values=bar or, when there is more, something like
-   // Filter.1.Name=instance-type&Filter.1.Value.1=m1.small&Filter.1.Value.2=m1.large&Filter.2.Name=block-device-mapping.status&Filter.2.Value.1=attached
-   //
-   // This looks like a real PITA, so until it is actually needed, this is
-   // a placeholder test to track what actual encoding is happening. This
-   // changed between zig 0.14.x and 0.15.1, and I'm not entirely sure why
-   // yet, but because the remaining functionality is fine, we're going with
-   // this
-   const zig_14x_expected = "filters={ url.Filter{ .name = { 102, 111, 111 }, .values = { { ... } } } }";
-   _ = zig_14x_expected;
-   const expected = "filters={ .{ .name = { 102, 111, 111 }, .values = { { ... } } } }";
-   var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
-   defer aw.deinit();
-   try encode(
+   // TODO: Fix this encoding...
+   testencode(
        std.testing.allocator,
+       "filters={ url.Filter{ .name = { 102, 111, 111 }, .values = { { ... } } } }",
        Request{
            .filters = @constCast(&[_]Filter{.{ .name = "foo", .values = @constCast(&[_][]const u8{"bar"}) }}),
        },
-       &aw.writer,
        .{},
-   );
-   try std.testing.expectEqualStrings(expected, aw.written());
+   ) catch |err| {
+       var al = std.ArrayList(u8).init(std.testing.allocator);
+       defer al.deinit();
+       try encode(
+           std.testing.allocator,
+           Request{
+               .filters = @constCast(&[_]Filter{.{ .name = "foo", .values = @constCast(&[_][]const u8{"bar"}) }}),
+           },
+           al.writer(),
+           .{},
+       );
+       std.log.warn("Error found. Full encoding is '{s}'", .{al.items});
+       return err;
+   };
}

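The test rewrites in this file trace the writer split: zig-0.12.0 validates bytes as they are written through a custom `std.io.Writer(*Self, Error, write)` shim, while master collects everything in `std.Io.Writer.Allocating` and compares at the end. A sketch of the master-style assertion (0.15-era `std.Io.Writer` assumed):

```zig
const std = @import("std");

test "collect writer output, then compare" {
    var aw = std.Io.Writer.Allocating.init(std.testing.allocator);
    defer aw.deinit();
    // Anything that takes *std.Io.Writer can write into &aw.writer.
    try aw.writer.print("{s}={d}", .{ "Duration", 32 });
    try std.testing.expectEqualStrings("Duration=32", aw.written());
}
```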
51
src/xml.zig
@@ -25,15 +25,12 @@ pub const Element = struct {
    tag: []const u8,
    attributes: AttributeList,
    children: ContentList,
-   next_sibling: ?*Element = null,
-   allocator: std.mem.Allocator,

    fn init(tag: []const u8, alloc: Allocator) Element {
        return .{
            .tag = tag,
-           .attributes = AttributeList{},
-           .children = ContentList{},
-           .allocator = alloc,
+           .attributes = AttributeList.init(alloc),
+           .children = ContentList.init(alloc),
        };
    }

@@ -350,7 +347,7 @@ fn parseDocument(ctx: *ParseContext, backing_allocator: Allocator) !Document {
    _ = ctx.eatWs();
    try trySkipComments(ctx, allocator);

-   doc.root = (try tryParseElement(ctx, allocator, null)) orelse return error.InvalidDocument;
+   doc.root = (try tryParseElement(ctx, allocator)) orelse return error.InvalidDocument;
    _ = ctx.eatWs();
    try trySkipComments(ctx, allocator);

@@ -418,12 +415,12 @@ fn tryParseCharData(ctx: *ParseContext, alloc: Allocator) !?[]const u8 {
    return try dupeAndUnescape(alloc, ctx.source[begin..end]);
}

- fn parseContent(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) ParseError!Content {
+ fn parseContent(ctx: *ParseContext, alloc: Allocator) ParseError!Content {
    if (try tryParseCharData(ctx, alloc)) |cd| {
        return Content{ .CharData = cd };
    } else if (try tryParseComment(ctx, alloc)) |comment| {
        return Content{ .Comment = comment };
-   } else if (try tryParseElement(ctx, alloc, parent)) |elem| {
+   } else if (try tryParseElement(ctx, alloc)) |elem| {
        return Content{ .Element = elem };
    } else {
        return error.UnexpectedCharacter;
@@ -443,7 +440,7 @@ fn tryParseAttr(ctx: *ParseContext, alloc: Allocator) !?*Attribute {
    return attr;
}

- fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*Element {
+ fn tryParseElement(ctx: *ParseContext, alloc: Allocator) !?*Element {
    const start = ctx.offset;
    if (!ctx.eat('<')) return null;
    const tag = parseNameNoDupe(ctx) catch {
@@ -456,7 +453,7 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E

    while (ctx.eatWs()) {
        const attr = (try tryParseAttr(ctx, alloc)) orelse break;
-       try element.attributes.append(element.allocator, attr);
+       try element.attributes.append(attr);
    }

    if (ctx.eatStr("/>")) {
@@ -472,8 +469,8 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E
            break;
        }

-       const content = try parseContent(ctx, alloc, element);
-       try element.children.append(element.allocator, content);
+       const content = try parseContent(ctx, alloc);
+       try element.children.append(content);
    }

    const closing_tag = try parseNameNoDupe(ctx);
@@ -483,23 +480,6 @@ fn tryParseElement(ctx: *ParseContext, alloc: Allocator, parent: ?*Element) !?*E

    _ = ctx.eatWs();
    try ctx.expect('>');

-   if (parent) |p| {
-       var last_element: ?*Element = null;
-
-       for (0..p.children.items.len) |i| {
-           const child = p.children.items[p.children.items.len - i - 1];
-           if (child == .Element) {
-               last_element = child.Element;
-               break;
-           }
-       }
-
-       if (last_element) |lc| {
-           lc.next_sibling = element;
-       }
-   }
-
    return element;
}

@@ -510,13 +490,13 @@ test "tryParseElement" {

    {
        var ctx = ParseContext.init("<= a='b'/>");
-       try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc, null));
+       try testing.expectEqual(@as(?*Element, null), try tryParseElement(&ctx, alloc));
        try testing.expectEqual(@as(?u8, '<'), ctx.peek());
    }

    {
        var ctx = ParseContext.init("<python size='15' color = \"green\"/>");
-       const elem = try tryParseElement(&ctx, alloc, null);
+       const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "python");

        const size_attr = elem.?.attributes.items[0];
@@ -530,14 +510,14 @@ test "tryParseElement" {

    {
        var ctx = ParseContext.init("<python>test</python>");
-       const elem = try tryParseElement(&ctx, alloc, null);
+       const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "python");
        try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "test");
    }

    {
        var ctx = ParseContext.init("<a>b<c/>d<e/>f<!--g--></a>");
-       const elem = try tryParseElement(&ctx, alloc, null);
+       const elem = try tryParseElement(&ctx, alloc);
        try testing.expectEqualSlices(u8, elem.?.tag, "a");
        try testing.expectEqualSlices(u8, elem.?.children.items[0].CharData, "b");
        try testing.expectEqualSlices(u8, elem.?.children.items[1].Element.tag, "c");
@@ -673,10 +653,7 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {

    // This error is not strictly true, but we need to match one of the items
    // from the error set provided by the other stdlib calls at the calling site
-   if (!alloc.resize(str, j)) {
-       defer alloc.free(str);
-       return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
-   }
+   if (!alloc.resize(str, j)) return error.OutOfMemory;
    return str[0..j];
}

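The `dupeAndUnescape` tail above is a leak fix as much as an API change: `Allocator.resize` may legitimately refuse to shrink, and returning `error.OutOfMemory` on that path (the 0.12 side) leaks `str`. Master falls back to dupe-and-free instead. The same pattern in isolation (the `shrinkOrCopy` name is illustrative):

```zig
const std = @import("std");

fn shrinkOrCopy(alloc: std.mem.Allocator, buf: []u8, new_len: usize) ![]u8 {
    if (alloc.resize(buf, new_len)) return buf[0..new_len];
    // resize refused: copy the prefix we care about, then free the original
    defer alloc.free(buf);
    return alloc.dupe(u8, buf[0..new_len]);
}

test "shrinkOrCopy always returns an owned, exact-length slice" {
    const alloc = std.testing.allocator;
    const buf = try alloc.alloc(u8, 16);
    @memcpy(buf[0..5], "hello");
    const out = try shrinkOrCopy(alloc, buf, 5);
    defer alloc.free(out);
    try std.testing.expectEqualStrings("hello", out);
}
```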
@ -1,794 +0,0 @@
|
||||||
const std = @import("std");
|
|
||||||
const mem = std.mem;
|
|
||||||
const Allocator = mem.Allocator;
|
|
||||||
|
|
||||||
/// Options for controlling XML serialization behavior
|
|
||||||
pub const StringifyOptions = struct {
|
|
||||||
/// Controls whitespace insertion for easier human readability
|
|
||||||
whitespace: Whitespace = .minified,
|
|
||||||
|
|
||||||
/// Should optional fields with null value be written?
|
|
||||||
emit_null_optional_fields: bool = true,
|
|
||||||
|
|
||||||
// TODO: Implement
|
|
||||||
/// Arrays/slices of u8 are typically encoded as strings. This option emits them as arrays of numbers instead. Does not affect calls to objectField*().
|
|
||||||
emit_strings_as_arrays: bool = false,
|
|
||||||
|
|
||||||
/// Controls whether to include XML declaration at the beginning
|
|
||||||
include_declaration: bool = true,
|
|
||||||
|
|
||||||
/// Root element name to use when serializing a value that doesn't have a natural name
|
|
||||||
root_name: ?[]const u8 = "root",
|
|
||||||
|
|
||||||
/// Root attributes (e.g. xmlns="...") that will be added to the root element node only
|
|
||||||
root_attributes: []const u8 = "",
|
|
||||||
|
|
||||||
/// Function to determine the element name for an array item based on the element
|
|
||||||
/// name of the array containing the elements. See arrayElementPluralToSingluarTransformation
|
|
||||||
/// and arrayElementNoopTransformation functions for examples
|
|
||||||
arrayElementNameConversion: *const fn (allocator: std.mem.Allocator, name: ?[]const u8) error{OutOfMemory}!?[]const u8 = arrayElementPluralToSingluarTransformation,
|
|
||||||
|
|
||||||
pub const Whitespace = enum {
|
|
||||||
minified,
|
|
||||||
indent_1,
|
|
||||||
indent_2,
|
|
||||||
indent_3,
|
|
||||||
indent_4,
|
|
||||||
indent_8,
|
|
||||||
indent_tab,
|
|
||||||
};
|
|
||||||
};
|
|
-
-/// Error set for XML serialization
-pub const XmlSerializeError = error{
-    /// Unsupported type for XML serialization
-    UnsupportedType,
-    /// Out of memory
-    OutOfMemory,
-    /// Write error
-    WriteError,
-};
-
-/// Serializes a value to XML and writes it to the provided writer
-pub fn stringify(
-    value: anytype,
-    options: StringifyOptions,
-    writer: *std.Io.Writer,
-) !void {
-    // Write XML declaration if requested
-    if (options.include_declaration)
-        try writer.writeAll("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
-
-    // Start serialization with the root element
-    const root_name = options.root_name;
-    if (@typeInfo(@TypeOf(value)) != .optional or value == null)
-        try serializeValue(value, root_name, options, writer, 0)
-    else
-        try serializeValue(value.?, root_name, options, writer, 0);
-}
-
-/// Serializes a value to XML and returns an allocated string
-pub fn stringifyAlloc(
-    allocator: Allocator,
-    value: anytype,
-    options: StringifyOptions,
-) ![]u8 {
-    var list = std.Io.Writer.Allocating.init(allocator);
-    defer list.deinit();
-
-    try stringify(value, options, &list.writer);
-    return list.toOwnedSlice();
-}
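For orientation, a minimal usage sketch of the master-only API above, writing into a fixed buffer; std.Io.Writer.fixed and buffered() are assumed from the Zig 0.15 writer interface:

    test "stringify usage sketch" {
        var buf: [128]u8 = undefined;
        var w = std.Io.Writer.fixed(&buf);
        // anonymous struct serialized field by field under the default root
        try stringify(.{ .name = "test" }, .{ .include_declaration = false }, &w);
        try std.testing.expectEqualStrings("<root><name>test</name></root>", w.buffered());
    }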
-
-/// Internal function to serialize a value with proper indentation
-fn serializeValue(
-    value: anytype,
-    element_name: ?[]const u8,
-    options: StringifyOptions,
-    writer: *std.Io.Writer,
-    depth: usize,
-) !void {
-    const T = @TypeOf(value);
-
-    // const output_indent = !(!options.emit_null_optional_fields and @typeInfo(@TypeOf(value)) == .optional and value == null);
-    const output_indent = options.emit_null_optional_fields or @typeInfo(@TypeOf(value)) != .optional or value != null;
-
-    if (output_indent and element_name != null)
-        try writeIndent(writer, depth, options.whitespace);
-
-    // Start element tag
-    if (@typeInfo(T) != .optional and @typeInfo(T) != .array) {
-        if (element_name) |n| {
-            try writer.writeAll("<");
-            try writer.writeAll(n);
-            if (depth == 0 and options.root_attributes.len > 0) {
-                try writer.writeByte(' ');
-                try writer.writeAll(options.root_attributes);
-            }
-            try writer.writeAll(">");
-        }
-    }
-
-    // Handle different types
-    switch (@typeInfo(T)) {
-        .bool => try writer.writeAll(if (value) "true" else "false"),
-        .int, .comptime_int, .float, .comptime_float => try writer.print("{}", .{value}),
-        .pointer => |ptr_info| {
-            switch (ptr_info.size) {
-                .one => {
-                    // We don't want to write the opening tag a second time, so
-                    // we will pass null, then come back and close before returning
-                    //
-                    // ...but...in the event of a *[]const u8, we do want to pass that in,
-                    // but only if emit_strings_as_arrays is true
-                    const child_ti = @typeInfo(ptr_info.child);
-                    const el_name = if (options.emit_strings_as_arrays and child_ti == .array and child_ti.array.child == u8)
-                        element_name
-                    else
-                        null;
-                    try serializeValue(value.*, el_name, options, writer, depth);
-                    try writeClose(writer, element_name);
-                    return;
-                },
-                .slice => {
-                    if (ptr_info.child == u8) {
-                        // String type
-                        try serializeString(writer, element_name, value, options, depth);
-                    } else {
-                        // Array of values
-                        if (options.whitespace != .minified) {
-                            try writer.writeByte('\n');
-                        }
-
-                        var buf: [256]u8 = undefined;
-                        var fba = std.heap.FixedBufferAllocator.init(&buf);
-                        const alloc = fba.allocator();
-                        const item_name = try options.arrayElementNameConversion(alloc, element_name);
-
-                        for (value) |item| {
-                            try serializeValue(item, item_name, options, writer, depth + 1);
-                            if (options.whitespace != .minified) {
-                                try writer.writeByte('\n');
-                            }
-                        }
-
-                        try writeIndent(writer, depth, options.whitespace);
-                    }
-                },
-                else => return error.UnsupportedType,
-            }
-        },
-        .array => |array_info| {
-            if (!options.emit_strings_as_arrays or array_info.child != u8) {
-                if (element_name) |n| {
-                    try writer.writeAll("<");
-                    try writer.writeAll(n);
-                    try writer.writeAll(">");
-                }
-            }
-            if (array_info.child == u8) {
-                // Fixed-size string
-                const slice = &value;
-                try serializeString(writer, element_name, slice, options, depth);
-            } else {
-                // Fixed-size array
-                if (options.whitespace != .minified) {
-                    try writer.writeByte('\n');
-                }
-
-                var buf: [256]u8 = undefined;
-                var fba = std.heap.FixedBufferAllocator.init(&buf);
-                const alloc = fba.allocator();
-                const item_name = try options.arrayElementNameConversion(alloc, element_name);
-
-                for (value) |item| {
-                    try serializeValue(item, item_name, options, writer, depth + 1);
-                    if (options.whitespace != .minified) {
-                        try writer.writeByte('\n');
-                    }
-                }
-
-                try writeIndent(writer, depth, options.whitespace);
-            }
-            if (!options.emit_strings_as_arrays or array_info.child != u8)
-                try writeClose(writer, element_name);
-            return;
-        },
-        .@"struct" => |struct_info| {
-            if (options.whitespace != .minified) {
-                try writer.writeByte('\n');
-            }
-
-            inline for (struct_info.fields) |field| {
-                const field_name =
-                    if (std.meta.hasFn(T, "fieldNameFor"))
-                        value.fieldNameFor(field.name)
-                    else
-                        field.name; // TODO: field mapping
-
-                const field_value = @field(value, field.name);
-                try serializeValue(
-                    field_value,
-                    field_name,
-                    options,
-                    writer,
-                    depth + 1,
-                );
-
-                if (options.whitespace != .minified) {
-                    if (!options.emit_null_optional_fields and @typeInfo(@TypeOf(field_value)) == .optional and field_value == null) {
-                        // Skip writing anything
-                    } else {
-                        try writer.writeByte('\n');
-                    }
-                }
-            }
-
-            try writeIndent(writer, depth, options.whitespace);
-        },
-        .optional => {
-            if (options.emit_null_optional_fields or value != null) {
-                if (element_name) |n| {
-                    try writer.writeAll("<");
-                    try writer.writeAll(n);
-                    try writer.writeAll(">");
-                }
-            }
-            if (value) |payload| {
-                try serializeValue(payload, null, options, writer, depth);
-            } else {
-                // For null values, we'll write an empty element
-                // We've already written the opening tag, so just close it immediately
-                if (options.emit_null_optional_fields)
-                    try writeClose(writer, element_name);
-                return;
-            }
-        },
-        .null => {
-            // Empty element
-        },
-        .@"enum" => {
-            try std.fmt.format(writer, "{s}", .{@tagName(value)});
-        },
-        .@"union" => |union_info| {
-            if (union_info.tag_type) |_| {
-                inline for (union_info.fields) |field| {
-                    if (@field(std.meta.Tag(T), field.name) == std.meta.activeTag(value)) {
-                        try serializeValue(
-                            @field(value, field.name),
-                            field.name,
-                            options,
-                            writer,
-                            depth,
-                        );
-                        break;
-                    }
-                }
-            } else {
-                return error.UnsupportedType;
-            }
-        },
-        else => return error.UnsupportedType,
-    }
-
-    try writeClose(writer, element_name);
-}
-
-fn writeClose(writer: *std.Io.Writer, element_name: ?[]const u8) !void {
-    // Close element tag
-    if (element_name) |n| {
-        try writer.writeAll("</");
-        try writer.writeAll(n);
-        try writer.writeAll(">");
-    }
-}
-
-/// Writes indentation based on depth and indent level
-fn writeIndent(writer: *std.Io.Writer, depth: usize, whitespace: StringifyOptions.Whitespace) std.Io.Writer.Error!void {
-    var char: u8 = ' ';
-    const n_chars = switch (whitespace) {
-        .minified => return,
-        .indent_1 => 1 * depth,
-        .indent_2 => 2 * depth,
-        .indent_3 => 3 * depth,
-        .indent_4 => 4 * depth,
-        .indent_8 => 8 * depth,
-        .indent_tab => blk: {
-            char = '\t';
-            break :blk depth;
-        },
-    };
-    try writer.splatBytesAll(&.{char}, n_chars);
-}
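writeIndent emits depth times N copies of the indent character via splatBytesAll; a small sketch of the expected output (again assuming the Zig 0.15 fixed-buffer writer):

    test "writeIndent sketch" {
        var buf: [16]u8 = undefined;
        var w = std.Io.Writer.fixed(&buf);
        try writeIndent(&w, 2, .indent_4); // depth 2 at 4 spaces per level
        try std.testing.expectEqualStrings("        ", w.buffered());
    }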
-
-fn serializeString(
-    writer: *std.Io.Writer,
-    element_name: ?[]const u8,
-    value: []const u8,
-    options: StringifyOptions,
-    depth: usize,
-) error{ WriteFailed, OutOfMemory }!void {
-    if (options.emit_strings_as_arrays) {
-        // if (true) return error.seestackrun;
-        for (value) |c| {
-            try writeIndent(writer, depth + 1, options.whitespace);
-
-            var buf: [256]u8 = undefined;
-            var fba = std.heap.FixedBufferAllocator.init(&buf);
-            const alloc = fba.allocator();
-            const item_name = try options.arrayElementNameConversion(alloc, element_name);
-            if (item_name) |n| {
-                try writer.writeAll("<");
-                try writer.writeAll(n);
-                try writer.writeAll(">");
-            }
-            try writer.print("{d}", .{c});
-            try writeClose(writer, item_name);
-            if (options.whitespace != .minified) {
-                try writer.writeByte('\n');
-            }
-        }
-        return;
-    }
-    try escapeString(writer, value);
-}
-
-/// Escapes special characters in XML strings
-fn escapeString(writer: *std.Io.Writer, value: []const u8) std.Io.Writer.Error!void {
-    for (value) |c| {
-        switch (c) {
-            '&' => try writer.writeAll("&amp;"),
-            '<' => try writer.writeAll("&lt;"),
-            '>' => try writer.writeAll("&gt;"),
-            '"' => try writer.writeAll("&quot;"),
-            '\'' => try writer.writeAll("&apos;"),
-            else => try writer.writeByte(c),
-        }
-    }
-}
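The five XML-reserved characters map to their standard entities (the literal entities were decoded by the diff viewer; the reconstruction above restores them). A quick sketch of the expected behavior, using the same assumed fixed-buffer writer:

    test "escapeString sketch" {
        var buf: [64]u8 = undefined;
        var w = std.Io.Writer.fixed(&buf);
        try escapeString(&w, "a<b & 'c'");
        try std.testing.expectEqualStrings("a&lt;b &amp; &apos;c&apos;", w.buffered());
    }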
-
-/// Does no transformation on the input array
-pub fn arrayElementNoopTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
-    _ = allocator;
-    return name;
-}
-
-/// Attempts to convert a plural name to singular for array items
-pub fn arrayElementPluralToSingluarTransformation(allocator: std.mem.Allocator, name: ?[]const u8) !?[]const u8 {
-    if (name == null or name.?.len < 3) return name;
-
-    const n = name.?;
-    // There are a ton of these words, I'm just adding two for now
-    // https://wordmom.com/nouns/end-e
-    const es_exceptions = &[_][]const u8{
-        "types",
-        "bytes",
-    };
-    for (es_exceptions) |exception| {
-        if (std.mem.eql(u8, exception, n)) {
-            return n[0 .. n.len - 1];
-        }
-    }
-    // Very basic English pluralization rules
-    if (std.mem.endsWith(u8, n, "s")) {
-        if (std.mem.endsWith(u8, n, "ies")) {
-            // e.g., "entries" -> "entry"
-            return try std.mem.concat(allocator, u8, &[_][]const u8{ n[0 .. n.len - 3], "y" });
-        } else if (std.mem.endsWith(u8, n, "es")) {
-            return n[0 .. n.len - 2]; // e.g., "boxes" -> "box"
-        } else {
-            return n[0 .. n.len - 1]; // e.g., "items" -> "item"
-        }
-    }
-
-    return name; // Not recognized as plural
-}
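A few illustrative conversions; note that only the "ies" branch allocates, while the other cases return a slice of the input:

    test "plural to singular sketch" {
        var buf: [64]u8 = undefined;
        var fba = std.heap.FixedBufferAllocator.init(&buf);
        const alloc = fba.allocator();
        // "types" hits the exception list; "entries" allocates; "items" just drops the 's'
        try std.testing.expectEqualStrings("type", (try arrayElementPluralToSingluarTransformation(alloc, "types")).?);
        try std.testing.expectEqualStrings("entry", (try arrayElementPluralToSingluarTransformation(alloc, "entries")).?);
        try std.testing.expectEqualStrings("item", (try arrayElementPluralToSingluarTransformation(alloc, "items")).?);
    }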
-
-// Tests
-test "stringify basic types" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    // Test boolean
-    {
-        const result = try stringifyAlloc(allocator, true, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>true</root>", result);
-    }
-
-    // Test comptime integer
-    {
-        const result = try stringifyAlloc(allocator, 42, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
-    }
-
-    // Test integer
-    {
-        const result = try stringifyAlloc(allocator, @as(usize, 42), .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>42</root>", result);
-    }
-
-    // Test float
-    {
-        const result = try stringifyAlloc(allocator, 3.14, .{});
-        defer allocator.free(result);
-        // zig 0.14.x outputs 3.14e0, but zig 0.15.1 outputs 3.14. Either *should* be acceptable
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>3.14</root>", result);
-    }
-
-    // Test string
-    {
-        const result = try stringifyAlloc(allocator, "hello", .{});
-        // @compileLog(@typeInfo(@TypeOf("hello")).pointer.size);
-        // @compileLog(@typeName(@typeInfo(@TypeOf("hello")).pointer.child));
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello</root>", result);
-    }
-
-    // Test string with special characters
-    {
-        const result = try stringifyAlloc(allocator, "hello & world < > \" '", .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>hello &amp; world &lt; &gt; &quot; &apos;</root>", result);
-    }
-}
-
-test "stringify arrays" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    // Test array of integers
-    {
-        const arr = [_]i32{ 1, 2, 3 };
-        const result = try stringifyAlloc(allocator, arr, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>1</root><root>2</root><root>3</root></root>", result);
-    }
-
-    // Test array of strings
-    {
-        const arr = [_][]const u8{ "one", "two", "three" };
-        const result = try stringifyAlloc(allocator, arr, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><root>one</root><root>two</root><root>three</root></root>", result);
-    }
-
-    // Test array with custom root name
-    {
-        const arr = [_]i32{ 1, 2, 3 };
-        const result = try stringifyAlloc(allocator, arr, .{ .root_name = "items" });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<items><item>1</item><item>2</item><item>3</item></items>", result);
-    }
-}
-
-test "stringify structs" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        name: []const u8,
-        age: u32,
-        is_active: bool,
-    };
-
-    // Test basic struct
-    {
-        const person = Person{
-            .name = "John",
-            .age = 30,
-            .is_active = true,
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><age>30</age><is_active>true</is_active></root>", result);
-    }
-
-    // Test struct with pretty printing
-    {
-        const person = Person{
-            .name = "John",
-            .age = 30,
-            .is_active = true,
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <age>30</age>\n    <is_active>true</is_active>\n</root>", result);
-    }
-
-    // Test nested struct
-    {
-        const Address = struct {
-            street: []const u8,
-            city: []const u8,
-        };
-
-        const PersonWithAddress = struct {
-            name: []const u8,
-            address: Address,
-        };
-
-        const person = PersonWithAddress{
-            .name = "John",
-            .address = Address{
-                .street = "123 Main St",
-                .city = "Anytown",
-            },
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_4 });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root>\n    <name>John</name>\n    <address>\n        <street>123 Main St</street>\n        <city>Anytown</city>\n    </address>\n</root>", result);
-    }
-}
-
-test "stringify optional values" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        name: []const u8,
-        middle_name: ?[]const u8,
-    };
-
-    // Test with present optional
-    {
-        const person = Person{
-            .name = "John",
-            .middle_name = "Robert",
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
-    }
-
-    // Test with null optional
-    {
-        const person = Person{
-            .name = "John",
-            .middle_name = null,
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{});
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name></middle_name></root>", result);
-    }
-}
-
-test "stringify optional values with emit_null_optional_fields == false" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        name: []const u8,
-        middle_name: ?[]const u8,
-    };
-
-    // Test with present optional
-    {
-        const person = Person{
-            .name = "John",
-            .middle_name = "Robert",
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name><middle_name>Robert</middle_name></root>", result);
-    }
-
-    // Test with null optional
-    {
-        const person = Person{
-            .name = "John",
-            .middle_name = null,
-        };
-
-        const result = try stringifyAlloc(allocator, person, .{ .emit_null_optional_fields = false });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<root><name>John</name></root>", result);
-    }
-}
-
-test "stringify with custom options" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        first_name: []const u8,
-        last_name: []const u8,
-    };
-
-    const person = Person{
-        .first_name = "John",
-        .last_name = "Doe",
-    };
-
-    // Test without XML declaration
-    {
-        const result = try stringifyAlloc(allocator, person, .{ .include_declaration = false });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<root><first_name>John</first_name><last_name>Doe</last_name></root>", result);
-    }
-
-    // Test with custom root name
-    {
-        const result = try stringifyAlloc(allocator, person, .{ .root_name = "person" });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<person><first_name>John</first_name><last_name>Doe</last_name></person>", result);
-    }
-
-    // Test with custom indent level
-    {
-        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
-        defer allocator.free(result);
-        try testing.expectEqualStrings(
-            \\<?xml version="1.0" encoding="UTF-8"?>
-            \\<root>
-            \\  <first_name>John</first_name>
-            \\  <last_name>Doe</last_name>
-            \\</root>
-        , result);
-    }
-
-    // Test with output []u8 as array
-    {
-        // pointer, size 1, child == .array, child.array.child == u8
-        // @compileLog(@typeInfo(@typeInfo(@TypeOf("foo")).pointer.child));
-        const result = try stringifyAlloc(allocator, "foo", .{ .emit_strings_as_arrays = true, .root_name = "bytes" });
-        defer allocator.free(result);
-        try testing.expectEqualStrings("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<bytes><byte>102</byte><byte>111</byte><byte>111</byte></bytes>", result);
-    }
-}
-
-test "structs with custom field names" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        first_name: []const u8,
-        last_name: []const u8,
-
-        pub fn fieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
-            if (std.mem.eql(u8, field_name, "first_name")) return "GivenName";
-            if (std.mem.eql(u8, field_name, "last_name")) return "FamilyName";
-            unreachable;
-        }
-    };
-
-    const person = Person{
-        .first_name = "John",
-        .last_name = "Doe",
-    };
-
-    {
-        const result = try stringifyAlloc(allocator, person, .{ .whitespace = .indent_2 });
-        defer allocator.free(result);
-        try testing.expectEqualStrings(
-            \\<?xml version="1.0" encoding="UTF-8"?>
-            \\<root>
-            \\  <GivenName>John</GivenName>
-            \\  <FamilyName>Doe</FamilyName>
-            \\</root>
-        , result);
-    }
-}
-
-test "structs with optional values" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        first_name: []const u8,
-        middle_name: ?[]const u8 = null,
-        last_name: []const u8,
-    };
-
-    const person = Person{
-        .first_name = "John",
-        .last_name = "Doe",
-    };
-
-    {
-        const result = try stringifyAlloc(
-            allocator,
-            person,
-            .{
-                .whitespace = .indent_2,
-                .emit_null_optional_fields = false,
-                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
-            },
-        );
-        defer allocator.free(result);
-        try testing.expectEqualStrings(
-            \\<?xml version="1.0" encoding="UTF-8"?>
-            \\<root xmlns="http://example.com/blah/xxxx/">
-            \\  <first_name>John</first_name>
-            \\  <last_name>Doe</last_name>
-            \\</root>
-        , result);
-    }
-}
-
-test "optional structs with value" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Person = struct {
-        first_name: []const u8,
-        middle_name: ?[]const u8 = null,
-        last_name: []const u8,
-    };
-
-    const person: ?Person = Person{
-        .first_name = "John",
-        .last_name = "Doe",
-    };
-
-    {
-        const result = try stringifyAlloc(
-            allocator,
-            person,
-            .{
-                .whitespace = .indent_2,
-                .emit_null_optional_fields = false,
-                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
-            },
-        );
-        defer allocator.free(result);
-        try testing.expectEqualStrings(
-            \\<?xml version="1.0" encoding="UTF-8"?>
-            \\<root xmlns="http://example.com/blah/xxxx/">
-            \\  <first_name>John</first_name>
-            \\  <last_name>Doe</last_name>
-            \\</root>
-        , result);
-    }
-}
-
-test "nested optional structs with value" {
-    const testing = std.testing;
-    const allocator = testing.allocator;
-
-    const Name = struct {
-        first_name: []const u8,
-        middle_name: ?[]const u8 = null,
-        last_name: []const u8,
-    };
-
-    const Person = struct {
-        name: ?Name,
-    };
-
-    const person: ?Person = Person{
-        .name = .{
-            .first_name = "John",
-            .last_name = "Doe",
-        },
-    };
-
-    {
-        const result = try stringifyAlloc(
-            allocator,
-            person,
-            .{
-                .whitespace = .indent_2,
-                .emit_null_optional_fields = false,
-                .root_attributes = "xmlns=\"http://example.com/blah/xxxx/\"",
-            },
-        );
-        defer allocator.free(result);
-        try testing.expectEqualStrings(
-            \\<?xml version="1.0" encoding="UTF-8"?>
-            \\<root xmlns="http://example.com/blah/xxxx/">
-            \\  <name>
-            \\    <first_name>John</first_name>
-            \\    <last_name>Doe</last_name>
-            \\  </name>
-            \\</root>
-        , result);
-    }
-}
@@ -1,7 +1,6 @@
 const std = @import("std");
 const xml = @import("xml.zig");
-const date = @import("date");
-const sm = @import("service_manifest");
+const date = @import("date.zig");

 const log = std.log.scoped(.xml_shaper);
@@ -95,69 +94,17 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parsed(T) {
     return Parsed(T).init(arena_allocator, try parseInternal(T, root, opts), parsed);
 }

-pub const XmlArrayStyle = enum {
-    collection, // Has a container element and list of child elements
-    repeated_root, // Repeats the same element without a container, e.g. S3 ListBucketResult
-};
-
-fn detectArrayStyle(comptime T: type, element: *xml.Element, options: ParseOptions) !XmlArrayStyle {
-    _ = options;
-
-    if (@typeInfo(T) != .@"struct") {
-        return .collection;
-    }
-
-    // does the element have child elements that match our expected struct?
-    const field_names = comptime blk: {
-        var result: [std.meta.fieldNames(T).len]struct {
-            []const u8,
-        } = undefined;
-
-        for (std.meta.fieldNames(T), 0..) |field_name, i| {
-            const key = if (@hasDecl(T, "fieldNameFor"))
-                T.fieldNameFor(undefined, field_name)
-            else
-                field_name;
-
-            result[i] = .{key};
-        }
-
-        break :blk std.StaticStringMap(void).initComptime(result);
-    };
-
-    var matching_fields: usize = 0;
-    var element_iterator = element.elements();
-
-    while (element_iterator.next()) |el| {
-        if (field_names.has(el.tag)) {
-            matching_fields += 1;
-        }
-    }
-
-    if (matching_fields > 0) {
-        return .repeated_root;
-    }
-
-    return .collection;
-}
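The two styles this detector distinguishes correspond to payload shapes like these (hypothetical element names, except for the S3 ListBucketResult case the comment above cites):

    // collection: a wrapper element contains the list items
    const collection_style =
        \\<Objects>
        \\  <Member><Key>a</Key></Member>
        \\  <Member><Key>b</Key></Member>
        \\</Objects>
    ;
    // repeated_root: the element itself repeats, and its children
    // (Key, Size) match the target struct's field names
    const repeated_root_style =
        \\<Contents><Key>a</Key><Size>1</Size></Contents>
        \\<Contents><Key>b</Key><Size>2</Size></Contents>
    ;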

 fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .bool => {
+        .Bool => {
             if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
                 return true;
             if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
                 return false;
             return error.UnexpectedToken;
         },
-        .float, .comptime_float => {
+        .Float, .ComptimeFloat => {
             return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
-                if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
-                    // We have an iso8601 in an integer field (we think)
-                    // Try to coerce this into our type
-                    const timestamp = try date.parseIso8601ToTimestamp(element.children.items[0].CharData);
-                    return @floatFromInt(timestamp);
-                }
                 if (log_parse_traces) {
                     std.log.err(
                         "Could not parse '{s}' as float in element '{s}': {any}",
@@ -174,7 +121,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return e;
             };
         },
-        .int, .comptime_int => {
+        .Int, .ComptimeInt => {
             // 2021-10-05T16:39:45.000Z
             return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
@@ -199,7 +146,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return e;
             };
         },
-        .optional => |optional_info| {
+        .Optional => |optional_info| {
             if (element.children.items.len == 0) {
                 // This is almost certainly incomplete. Empty strings? xsi:nil?
                 return null;
@@ -209,10 +156,8 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return try parseInternal(optional_info.child, element, options);
             }
         },
-        .@"enum" => {
-            if (T == date.Timestamp) {
-                return try date.Timestamp.parse(element.children.items[0].CharData);
-            }
+        .Enum => |enum_info| {
+            _ = enum_info;
             // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
             // if (numeric) |num| {
             //     return std.meta.intToEnum(T, num);
@@ -221,7 +166,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
             // return std.meta.stringToEnum(T, element.CharData);
             // }
         },
-        .@"union" => |union_info| {
+        .Union => |union_info| {
             if (union_info.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 // inline for (union_info.fields) |u_field| {
@@ -244,7 +189,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
             }
             @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
         },
-        .@"struct" => |struct_info| {
+        .Struct => |struct_info| {
             var r: T = undefined;
             var fields_seen = [_]bool{false} ** struct_info.fields.len;
             var fields_set: u64 = 0;
@@ -299,7 +244,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                     fields_set = fields_set + 1;
                     found_value = true;
                 }
-                if (@typeInfo(field.type) == .optional) {
+                if (@typeInfo(field.type) == .Optional) {
                     // Test "compiler assertion failure 2"
                     // Zig compiler bug circa 0.9.0. Using "and !found_value"
                     // in the if statement above will trigger assertion failure
@@ -324,7 +269,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return error.FieldElementMismatch; // see fields_seen for details
             return r;
         },
-        .array => //|array_info| {
+        .Array => //|array_info| {
             return error.ArrayNotImplemented,
             // switch (token) {
             //     .ArrayBegin => {
@@ -359,16 +304,16 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
             //         else => return error.UnexpectedToken,
             //     }
             // },
-        .pointer => |ptr_info| {
+        .Pointer => |ptr_info| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptr_info.size) {
-                .one => {
+                .One => {
                     const r: T = try allocator.create(ptr_info.child);
                     errdefer allocator.free(r);
                     r.* = try parseInternal(ptr_info.child, element, options);
                     return r;
                 },
-                .slice => {
+                .Slice => {
                     // TODO: Detect and deal with arrays. This will require two
                     // passes through the element children - one to
                     // determine if it is an array, one to parse the elements
@@ -377,44 +322,30 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                     // <Item>bar</Item>
                     // <Items>
                     if (ptr_info.child != u8) {
-                        const array_style = try detectArrayStyle(ptr_info.child, element, options);
-
-                        log.debug("type = {s}, style = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @tagName(array_style), @typeName(ptr_info.child), element.tag });
-
-                        var children = std.ArrayList(ptr_info.child){};
-                        defer children.deinit(allocator);
-
-                        switch (array_style) {
-                            .collection => {
-                                var iterator = element.elements();
-                                while (iterator.next()) |child_element| {
-                                    try children.append(
-                                        allocator,
-                                        try parseInternal(ptr_info.child, child_element, options),
-                                    );
-                                }
-                            },
-                            .repeated_root => {
-                                var current: ?*Element = element;
-                                while (current) |el| : (current = el.next_sibling) {
-                                    if (!std.mem.eql(u8, el.tag, element.tag)) continue;
-
-                                    try children.append(
-                                        allocator,
-                                        try parseInternal(ptr_info.child, el, options),
-                                    );
-                                }
-                            },
-                        }
-
-                        return children.toOwnedSlice(allocator);
+                        log.debug("type = {s}, ptr_info.child == {s}, element = {s}", .{ @typeName(T), @typeName(ptr_info.child), element.tag });
+                        var iterator = element.elements();
+                        var children = std.ArrayList(ptr_info.child).init(allocator);
+                        defer children.deinit();
+                        while (iterator.next()) |child_element| {
+                            try children.append(try parseInternal(ptr_info.child, child_element, options));
+                        }
+
+                        return children.toOwnedSlice();
+                        // var inx: usize = 0;
+                        // while (inx < children.len) {
+                        //     switch (element.children.items[inx]) {
+                        //         .Element => children[inx] = try parseInternal(ptr_info.child, element.children.items[inx].Element, options),
+                        //         .CharData => children[inx] = try allocator.dupe(u8, element.children.items[inx].CharData),
+                        //         .Comment => children[inx] = try allocator.dupe(u8, element.children.items[inx].Comment), // This might be an error...
+                        //     }
+                        //     inx += 1;
+                        // }
                     }
                     return try allocator.dupe(u8, element.children.items[0].CharData);
                 },
-                .many => {
+                .Many => {
                     return error.ManyPointerSizeNotImplemented;
                 },
-                .c => {
+                .C => {
                     return error.CPointerSizeNotImplemented;
                 },
             }
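The divergence in the slice branch above is the Zig ArrayList API change: master uses the unmanaged form (no stored allocator; append, deinit, and toOwnedSlice take the allocator at each call), while zig-0.12.0 uses the managed init(allocator) form. A minimal sketch of the unmanaged pattern, assuming a Zig 0.15 std:

    test "unmanaged ArrayList pattern" {
        const alloc = std.testing.allocator;
        var list = std.ArrayList(u32){}; // no allocator stored in the list
        defer list.deinit(alloc); // allocator supplied at every call instead
        try list.append(alloc, 1);
        try list.append(alloc, 2);
        const owned = try list.toOwnedSlice(alloc);
        defer alloc.free(owned);
        try std.testing.expectEqual(@as(usize, 2), owned.len);
    }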

@@ -799,33 +730,3 @@ test "compiler assertion failure 2" {
     defer parsed_data.deinit();
     try testing.expect(parsed_data.parsed_value.key_group_list.?.quantity == 42);
 }
-
-test "can parse list objects" {
-    const data =
-        \\<?xml version="1.0" encoding="UTF-8"?>
-        \\<ListBucketResult>
-        \\  <Contents>
-        \\    <Key>file1.txt</Key>
-        \\    <Size>1024</Size>
-        \\  </Contents>
-        \\  <Contents>
-        \\    <Key>file2.jpg</Key>
-        \\    <Size>2048</Size>
-        \\  </Contents>
-        \\</ListBucketResult>
-    ;
-
-    const Response = sm.s3.list_objects_v2.Response;
-
-    const parsed_data = try parse(Response, data, .{ .allocator = testing.allocator });
-    defer parsed_data.deinit();
-
-    const response: Response = parsed_data.parsed_value;
-    const s3_objects: []sm.s3.Object = response.contents.?;
-
-    try testing.expectEqual(2, s3_objects.len);
-    try testing.expectEqualStrings(s3_objects[0].key.?, "file1.txt");
-    try testing.expectEqualStrings(s3_objects[1].key.?, "file2.jpg");
-    try testing.expectEqual(s3_objects[0].size.?, 1024);
-    try testing.expectEqual(s3_objects[1].size.?, 2048);
-}

@@ -1,3 +0,0 @@
-{
-    "ignore": ["lib/json/src/json.zig"]
-}