Compare commits
35 commits
| SHA1 |
|---|
| 219bd32aa0 |
| ea14e3b90a |
| 12c7c46594 |
| ebb727c464 |
| 0d575c92bb |
| 98fff010dd |
| 072dcc1cec |
| 7a6086447c |
| 1b4788f469 |
| b5cd321263 |
| 8ac7aa47f7 |
| e194debb96 |
| e0e09fb19e |
| 8421fd9e55 |
| 9e8b3a6fc6 |
| 34c097e45f |
| ffe3941dbe |
| cdaf924867 |
| 6c106c1c71 |
| f325ef4236 |
| 30d46261b7 |
| 86483ec84d |
| 4f16553410 |
| 12e24b01ad |
| 220d45ab20 |
| 71495a4d1d |
| 303af8661c |
| 8c68dd6902 |
| 96e2b7bbc1 |
| 4313f8585b |
| dfda8e77d6 |
| 0cda404b0a |
| 5aa191c415 |
| 370011eb1e |
| 0adebe10da |
19 changed files with 244 additions and 289 deletions
.envrc (6 changes)

```diff
@@ -1,8 +1,8 @@
 # vi: ft=sh
 # shellcheck shell=bash
 
-if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then
-  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ="
+if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
+  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
 fi
 
-use zig 2024.11.0-mach
+use zig 0.14.0
```
Workflow: AWS-Zig Build

```diff
@@ -1,5 +1,6 @@
 name: AWS-Zig Build
 on:
+  workflow_dispatch:
   push:
     branches:
       - 'master'
@@ -17,11 +18,19 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v4
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: 0.14.0
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Ulimit
+        run: ulimit -a
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
       - name: Run tests
-        run: zig build test --verbose
+        run: zig build test --verbose --summary all
+      - name: Run tests (release mode)
+        run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig build scripts don't have the ability to import depenedencies directly
       # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
       # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@@ -66,7 +75,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
             --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
             https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example
```
Workflow: nightly build (zig-develop branch)

```diff
@@ -26,11 +26,17 @@ jobs:
         with:
           ref: zig-develop
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: master
-      - name: Run tests
-        run: zig build test --verbose
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
+        run: zig build test --verbose --summary all
+      - name: Run tests (release mode)
+        run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
       # the way GitHub/Gitea does for repo archives
@@ -63,7 +69,7 @@ jobs:
       # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
             --upload-file ${{ runner.temp }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz \
             https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}${{ env.PKG_PREFIX }}-with-models.tar.gz
       - name: Build example
```
Workflow: previous Zig release build (zig-0.13 branch)

```diff
@@ -20,11 +20,18 @@ jobs:
         with:
           ref: zig-0.13
       - name: Setup Zig
-        uses: mlugg/setup-zig@v1.2.1
+        uses: https://github.com/mlugg/setup-zig@v1.2.1
         with:
           version: 0.13.0
-      - name: Run tests
-        run: zig build test --verbose
+      - name: Restore Zig caches
+        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
+        run: zig build test --verbose --summary all
+      # Release mode fix not backported to 0.13.0 code
+      #- name: Run tests (release mode)
+      #  run: zig build test -Doptimize=ReleaseSafe --verbose
       # Zig build scripts don't have the ability to import depenedencies directly
       # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
       # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
@@ -69,7 +76,7 @@ jobs:
      # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
       - name: Publish source code with generated models
         run: |
-          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
             --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
             https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
       - name: Build example
```
.github/workflows/build.yaml (vendored, 31 lines deleted)

```diff
@@ -1,31 +0,0 @@
-name: AWS-Zig Build
-run-name: ${{ github.actor }} building AWS Zig SDK
-on:
-  push:
-    branches:
-      - '*'
-      - '!zig-develop*'
-jobs:
-  build-zig-0-12-0-amd64:
-    runs-on: ubuntu-latest
-    env:
-      ZIG_VERSION: 0.13.0
-      ARCH: x86_64
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
-          sudo tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
-          sudo ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds
```
.github/workflows/zig-mach.yaml (vendored, 36 lines deleted)

```diff
@@ -1,36 +0,0 @@
-name: aws-zig mach nominated build
-run-name: ${{ github.actor }} building AWS Zig SDK
-on:
-  push:
-    branches:
-      - 'zig-develop*'
-jobs:
-  build-zig-mach-latest:
-    runs-on: ubuntu-latest
-    # Need to use the default container with node and all that, so we can
-    # use JS-based actions like actions/checkout@v3...
-    # container:
-    #   image: alpine:3.15.0
-    env:
-      ZIG_VERSION: mach-latest
-      ARCH: x86_64
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          apt-get update && apt-get install -y jq
-          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://machengine.org/zig/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
-          sudo tar x -C /usr/local -f "${file}"
-          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
-          zig version
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds
```
.github/workflows/zig-nightly.yaml (vendored, 36 lines deleted)

```diff
@@ -1,36 +0,0 @@
-name: aws-zig nightly build
-run-name: ${{ github.actor }} building AWS Zig SDK
-on:
-  push:
-    branches:
-      - 'zig-develop*'
-jobs:
-  build-zig-nightly:
-    runs-on: ubuntu-latest
-    # Need to use the default container with node and all that, so we can
-    # use JS-based actions like actions/checkout@v3...
-    # container:
-    #   image: alpine:3.15.0
-    env:
-      ZIG_VERSION: master
-      ARCH: x86_64
-    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
-    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          apt-get update && apt-get install -y jq
-          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://ziglang.org/download/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
-          sudo tar x -C /usr/local -f "${file}"
-          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
-          zig version
-      - name: Run tests
-        run: zig build test -Dbroken-windows --verbose
-      - name: Build example
-        run: ( cd example && zig build ) # Make sure example builds
```
README.md (61 changes)

```diff
@@ -1,11 +1,11 @@
 AWS SDK for Zig
 ===============
 
-[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+[Zig 0.14](https://ziglang.org/download/#release-0.14.0):
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
 
-[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
+[Last Mach Nominated Zig Version](https://machengine.org/docs/nominated-zig/):
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)
 
@@ -13,12 +13,13 @@ AWS SDK for Zig
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)
 
-**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
-panics under CI, but I have not yet reproduced this panic. Running manually on
-multiple machines appears to be working properly
+[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+
+[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
 
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
-in x86_linux, and will vary based on services used. Tested targets:
+in x86_64-linux, and will vary based on services used. Tested targets:
 
 * x86_64-linux
 * riscv64-linux
@@ -30,22 +31,38 @@ in x86_linux, and will vary based on services used. Tested targets:
 
 Tested targets are built, but not continuously tested, by CI.
 
-Zig-Develop Branch
-------------------
+Branches
+--------
 
-This branch is intended for use with the in-development version of Zig. This
-starts with 0.12.0-dev.3180+83e578a18. This is aligned with [Mach Engine's Nominated
-Zig Versions](https://machengine.org/about/nominated-zig/). Nightly zig versions
-are difficult to keep up with and there is no special effort made there, build
-status is FYI (and used as a canary for nominated zig versions).
+* **master**: This branch tracks the latest released zig version
+* **zig-0.13**: This branch tracks the previous released zig version (0.13 currently).
+                Support for the previous version is best effort, generally
+                degrading over time. Fixes will generally appear in master, then
+                backported into the previous version.
+* **zig-mach**: This branch tracks the latest mach nominated version. A separate
+                branch is necessary as mach nominated is usually, but not always,
+                more recent than the latest production zig. Support for the mach
+                version is best effort.
+* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
+                   for breaking changes that will need to be dealt with when
+                   a new mach nominated version or new zig release appear.
+                   Expect significant delays in any build failures.
+
+Other branches/tags exist but are unsupported
 
 Building
 --------
 
 `zig build` should work. It will build the code generation project, fetch model
 files from upstream AWS Go SDK v2, run the code generation, then build the main
-project with the generated code. Testing can be done with `zig test`.
+project with the generated code. Testing can be done with `zig build test`. Note that
+this command tests on all supported architectures, so for a faster testing
+process, use `zig build smoke-test` instead.
+
+To make development even faster, a build option is provided to avoid the use of
+LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
+can reduce build/test time 300%. Note, however, native code generation in zig
+is not yet complete, so you may see errors.
 
 Using
 -----
@@ -53,7 +70,8 @@ Using
 This is designed for use with the Zig package manager, and exposes a module
 called "aws". Set up `build.zig.zon` and add the dependency/module to your project
 as normal and the package manager should do its thing. A full example can be found
-in [/example](example/README.md).
+in [/example](example/build.zig.zon). This can also be used at build time in
+a downstream project's `build.zig`.
 
 Configuring the module and/or Running the demo
 ----------------------------------------------
```
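The "Using" hunk above describes consuming the exposed `aws` module via the package manager. As a hedged illustration only (the `myapp` name and file layout are assumptions, not taken from this diff), a downstream `build.zig` would wire the module in roughly like this:

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "myapp", // hypothetical consumer
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });

    // "aws" is the module name this SDK exposes via b.addModule in its build.zig
    const aws_dep = b.dependency("aws", .{ .target = target, .optimize = optimize });
    exe.root_module.addImport("aws", aws_dep.module("aws"));

    b.installArtifact(exe);
}
```

The dependency itself is declared in `build.zig.zon`, as shown in the example manifest further down this diff.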
```diff
@@ -61,8 +79,8 @@ Configuring the module and/or Running the demo
 
 This library mimics the aws c libraries for it's work, so it operates like most
 other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
 for working with services. For local testing or alternative endpoints, there's
-no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
-variable that will supersede all other configuration.
+no real standard, so there is code to look for an environment variable
+`AWS_ENDPOINT_URL` variable that will supersede all other configuration.
 
 Limitations
 -----------
@@ -83,13 +101,6 @@ TODO List:
 
 * Implement timeouts and other TODO's in the code
 * Add option to cache signature keys
 
-Services without TLS 1.3 support
---------------------------------
-
-All AWS services should support TLS 1.3 at this point, but there are many regions
-and several partitions, and not all of them have been tested, so your mileage
-may vary. If something doesn't work, please submit an issue to let others know.
-
 Dependency tree
 ---------------
 
```
build.zig (84 changes)

```diff
@@ -34,12 +34,18 @@ pub fn build(b: *Builder) !void {
         "no-llvm",
         "Disable LLVM",
     ) orelse false;
 
     const broken_windows = b.option(
         bool,
         "broken-windows",
         "Windows is broken in this environment (do not run Windows tests)",
     ) orelse false;
+    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
+
+    const test_filters: []const []const u8 = b.option(
+        []const []const u8,
+        "test-filter",
+        "Skip tests that do not match any of the specified filters",
+    ) orelse &.{};
     // TODO: Embed the current git version in the code. We can do this
     // by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
     // grab that commit, and use b.addOptions/exe.addOptions to generate the
@@ -67,17 +73,6 @@ pub fn build(b: *Builder) !void {
     const smithy_module = smithy_dep.module("smithy");
     exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
 
-    // Expose module to others
-    _ = b.addModule("aws", .{
-        .root_source_file = b.path("src/aws.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
-
-    // Expose module to others
-    _ = b.addModule("aws-signing", .{
-        .root_source_file = b.path("src/aws_signing.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
     // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
     //
     // We are working here with kind of a weird dependency though. So we can do this
@@ -100,7 +95,6 @@ pub fn build(b: *Builder) !void {
     const run_step = b.step("run", "Run the app");
     run_step.dependOn(&run_cmd.step);
 
-    const gen_step = blk: {
     const cg = b.step("gen", "Generate zig service code from smithy models");
 
     const cg_exe = b.addExecutable(.{
@@ -110,30 +104,18 @@ pub fn build(b: *Builder) !void {
         .target = b.graph.host,
         .optimize = if (b.verbose) .Debug else .ReleaseSafe,
     });
-    cg_exe.use_llvm = !no_llvm;
-    cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
+    cg_exe.root_module.addImport("smithy", smithy_module);
     var cg_cmd = b.addRunArtifact(cg_exe);
     cg_cmd.addArg("--models");
-    const hash = hash_blk: {
-        for (b.available_deps) |dep| {
-            const dep_name = dep.@"0";
-            const dep_hash = dep.@"1";
-            if (std.mem.eql(u8, dep_name, "models"))
-                break :hash_blk dep_hash;
-        }
-        return error.DependencyNamedModelsNotFoundInBuildZigZon;
-    };
     cg_cmd.addArg(try std.fs.path.join(
         b.allocator,
         &[_][]const u8{
-            b.graph.global_cache_root.path.?,
-            "p",
-            hash,
+            try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
             models_subdir,
         },
    ));
     cg_cmd.addArg("--output");
-    cg_cmd.addDirectoryArg(b.path("src/models"));
+    const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
     if (b.verbose)
         cg_cmd.addArg("--verbose");
     // cg_cmd.step.dependOn(&fetch_step.step);
@@ -152,10 +134,34 @@ pub fn build(b: *Builder) !void {
     // later about warning on manual changes...
 
     cg.dependOn(&cg_cmd.step);
-        break :blk cg;
-    };
 
-    exe.step.dependOn(gen_step);
+    exe.step.dependOn(cg);
 
+    // This allows us to have each module depend on the
+    // generated service manifest.
+    const service_manifest_module = b.createModule(.{
+        .root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+    service_manifest_module.addImport("smithy", smithy_module);
+
+    exe.root_module.addImport("service_manifest", service_manifest_module);
+
+    // Expose module to others
+    _ = b.addModule("aws", .{
+        .root_source_file = b.path("src/aws.zig"),
+        .imports = &.{
+            .{ .name = "smithy", .module = smithy_module },
+            .{ .name = "service_manifest", .module = service_manifest_module },
+        },
+    });
+
+    // Expose module to others
+    _ = b.addModule("aws-signing", .{
+        .root_source_file = b.path("src/aws_signing.zig"),
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+    });
 
     // Similar to creating the run step earlier, this exposes a `test` step to
     // the `zig build --help` menu, providing a way for the user to request
```
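The move from `cg_cmd.addDirectoryArg(b.path("src/models"))` to `addOutputDirectoryArg`, plus the new `createModule` call, is what lets generated code live in the build cache rather than the source tree. A minimal sketch of that pattern, with placeholder names (`codegen`, `generated`, and `manifest.zig` are illustrative, not this repo's):

```zig
const std = @import("std");

// Sketch only: run a codegen tool, let the build system own its output
// directory, and expose one generated file as an importable module.
pub fn build(b: *std.Build) void {
    const tool = b.addExecutable(.{
        .name = "codegen",
        .root_source_file = b.path("tools/codegen.zig"),
        .target = b.graph.host,
    });
    const run = b.addRunArtifact(tool);
    run.addArg("--output");
    // Returns a LazyPath into the build cache; steps that use it
    // automatically gain a dependency on the run step
    const out_dir = run.addOutputDirectoryArg("generated");
    const gen_mod = b.createModule(.{
        .root_source_file = out_dir.path(b, "manifest.zig"),
    });
    _ = gen_mod; // consumers would addImport this, as in the hunk above
}
```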
```diff
@@ -184,9 +190,11 @@ pub fn build(b: *Builder) !void {
             .root_source_file = b.path("src/aws.zig"),
             .target = b.resolveTargetQuery(t),
             .optimize = optimize,
+            .filters = test_filters,
         });
-        unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
-        unit_tests.step.dependOn(gen_step);
+        unit_tests.root_module.addImport("smithy", smithy_module);
+        unit_tests.root_module.addImport("service_manifest", service_manifest_module);
+        unit_tests.step.dependOn(cg);
         unit_tests.use_llvm = !no_llvm;
 
         const run_unit_tests = b.addRunArtifact(unit_tests);
@@ -208,13 +216,19 @@ pub fn build(b: *Builder) !void {
         .root_source_file = b.path("src/aws.zig"),
         .target = target,
         .optimize = optimize,
+        .filters = test_filters,
     });
     smoke_test.use_llvm = !no_llvm;
-    smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
-    smoke_test.step.dependOn(gen_step);
+    smoke_test.root_module.addImport("smithy", smithy_module);
+    smoke_test.root_module.addImport("service_manifest", service_manifest_module);
+    smoke_test.step.dependOn(cg);
 
     const run_smoke_test = b.addRunArtifact(smoke_test);
 
     smoke_test_step.dependOn(&run_smoke_test.step);
+    if (no_bin) {
+        b.getInstallStep().dependOn(&exe.step);
+    } else {
         b.installArtifact(exe);
+    }
 }
```
build.zig.zon

```diff
@@ -1,18 +1,20 @@
 .{
-    .name = "aws",
+    .name = .aws,
     .version = "0.0.1",
+    .fingerprint = 0x1f26b7b27005bb49,
     .paths = .{
         "build.zig",
         "build.zig.zon",
         "src",
+        "codegen",
         "README.md",
         "LICENSE",
     },
 
     .dependencies = .{
         .smithy = .{
-            .url = "https://git.lerch.org/lobo/smithy/archive/3ed98751bc414e005af6ad185feb213d4366c0db.tar.gz",
-            .hash = "12204a784751a4ad5ed6c8955ba91fcbc4a3cad6c5a7da38f39abf074ef801d13172",
+            .url = "https://git.lerch.org/lobo/smithy/archive/a4c6ec6dfe552c57bab601c7d99e8de02bbab1fe.tar.gz",
+            .hash = "smithy-1.0.0-uAyBgS_MAgC4qgc9QaEy5Y5Nf7kv32buQZBYugqNQsAn",
         },
         .models = .{
             .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
```
```diff
@@ -435,7 +435,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
 
 fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerationState, writer: anytype) !void {
     // More types may be added during processing
-    while (file_state.additional_types_to_generate.popOrNull()) |t| {
+    while (file_state.additional_types_to_generate.pop()) |t| {
         if (file_state.additional_types_generated.getEntry(t.name) != null) continue;
         // std.log.info("\t\t{s}", .{t.name});
         var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
```
example/build.zig.zon

```diff
@@ -1,12 +1,13 @@
 .{
-    .name = "myapp",
+    .name = .myapp,
     .version = "0.0.1",
+    .fingerprint = 0x8798022a511224c5,
     .paths = .{""},
 
     .dependencies = .{
         .aws = .{
-            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz",
-            .hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968",
+            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/7a6086447c1249b0e5b5b5f3873d2f7932bea56d/7a6086447c1249b0e5b5b5f3873d2f7932bea56d-with-models.tar.gz",
+            .hash = "aws-0.0.1-SbsFcGN_CQCBjurpc2GEMw4c_qAkGu6KpuVnLBLY4L4q",
         },
     },
 }
```
src/aws.zig (41 changes)

```diff
@@ -688,7 +688,6 @@ pub fn Request(comptime request_action: anytype) type {
 
         fn ParsedJsonData(comptime T: type) type {
             return struct {
-                raw_response_parsed: bool,
                 parsed_response_ptr: *T,
                 allocator: std.mem.Allocator,
 
@@ -697,7 +696,6 @@ pub fn Request(comptime request_action: anytype) type {
                 pub fn deinit(self: MySelf) void {
                     // This feels like it should result in a use after free, but it
                     // seems to be working?
-                    if (self.raw_response_parsed)
                     self.allocator.destroy(self.parsed_response_ptr);
                 }
             };
@@ -713,11 +711,13 @@ pub fn Request(comptime request_action: anytype) type {
                 std.mem.eql(u8, key, action.action_name ++ "Response") or
                 std.mem.eql(u8, key, action.action_name ++ "Result") or
                 isOtherNormalResponse(response_types.NormalResponse, key);
-            var raw_response_parsed = false;
             var stream = json.TokenStream.init(data);
             const parsed_response_ptr = blk: {
-                if (!response_types.isRawPossible or found_normal_json_response)
-                    break :blk &(json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
+                const ptr = try options.client.allocator.create(response_types.NormalResponse);
+                errdefer options.client.allocator.destroy(ptr);
+
+                if (!response_types.isRawPossible or found_normal_json_response) {
+                    ptr.* = (json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
                         log.err(
                             \\Call successful, but unexpected response from service.
                             \\This could be the result of a bug or a stale set of code generated
@@ -733,10 +733,10 @@ pub fn Request(comptime request_action: anytype) type {
                         return e;
                     });
 
+                    break :blk ptr;
+                }
+
                 log.debug("Appears server has provided a raw response", .{});
-                raw_response_parsed = true;
-                const ptr = try options.client.allocator.create(response_types.NormalResponse);
-                errdefer options.client.allocator.destroy(ptr);
                 @field(ptr.*, std.meta.fields(action.Response)[0].name) =
                     json.parse(response_types.RawResponse, &stream, parser_options) catch |e| {
                     log.err(
@@ -756,8 +756,7 @@ pub fn Request(comptime request_action: anytype) type {
                 break :blk ptr;
             };
             return ParsedJsonData(response_types.NormalResponse){
-                .raw_response_parsed = raw_response_parsed,
-                .parsed_response_ptr = @constCast(parsed_response_ptr), //TODO: why doesn't changing const->var above fix this?
+                .parsed_response_ptr = parsed_response_ptr,
                 .allocator = options.client.allocator,
             };
         }
```
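The hunks above replace a conditional-ownership flag (`raw_response_parsed`) with a single up-front allocation, so `deinit` can unconditionally destroy the pointer. A minimal, self-contained sketch of the pattern, with a placeholder `Response` and `parse` standing in for the real types:

```zig
const std = @import("std");

const Response = struct { value: u32 };

fn parse(data: []const u8) !Response {
    return .{ .value = @intCast(data.len) }; // stand-in for real parsing
}

/// Allocate once, clean up on any later error, and let the caller
/// unconditionally destroy the result.
fn parseAlloc(allocator: std.mem.Allocator, data: []const u8) !*Response {
    const ptr = try allocator.create(Response);
    errdefer allocator.destroy(ptr); // runs only if a later step fails
    ptr.* = try parse(data);
    return ptr;
}

test parseAlloc {
    const ptr = try parseAlloc(std.testing.allocator, "abc");
    defer std.testing.allocator.destroy(ptr);
    try std.testing.expectEqual(@as(u32, 3), ptr.value);
}
```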
```diff
@@ -777,8 +776,12 @@ fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
     // TODO: This is terrible...fix it
     switch (T) {
         bool => return std.ascii.eqlIgnoreCase(val, "true"),
-        i64 => return parseInt(T, val) catch |e| {
-            log.err("Invalid string representing i64: {s}", .{val});
+        i64, i128 => return parseInt(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
+            return e;
+        },
+        f64, f128 => return std.fmt.parseFloat(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
             return e;
         },
         else => return val,
```
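The widened switch now coerces 128-bit integers and floats as well. A small standalone test of the underlying std calls (not this function's private API) matches what the new branches rely on:

```zig
const std = @import("std");

test "numeric coercion building blocks" {
    // i128 round-trip, as the new i64/i128 branch relies on
    try std.testing.expectEqual(
        @as(i128, 170141183460469231731687303715884105727),
        try std.fmt.parseInt(i128, "170141183460469231731687303715884105727", 10),
    );
    // f128 parse, as the new f64/f128 branch relies on
    try std.testing.expectEqual(@as(f128, 0.5), try std.fmt.parseFloat(f128, "0.5"));
}
```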
```diff
@@ -932,14 +935,14 @@ fn ServerResponse(comptime action: anytype) type {
         .{
             .name = action.action_name ++ "Result",
             .type = T,
-            .default_value = null,
+            .default_value_ptr = null,
             .is_comptime = false,
             .alignment = 0,
         },
         .{
             .name = "ResponseMetadata",
             .type = ResponseMetadata,
-            .default_value = null,
+            .default_value_ptr = null,
             .is_comptime = false,
             .alignment = 0,
         },
@@ -955,7 +958,7 @@ fn ServerResponse(comptime action: anytype) type {
         .{
             .name = action.action_name ++ "Response",
             .type = Result,
-            .default_value = null,
+            .default_value_ptr = null,
             .is_comptime = false,
             .alignment = 0,
         },
```
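These are mechanical renames for Zig 0.14, where `std.builtin.Type.StructField.default_value` became `default_value_ptr`. A self-contained sketch of building a struct type with the new field name:

```zig
const std = @import("std");

test "comptime struct construction with default_value_ptr" {
    const fields = [_]std.builtin.Type.StructField{.{
        .name = "count",
        .type = u32,
        .default_value_ptr = null, // was .default_value before Zig 0.14
        .is_comptime = false,
        .alignment = 0,
    }};
    const T = @Type(.{ .@"struct" = .{
        .layout = .auto,
        .fields = &fields,
        .decls = &.{},
        .is_tuple = false,
    } });
    const t = T{ .count = 7 };
    try std.testing.expectEqual(@as(u32, 7), t.count);
}
```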
```diff
@@ -1143,7 +1146,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
         },
         // if this is a pointer, we want to make sure it is more than just a string
         .pointer => |ptr| {
-            if (ptr.child == u8 or ptr.size != .Slice) {
+            if (ptr.child == u8 or ptr.size != .slice) {
                 // This is just a string
                 return try addBasicQueryArg(prefix, key, value, writer);
             }
```
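Another Zig 0.14 rename visible throughout this diff: the `std.builtin.Type.Pointer.Size` variants are now lowercase (`.one`, `.slice`, `.many`, `.c`). For instance, a string check written against the new names:

```zig
const std = @import("std");

// True for []const u8 / []u8 style slices; Zig 0.14 spells the size as .slice
fn isU8Slice(comptime T: type) bool {
    const ti = @typeInfo(T);
    return ti == .pointer and ti.pointer.size == .slice and ti.pointer.child == u8;
}

test isU8Slice {
    try std.testing.expect(isU8Slice([]const u8));
    try std.testing.expect(!isU8Slice(*u8));
    try std.testing.expect(!isU8Slice(u32));
}
```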
```diff
@@ -1706,7 +1709,7 @@ const TestSetup = struct {
     request_options: TestOptions,
     server_thread: std.Thread = undefined,
     creds: aws_auth.Credentials = undefined,
-    client: *Client = undefined,
+    client: Client = undefined,
     started: bool = false,
 
     const Self = @This();
@@ -1743,8 +1746,8 @@ const TestSetup = struct {
             null,
         );
         aws_creds.static_credentials = self.creds;
-        var client = Client.init(self.allocator, .{});
-        self.client = &client;
+        const client = Client.init(self.allocator, .{});
+        self.client = client;
         return .{
             .region = "us-west-2",
             .client = client,
```
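The `TestSetup` change fixes a latent dangling pointer: the old code stored the address of a function-local `client`, which is invalid once the function returns. Storing the value itself avoids that. A distilled illustration (the `Client` here is a hypothetical stand-in):

```zig
const std = @import("std");

const Client = struct { id: u32 };

// Anti-pattern (what the old code did): the local dies with the stack frame,
// so the returned pointer dangles.
fn initClientPtr() *Client {
    var client = Client{ .id = 1 };
    return &client; // undefined behavior once dereferenced by the caller
}

// Fix (what the new code does): copy the value out instead.
fn initClient() Client {
    return Client{ .id = 1 };
}

test initClient {
    try std.testing.expectEqual(@as(u32, 1), initClient().id);
}
```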
```diff
@@ -662,12 +662,12 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
     }
     defer allocator.free(encoded_once);
     var encoded_twice = try encodeUri(allocator, encoded_once);
+    defer allocator.free(encoded_twice);
     log.debug("encoded path (2): {s}", .{encoded_twice});
     if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
-        _ = allocator.resize(encoded_twice, i);
-        return encoded_twice[0..i];
+        return try allocator.dupe(u8, encoded_twice[0..i]);
     }
-    return encoded_twice;
+    return try allocator.dupe(u8, encoded_twice);
 }
 
 fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
@@ -936,6 +936,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
     const in_quote = false;
     var start: usize = 0;
     const rc = try allocator.alloc(u8, value.len);
+    defer allocator.free(rc);
     var rc_inx: usize = 0;
     for (value, 0..) |c, i| {
         if (!started and !std.ascii.isWhitespace(c)) {
@@ -953,8 +954,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
     // Trim end
     while (std.ascii.isWhitespace(rc[rc_inx - 1]))
         rc_inx -= 1;
-    _ = allocator.resize(rc, rc_inx);
-    return rc[0..rc_inx];
+    return try allocator.dupe(u8, rc[0..rc_inx]);
 }
 fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
     _ = context;
```
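Both hunks replace an in-place `allocator.resize` with `dupe` plus a deferred free of the oversized scratch buffer. `resize` is only a request: it returns false when the allocator cannot shrink in place, which previously left the slice's bookkeeping length out of sync with what was freed later. A sketch of the safer shape of this pattern:

```zig
const std = @import("std");

/// Return only the used prefix of a scratch buffer, regardless of whether
/// the allocator could have shrunk it in place.
fn trimmedCopy(allocator: std.mem.Allocator, scratch_len: usize, used: usize) ![]u8 {
    const scratch = try allocator.alloc(u8, scratch_len);
    defer allocator.free(scratch); // always freed at its original length
    @memset(scratch[0..used], 'x'); // stand-in for the real transformation work
    return try allocator.dupe(u8, scratch[0..used]);
}

test trimmedCopy {
    const out = try trimmedCopy(std.testing.allocator, 32, 5);
    defer std.testing.allocator.free(out);
    try std.testing.expectEqualStrings("xxxxx", out);
}
```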
```diff
@@ -986,6 +986,7 @@ test "canonical uri" {
     const path = "/documents and settings/?foo=bar";
     const expected = "/documents%2520and%2520settings/";
     const actual = try canonicalUri(allocator, path, true);
+
     defer allocator.free(actual);
     try std.testing.expectEqualStrings(expected, actual);
 
```
src/json.zig (24 changes)

```diff
@@ -1723,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
     }
     inline for (structInfo.fields, 0..) |field, i| {
         if (!fields_seen[i]) {
-            if (field.default_value) |default_value_ptr| {
+            if (field.default_value_ptr) |default_value_ptr| {
                 if (!field.is_comptime) {
                     const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
                     @field(r, field.name) = default_value;
@@ -1773,18 +1773,18 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
     .pointer => |ptrInfo| {
         const allocator = options.allocator orelse return error.AllocatorRequired;
         switch (ptrInfo.size) {
-            .One => {
+            .one => {
                 const r: T = try allocator.create(ptrInfo.child);
                 errdefer allocator.destroy(r);
                 r.* = try parseInternal(ptrInfo.child, token, tokens, options);
                 return r;
             },
-            .Slice => {
+            .slice => {
                 switch (token) {
                     .ArrayBegin => {
                         var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                         errdefer {
-                            while (arraylist.popOrNull()) |v| {
+                            while (arraylist.pop()) |v| {
                                 parseFree(ptrInfo.child, v, options);
                             }
                             arraylist.deinit();
@@ -1829,7 +1829,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                         if (value_type == null) return error.UnexpectedToken;
                         var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                         errdefer {
-                            while (arraylist.popOrNull()) |v| {
+                            while (arraylist.pop()) |v| {
                                 parseFree(ptrInfo.child, v, options);
                             }
                             arraylist.deinit();
```
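In Zig 0.14, `std.ArrayList.pop` absorbed the old `popOrNull`: it now returns `?T` (null when the list is empty), which is why these drain loops switch names without changing shape. A quick sketch:

```zig
const std = @import("std");

test "ArrayList.pop returns an optional in Zig 0.14" {
    var list = std.ArrayList(u32).init(std.testing.allocator);
    defer list.deinit();
    try list.appendSlice(&.{ 1, 2, 3 });

    var sum: u32 = 0;
    while (list.pop()) |v| sum += v; // loop ends when pop() returns null
    try std.testing.expectEqual(@as(u32, 6), sum);
}
```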
```diff
@@ -1879,7 +1879,7 @@ fn isMapPattern(comptime T: type) bool {
     // Let's just double check before proceeding
     const ti = @typeInfo(T);
     if (ti != .pointer) return false;
-    if (ti.pointer.size != .Slice) return false;
+    if (ti.pointer.size != .slice) return false;
     const ti_child = @typeInfo(ti.pointer.child);
     if (ti_child != .@"struct") return false;
     if (ti_child.@"struct".fields.len != 2) return false;
@@ -1935,11 +1935,11 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
     .pointer => |ptrInfo| {
         const allocator = options.allocator orelse unreachable;
         switch (ptrInfo.size) {
-            .One => {
+            .one => {
                 parseFree(ptrInfo.child, value.*, options);
                 allocator.destroy(value);
             },
-            .Slice => {
+            .slice => {
                 for (value) |v| {
                     parseFree(ptrInfo.child, v, options);
                 }
@@ -2284,7 +2284,7 @@ pub const Parser = struct {
                     return;
                 }
 
-                var value = p.stack.pop();
+                var value = p.stack.pop().?;
                 try p.pushToParent(&value);
             },
             .String => |s| {
@@ -2350,7 +2350,7 @@ pub const Parser = struct {
                     return;
                 }
 
-                var value = p.stack.pop();
+                var value = p.stack.pop().?;
                 try p.pushToParent(&value);
             },
             .ObjectBegin => {
@@ -2922,7 +2922,7 @@ pub fn stringify(
     },
     .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
     .pointer => |ptr_info| switch (ptr_info.size) {
-        .One => switch (@typeInfo(ptr_info.child)) {
+        .one => switch (@typeInfo(ptr_info.child)) {
            .array => {
                 const Slice = []const std.meta.Elem(ptr_info.child);
                 return stringify(@as(Slice, value), options, out_stream);
@@ -2933,7 +2933,7 @@ pub fn stringify(
             },
         },
         // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
-        .Slice => {
+        .slice => {
             if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
                 try out_stream.writeByte('\"');
                 var i: usize = 0;
```
```diff
@@ -1,5 +1,5 @@
 const std = @import("std");
-const service_list = @import("models/service_manifest.zig");
+const service_list = @import("service_manifest");
 const expectEqualStrings = std.testing.expectEqualStrings;
 
 pub fn Services(comptime service_imports: anytype) type {
@@ -12,7 +12,7 @@ pub fn Services(comptime service_imports: anytype) type {
     item.* = .{
         .name = @tagName(service_imports[i]),
         .type = @TypeOf(import_field),
-        .default_value = &import_field,
+        .default_value_ptr = &import_field,
         .is_comptime = false,
         .alignment = 0,
     };
```
```diff
@@ -24,10 +24,11 @@ fn encodeStruct(
     comptime options: EncodingOptions,
 ) !bool {
     var rc = first;
+    var arena = std.heap.ArenaAllocator.init(allocator);
+    defer arena.deinit();
+    const arena_alloc = arena.allocator();
     inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
-        const field_name = try options.field_name_transformer(allocator, field.name);
-        defer if (options.field_name_transformer.* != defaultTransformer)
-            allocator.free(field_name);
+        const field_name = try options.field_name_transformer(arena_alloc, field.name);
         // @compileLog(@typeInfo(field.field_type).Pointer);
         rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
     }
```
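Routing the transformer's allocations through an arena removes the conditional per-field `free`: everything the transformer allocates is released by the single `arena.deinit()`. The general pattern, as a sketch:

```zig
const std = @import("std");

test "arena frees many small allocations at once" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit(); // one teardown for every allocation below
    const alloc = arena.allocator();

    var names: [3][]u8 = undefined;
    for (&names, 0..) |*slot, i| {
        // No individual free needed, even for differently sized strings
        slot.* = try std.fmt.allocPrint(alloc, "field_{d}", .{i});
    }
    try std.testing.expectEqualStrings("field_2", names[2]);
}
```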
```diff
@@ -50,7 +51,7 @@ pub fn encodeInternal(
     .optional => if (obj) |o| {
         rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
     },
-    .pointer => |ti| if (ti.size == .One) {
+    .pointer => |ti| if (ti.size == .one) {
         rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
     } else {
         if (!first) _ = try writer.write("&");
```
```diff
@@ -653,7 +653,10 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {
 
     // This error is not strictly true, but we need to match one of the items
     // from the error set provided by the other stdlib calls at the calling site
-    if (!alloc.resize(str, j)) return error.OutOfMemory;
+    if (!alloc.resize(str, j)) {
+        defer alloc.free(str);
+        return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
+    }
     return str[0..j];
 }
 
```
```diff
@@ -313,13 +313,13 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
     .pointer => |ptr_info| {
         const allocator = options.allocator orelse return error.AllocatorRequired;
         switch (ptr_info.size) {
-            .One => {
+            .one => {
                 const r: T = try allocator.create(ptr_info.child);
                 errdefer allocator.free(r);
                 r.* = try parseInternal(ptr_info.child, element, options);
                 return r;
             },
-            .Slice => {
+            .slice => {
                 // TODO: Detect and deal with arrays. This will require two
                 // passes through the element children - one to
                 // determine if it is an array, one to parse the elements
@@ -348,10 +348,10 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 }
                 return try allocator.dupe(u8, element.children.items[0].CharData);
             },
-            .Many => {
+            .many => {
                 return error.ManyPointerSizeNotImplemented;
             },
-            .C => {
+            .c => {
                 return error.CPointerSizeNotImplemented;
             },
         }
```