Compare commits

31 commits:

8ac7aa47f7 · e194debb96 · e0e09fb19e · 8421fd9e55 · 9e8b3a6fc6 · 34c097e45f · ffe3941dbe · cdaf924867
6c106c1c71 · f325ef4236 · 30d46261b7 · 86483ec84d · 4f16553410 · 12e24b01ad · 220d45ab20 · 71495a4d1d
303af8661c · acd6589909 · 78b36e2316 · b369c29e84 · e3bb4142d6 · e02fb699fc · 35fad85c13 · 88d7e99d6b
debb4dab60 · 6240225db2 · 0892914c5b · 97b784f8e3 · 4fa30a70cc · 9497db373c · 3d78705ea5

23 changed files with 475 additions and 254 deletions
.envrc | 8 (new file)

@@ -0,0 +1,8 @@
+# vi: ft=sh
+# shellcheck shell=bash
+
+if ! has zvm_direnv_version || ! zvm_direnv_version 2.0.0; then
+  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/2.0.0/direnvrc" "sha256-8Umzxj32hFU6G0a7Wrq0KTNDQ8XEuje2A3s2ljh/hFY="
+fi
+
+use zig 0.14.0
@@ -1,9 +1,9 @@
 name: AWS-Zig Build
 on:
   workflow_dispatch:
   push:
     branches:
-      - '*'
-      - '!zig-develop*'
+      - 'master'
 env:
   ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/

@@ -20,7 +20,7 @@ jobs:
       - name: Setup Zig
        uses: mlugg/setup-zig@v1.2.1
        with:
-         version: 0.13.0
+         version: 0.14.0
      - name: Run tests
        run: zig build test --verbose
      # Zig build scripts don't have the ability to import dependencies directly
@@ -1,10 +1,11 @@
 name: aws-zig mach nominated build
 on:
   workflow_dispatch:
   schedule:
     - cron: '0 12 * * *' # noon UTC, 4AM Pacific
   push:
     branches:
-      - 'zig-develop*'
+      - 'zig-mach'
 env:
   PKG_PREFIX: nominated-zig
 jobs:

@@ -23,12 +24,16 @@ jobs:
       - name: Check out repository code
        uses: actions/checkout@v4
        with:
-         ref: zig-develop
+         ref: zig-mach
      - name: Setup Zig
        uses: mlugg/setup-zig@v1.2.1
        with:
          version: mach-latest
-     - name: Run tests
+     - name: Run gen
+       run: zig build gen --verbose
+     - name: Run smoke test
+       run: zig build smoke-test --verbose
+     - name: Run full tests
        run: zig build test --verbose
      # Zig package manager expects everything to be inside a directory in the archive,
      # which it then strips out on download. So we need to shove everything inside a directory
@@ -1,10 +1,11 @@
 name: aws-zig nightly build
 on:
   workflow_dispatch:
   schedule:
     - cron: '30 12 * * *' # 12:30 UTC, 4:30AM Pacific
   push:
     branches:
-      - 'zig-develop*'
+      - 'zig-develop'
 env:
   PKG_PREFIX: nightly-zig
 jobs:
.gitea/workflows/zig-previous.yaml | 84 (new file)

@@ -0,0 +1,84 @@
+name: AWS-Zig Build
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - 'zig-0.13'
+env:
+  ACTIONS_RUNTIME_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  ACTIONS_RUNTIME_URL: ${{ env.GITHUB_SERVER_URL }}/api/actions_pipeline/
+jobs:
+  build-zig-amd64-host:
+    runs-on: ubuntu-latest
+    # Need to use the default container with node and all that, so we can
+    # use JS-based actions like actions/checkout@v3...
+    # container:
+    #   image: alpine:3.15.0
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+        with:
+          ref: zig-0.13
+      - name: Setup Zig
+        uses: mlugg/setup-zig@v1.2.1
+        with:
+          version: 0.13.0
+      - name: Run tests
+        run: zig build test --verbose
+      # Zig build scripts don't have the ability to import dependencies directly
+      # (https://github.com/ziglang/zig/issues/18164). We can allow downstream
+      # build scripts to import aws with a few tweaks, but we can't @import("src/aws.zig")
+      # until we have our models built. So we have to have the build script
+      # basically modified, only during packaging, to allow this use case
+      #
+      # Zig package manager expects everything to be inside a directory in the archive,
+      # which it then strips out on download. So we need to shove everything inside a directory
+      # the way GitHub/Gitea does for repo archives
+      #
+      # Also, zig tar process doesn't handle gnu format for long names, nor does it seem to
+      # handle posix long name semantics cleanly either. ustar works. This
+      # should be using git archive, but we need our generated code to be part of it
+      - name: Package source code with generated models
+        run: |
+          sed -i 's#// UNCOMMENT AFTER MODEL GEN TO USE IN BUILD SCRIPTS //##' build.zig
+          tar -czf ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            --format ustar \
+            --exclude 'zig-*' \
+            *
+      # Something in this PR broke this transform. I don't mind removing it, but
+      # the PR attempts to handle situations with or without a prefix, but it
+      # doesn't. I have not yet determined what the problem is, though
+      # https://github.com/ziglang/zig/pull/19111/files
+      # --transform 's,^,${{ github.sha }}/,' *
+      # - name: Sign
+      #   id: sign
+      #   uses: https://git.lerch.org/lobo/action-hsm-sign@v1
+      #   with:
+      #     pin: ${{ secrets.HSM_USER_PIN }}
+      #     files: ???
+      #     public_key: 'https://emil.lerch.org/serverpublic.pem'
+      # - run: |
+      #     echo "Source 0 should be ./bar: ${{ steps.sign.outputs.SOURCE_0 }}"
+      # - run: |
+      #     echo "Signature 0 should be ./bar.sig: ${{ steps.sign.outputs.SIG_0 }}"
+      # - run: echo "URL of bar (0) is ${{ steps.sign.outputs.URL_0 }}"
+      # - run: |
+      #     echo "Source 1 should be ./foo: ${{ steps.sign.outputs.SOURCE_1 }}"
+      # - run: |
+      #     echo "Signature 1 should be ./foo.sig: ${{ steps.sign.outputs.SIG_1 }}"
+      # - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
+      - name: Publish source code with generated models
+        run: |
+          curl --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
+            --upload-file ${{ runner.temp }}/${{ github.sha }}-with-models.tar.gz \
+            https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/${{ github.sha }}/${{ github.sha }}-with-models.tar.gz
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds
+      - name: Notify
+        uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
+        if: always()
+        with:
+          host: ${{ secrets.NTFY_HOST }}
+          topic: ${{ secrets.NTFY_TOPIC }}
+          user: ${{ secrets.NTFY_USER }}
+          password: ${{ secrets.NTFY_PASSWORD }}
.github/workflows/build.yaml (vendored) | 9

@@ -1,11 +1,10 @@
-name: AWS-Zig Build
+name: Current zig version build
 on:
   push:
     branches:
-      - '*'
-      - '!zig-develop*'
+      - 'master'
 jobs:
-  build-zig-0-12-0-amd64:
+  build-amd64:
     runs-on: ubuntu-latest
     if: ${{ github.env.GITEA_ACTIONS != 'true' }}
     steps:

@@ -14,7 +13,7 @@ jobs:
       - name: Setup Zig
        uses: mlugg/setup-zig@v1.2.1
        with:
-         version: 0.13.0
+         version: 0.14.0
      - name: Run tests
        run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
      - name: Build example
.github/workflows/zig-mach.yaml (vendored) | 4

@@ -1,8 +1,8 @@
-name: aws-zig mach nominated build
+name: Latest mach nominated zig version build
 on:
   push:
     branches:
-      - 'zig-develop*'
+      - 'zig-mach*'
 jobs:
   build-zig-mach-latest:
     runs-on: ubuntu-latest
.github/workflows/zig-nightly.yaml (vendored) | 2

@@ -1,4 +1,4 @@
-name: aws-zig nightly build
+name: Nightly zig version Build
 on:
   push:
     branches:
.github/workflows/zig-previous.yaml (vendored) | 20 (new file)

@@ -0,0 +1,20 @@
+name: Previous zig version Build
+on:
+  push:
+    branches:
+      - 'zig-0.13'
+jobs:
+  build-amd64:
+    runs-on: ubuntu-latest
+    if: ${{ github.env.GITEA_ACTIONS != 'true' }}
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v4
+      - name: Setup Zig
+        uses: mlugg/setup-zig@v1.2.1
+        with:
+          version: 0.13.0
+      - name: Run tests
+        run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
+      - name: Build example
+        run: ( cd example && zig build ) # Make sure example builds
README.md | 62

@@ -1,11 +1,11 @@
 AWS SDK for Zig
 ===============

-[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+[Zig 0.14](https://ziglang.org/download/#release-0.14.0):

-[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)
+[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=build.yaml&state=closed)

-[Last Mach Nominated Zig Version](https://machengine.org/about/nominated-zig/):
+[Last Mach Nominated Zig Version](https://machengine.org/docs/nominated-zig/):

 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-mach.yaml&state=closed)

@@ -13,11 +13,16 @@ AWS SDK for Zig

 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)

+[Zig 0.13](https://ziglang.org/download/#release-0.13.0):
+
+[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-previous.yaml&state=closed)
+
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
-in x86_linux, and will vary based on services used. Tested targets:
+in x86_64-linux, and will vary based on services used. Tested targets:

 * x86_64-linux
-* riscv64-linux\*
+* riscv64-linux
 * aarch64-linux
 * x86_64-windows
 * arm-linux

@@ -26,25 +31,38 @@ in x86_64-linux, and will vary based on services used. Tested targets:

 Tested targets are built, but not continuously tested, by CI.

-\* On Zig 0.12/0.13, riscv64-linux disabled due to [LLVM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872)
+Branches
+--------
+
+* **master**:      This branch tracks the latest released zig version
+* **zig-0.13**:    This branch tracks the previous released zig version (0.13 currently).
+                   Support for the previous version is best effort, generally
+                   degrading over time. Fixes will generally appear in master, then
+                   be backported into the previous version.
+* **zig-mach**:    This branch tracks the latest mach nominated version. A separate
+                   branch is necessary as mach nominated is usually, but not always,
+                   more recent than the latest production zig. Support for the mach
+                   version is best effort.
+* **zig-develop**: This branch tracks zig nightly, and is used mainly as a canary
+                   for breaking changes that will need to be dealt with when
+                   a new mach nominated version or new zig release appears.
+                   Expect significant delays in any build failures.

-Zig-Develop Branch
-------------------
-
-This branch is intended for use with the in-development version of Zig. This
-starts with 0.12.0-dev.3180+83e578a18. This is aligned with [Mach Engine's Nominated
-Zig Versions](https://machengine.org/about/nominated-zig/). Nightly zig versions
-are difficult to keep up with and there is no special effort made there, build
-status is FYI (and used as a canary for nominated zig versions).
+Other branches/tags exist but are unsupported

 Building
 --------

 `zig build` should work. It will build the code generation project, fetch model
 files from upstream AWS Go SDK v2, run the code generation, then build the main
-project with the generated code. Testing can be done with `zig test`.
+project with the generated code. Testing can be done with `zig build test`. Note that
+this command tests on all supported architectures, so for a faster testing
+process, use `zig build smoke-test` instead.
+
+To make development even faster, a build option is provided to avoid the use of
+LLVM. To use this, use the command `zig build -Dno-llvm smoke-test`. This
+can cut build/test time by roughly two-thirds. Note, however, that native code
+generation in zig is not yet complete, so you may see errors.

 Using
 -----

@@ -52,7 +70,8 @@ Using

 This is designed for use with the Zig package manager, and exposes a module
 called "aws". Set up `build.zig.zon` and add the dependency/module to your project
 as normal and the package manager should do its thing. A full example can be found
-in [/example](example/README.md).
+in [/example](example/build.zig.zon). This can also be used at build time in
+a downstream project's `build.zig`.

 Configuring the module and/or Running the demo
 ----------------------------------------------
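Since the README now points at build-time consumption from a downstream project, a minimal consumer `build.zig` sketch may help; the module name `aws` comes from the README, while the executable name and source paths below are illustrative assumptions:

```zig
// Hypothetical downstream build.zig: wires the "aws" module this package
// exposes into a consumer executable. The dependency name "aws" is assumed
// to match the entry in the consumer's build.zig.zon.
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const exe = b.addExecutable(.{
        .name = "myapp", // illustrative name
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });

    // Fetch the dependency declared in build.zig.zon and import its module.
    const aws_dep = b.dependency("aws", .{ .target = target, .optimize = optimize });
    exe.root_module.addImport("aws", aws_dep.module("aws"));

    b.installArtifact(exe);
}
```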
@@ -60,8 +79,8 @@ Configuring the module and/or Running the demo

 This library mimics the aws c libraries for its work, so it operates like most
 other 'AWS things'. [/src/main.zig](src/main.zig) gives you a handful of examples
 for working with services. For local testing or alternative endpoints, there's
-no real standard, so there is code to look for `AWS_ENDPOINT_URL` environment
-variable that will supersede all other configuration.
+no real standard, so there is code to look for an `AWS_ENDPOINT_URL` environment
+variable that will supersede all other configuration.

 Limitations
 -----------

@@ -82,13 +101,6 @@ TODO List:

 * Implement timeouts and other TODO's in the code
 * Add option to cache signature keys

-Services without TLS 1.3 support
---------------------------------
-
-All AWS services should support TLS 1.3 at this point, but there are many regions
-and several partitions, and not all of them have been tested, so your mileage
-may vary. If something doesn't work, please submit an issue to let others know.

 Dependency tree
 ---------------
build.zig | 158

@@ -10,11 +10,7 @@ const test_targets = [_]std.Target.Query{
     .{}, // native
     .{ .cpu_arch = .x86_64, .os_tag = .linux },
     .{ .cpu_arch = .aarch64, .os_tag = .linux },
-    // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024
-    // This is likely a LLVM problem unlikely to be fixed in zig 0.13
-    // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440
-    // Zig tracker: https://github.com/ziglang/zig/issues/18872
-    // .{ .cpu_arch = .riscv64, .os_tag = .linux },
+    .{ .cpu_arch = .riscv64, .os_tag = .linux },
     .{ .cpu_arch = .arm, .os_tag = .linux },
     .{ .cpu_arch = .x86_64, .os_tag = .windows },
     .{ .cpu_arch = .aarch64, .os_tag = .macos },

@@ -33,11 +29,17 @@ pub fn build(b: *Builder) !void {
     // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
     const optimize = b.standardOptimizeOption(.{});

+    const no_llvm = b.option(
+        bool,
+        "no-llvm",
+        "Disable LLVM",
+    ) orelse false;
     const broken_windows = b.option(
         bool,
         "broken-windows",
         "Windows is broken in this environment (do not run Windows tests)",
     ) orelse false;
+    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;
     // TODO: Embed the current git version in the code. We can do this
     // by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
     // grab that commit, and use b.addOptions/exe.addOptions to generate the

@@ -56,6 +58,7 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
+    exe.use_llvm = !no_llvm;
     const smithy_dep = b.dependency("smithy", .{
         // These are the arguments to the dependency. It expects a target and optimization level.
         .target = target,

@@ -64,17 +67,6 @@ pub fn build(b: *Builder) !void {
     const smithy_module = smithy_dep.module("smithy");
     exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...

-    // Expose module to others
-    _ = b.addModule("aws", .{
-        .root_source_file = b.path("src/aws.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
-
-    // Expose module to others
-    _ = b.addModule("aws-signing", .{
-        .root_source_file = b.path("src/aws_signing.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
-    });
     // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
     //
     // We are working here with kind of a weird dependency though. So we can do this

@@ -97,61 +89,73 @@ pub fn build(b: *Builder) !void {
     const run_step = b.step("run", "Run the app");
     run_step.dependOn(&run_cmd.step);

-    const gen_step = blk: {
-        const cg = b.step("gen", "Generate zig service code from smithy models");
+    const cg = b.step("gen", "Generate zig service code from smithy models");

-        const cg_exe = b.addExecutable(.{
-            .name = "codegen",
-            .root_source_file = b.path("codegen/src/main.zig"),
-            // We need this generated for the host, not the real target
-            .target = b.graph.host,
-            .optimize = if (b.verbose) .Debug else .ReleaseSafe,
-        });
-        cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
-        var cg_cmd = b.addRunArtifact(cg_exe);
-        cg_cmd.addArg("--models");
-        const hash = hash_blk: {
-            for (b.available_deps) |dep| {
-                const dep_name = dep.@"0";
-                const dep_hash = dep.@"1";
-                if (std.mem.eql(u8, dep_name, "models"))
-                    break :hash_blk dep_hash;
-            }
-            return error.DependencyNamedModelsNotFoundInBuildZigZon;
-        };
-        cg_cmd.addArg(try std.fs.path.join(
-            b.allocator,
-            &[_][]const u8{
-                b.graph.global_cache_root.path.?,
-                "p",
-                hash,
-                models_subdir,
-            },
-        ));
-        cg_cmd.addArg("--output");
-        cg_cmd.addDirectoryArg(b.path("src/models"));
-        if (b.verbose)
-            cg_cmd.addArg("--verbose");
-        // cg_cmd.step.dependOn(&fetch_step.step);
-        // TODO: this should use zig_exe from std.Build
-        // codegen should store a hash in a comment
-        // this would be hash of the exe that created the file
-        // concatenated with hash of input json. this would
-        // allow skipping generated files. May not include hash
-        // of contents of output file as maybe we want to tweak
-        // manually??
-        //
-        // All the hashes can be in service_manifest.zig, which
-        // could be fun to just parse and go nuts. Top of
-        // file, generator exe hash. Each import has comment
-        // with both input and output hash and we can decide
-        // later about warning on manual changes...
+    const cg_exe = b.addExecutable(.{
+        .name = "codegen",
+        .root_source_file = b.path("codegen/src/main.zig"),
+        // We need this generated for the host, not the real target
+        .target = b.graph.host,
+        .optimize = if (b.verbose) .Debug else .ReleaseSafe,
+    });
+    cg_exe.root_module.addImport("smithy", smithy_module);
+    var cg_cmd = b.addRunArtifact(cg_exe);
+    cg_cmd.addArg("--models");
+    cg_cmd.addArg(try std.fs.path.join(
+        b.allocator,
+        &[_][]const u8{
+            try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
+            models_subdir,
+        },
+    ));
+    cg_cmd.addArg("--output");
+    const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
+    if (b.verbose)
+        cg_cmd.addArg("--verbose");
+    // cg_cmd.step.dependOn(&fetch_step.step);
+    // TODO: this should use zig_exe from std.Build
+    // codegen should store a hash in a comment
+    // this would be hash of the exe that created the file
+    // concatenated with hash of input json. this would
+    // allow skipping generated files. May not include hash
+    // of contents of output file as maybe we want to tweak
+    // manually??
+    //
+    // All the hashes can be in service_manifest.zig, which
+    // could be fun to just parse and go nuts. Top of
+    // file, generator exe hash. Each import has comment
+    // with both input and output hash and we can decide
+    // later about warning on manual changes...

-        cg.dependOn(&cg_cmd.step);
-        break :blk cg;
-    };
+    cg.dependOn(&cg_cmd.step);

-    exe.step.dependOn(gen_step);
+    exe.step.dependOn(cg);
+
+    // This allows us to have each module depend on the
+    // generated service manifest.
+    const service_manifest_module = b.createModule(.{
+        .root_source_file = cg_output_dir.path(b, "service_manifest.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+    service_manifest_module.addImport("smithy", smithy_module);
+
+    exe.root_module.addImport("service_manifest", service_manifest_module);
+
+    // Expose module to others
+    _ = b.addModule("aws", .{
+        .root_source_file = b.path("src/aws.zig"),
+        .imports = &.{
+            .{ .name = "smithy", .module = smithy_module },
+            .{ .name = "service_manifest", .module = service_manifest_module },
+        },
+    });
+
+    // Expose module to others
+    _ = b.addModule("aws-signing", .{
+        .root_source_file = b.path("src/aws_signing.zig"),
+        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+    });

     // Similar to creating the run step earlier, this exposes a `test` step to
     // the `zig build --help` menu, providing a way for the user to request

@@ -181,8 +185,10 @@ pub fn build(b: *Builder) !void {
             .target = b.resolveTargetQuery(t),
             .optimize = optimize,
         });
-        unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
-        unit_tests.step.dependOn(gen_step);
+        unit_tests.root_module.addImport("smithy", smithy_module);
+        unit_tests.root_module.addImport("service_manifest", service_manifest_module);
+        unit_tests.step.dependOn(cg);
+        unit_tests.use_llvm = !no_llvm;

         const run_unit_tests = b.addRunArtifact(unit_tests);
         run_unit_tests.skip_foreign_checks = true;

@@ -204,11 +210,17 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
-    smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
-    smoke_test.step.dependOn(gen_step);
+    smoke_test.use_llvm = !no_llvm;
+    smoke_test.root_module.addImport("smithy", smithy_module);
+    smoke_test.root_module.addImport("service_manifest", service_manifest_module);
+    smoke_test.step.dependOn(cg);

     const run_smoke_test = b.addRunArtifact(smoke_test);

    smoke_test_step.dependOn(&run_smoke_test.step);
-    b.installArtifact(exe);
+    if (no_bin) {
+        b.getInstallStep().dependOn(&exe.step);
+    } else {
+        b.installArtifact(exe);
+    }
 }
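The `no-bin` branch above follows the compile-without-emit workflow: the build graph still analyzes and compiles the executable (surfacing compile errors quickly) but skips installing a binary. A standalone sketch of the pattern with generic names, not this repo's build graph:

```zig
const std = @import("std");

// Sketch of the `-Dno-bin` pattern: depending on the compile step directly,
// instead of installing the artifact, gives a fast "does it compile?" loop.
pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});
    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;

    const exe = b.addExecutable(.{
        .name = "demo", // illustrative
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });

    if (no_bin) {
        // Compile only: hang the compile step off the install step.
        b.getInstallStep().dependOn(&exe.step);
    } else {
        b.installArtifact(exe);
    }
}
```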
@@ -1,18 +1,20 @@
 .{
-    .name = "aws",
+    .name = .aws,
     .version = "0.0.1",
+    .fingerprint = 0x1f26b7b27005bb49,
     .paths = .{
         "build.zig",
         "build.zig.zon",
         "src",
         "codegen",
         "README.md",
         "LICENSE",
     },

     .dependencies = .{
         .smithy = .{
-            .url = "https://git.lerch.org/lobo/smithy/archive/3ed98751bc414e005af6ad185feb213d4366c0db.tar.gz",
-            .hash = "12204a784751a4ad5ed6c8955ba91fcbc4a3cad6c5a7da38f39abf074ef801d13172",
+            .url = "https://git.lerch.org/lobo/smithy/archive/a4c6ec6dfe552c57bab601c7d99e8de02bbab1fe.tar.gz",
+            .hash = "smithy-1.0.0-uAyBgS_MAgC4qgc9QaEy5Y5Nf7kv32buQZBYugqNQsAn",
         },
         .models = .{
             .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
@ -2,9 +2,9 @@ const std = @import("std");
|
|||
// options is a json.Options, but since we're using our hacked json.zig we don't want to
|
||||
// specifically call this out
|
||||
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
|
||||
if (@typeInfo(@TypeOf(map)) == .Optional) {
|
||||
if (@typeInfo(@TypeOf(map)) == .optional) {
|
||||
if (map == null)
|
||||
return true
|
||||
return false
|
||||
else
|
||||
return serializeMapInternal(map.?, key, options, out_stream);
|
||||
}
|
||||
|
@ -12,7 +12,23 @@ pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream:
|
|||
}
|
||||
|
||||
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
|
||||
if (map.len == 0) return true;
|
||||
if (map.len == 0) {
|
||||
var child_options = options;
|
||||
if (child_options.whitespace) |*child_ws|
|
||||
child_ws.indent_level += 1;
|
||||
|
||||
try out_stream.writeByte('"');
|
||||
try out_stream.writeAll(key);
|
||||
_ = try out_stream.write("\":");
|
||||
if (options.whitespace) |ws| {
|
||||
if (ws.separator) {
|
||||
try out_stream.writeByte(' ');
|
||||
}
|
||||
}
|
||||
try out_stream.writeByte('{');
|
||||
try out_stream.writeByte('}');
|
||||
return true;
|
||||
}
|
||||
// TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
|
||||
var child_options = options;
|
||||
if (child_options.whitespace) |*child_ws|
|
||||
|
|
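The new branch writes the key followed by a literal `{}`, so a present-but-empty map serializes as an empty JSON object instead of the key vanishing (the kms test later in this diff asserts exactly this). A self-contained sketch of the intended output shape, with illustrative names rather than the repo's hacked json.zig:

```zig
const std = @import("std");

// A present-but-empty map field should appear as `"EncryptionContext":{}`;
// a null field is a separate case and is handled before we get here.
test "empty map emits {} instead of omitting the key" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    const w = buf.writer();

    const entries = [_]struct { key: []const u8, value: []const u8 }{};
    try w.writeAll("\"EncryptionContext\":");
    try w.writeByte('{');
    for (entries) |e| { // no entries: the object stays empty
        try w.print("\"{s}\":\"{s}\"", .{ e.key, e.value });
    }
    try w.writeByte('}');
    try std.testing.expectEqualStrings("\"EncryptionContext\":{}", buf.items);
}
```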
@@ -435,7 +435,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:

 fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerationState, writer: anytype) !void {
     // More types may be added during processing
-    while (file_state.additional_types_to_generate.popOrNull()) |t| {
+    while (file_state.additional_types_to_generate.pop()) |t| {
         if (file_state.additional_types_generated.getEntry(t.name) != null) continue;
         // std.log.info("\t\t{s}", .{t.name});
         var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
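`popOrNull()` is the pre-0.14 name; in Zig 0.14, `std.ArrayList.pop()` itself returns an optional (null when the list is empty), which is why these loops change but keep their shape. A minimal sketch of the new behavior:

```zig
const std = @import("std");

// Zig 0.14: `popOrNull()` is gone; `pop()` now returns `?T`.
test "pop returns an optional in Zig 0.14" {
    var list = std.ArrayList(u32).init(std.testing.allocator);
    defer list.deinit();
    try list.append(42);
    try std.testing.expectEqual(@as(?u32, 42), list.pop());
    try std.testing.expectEqual(@as(?u32, null), list.pop()); // empty now
}
```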
@@ -1,12 +1,13 @@
 .{
-    .name = "myapp",
+    .name = .myapp,
     .version = "0.0.1",
+    .fingerprint = 0x8798022a511224c5,
     .paths = .{""},

     .dependencies = .{
         .aws = .{
-            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/dfda8e77d624dfb776e3a70471501a7c610fbac1/dfda8e77d624dfb776e3a70471501a7c610fbac1-with-models.tar.gz",
-            .hash = "122000ad704234e68fee82a52e3b4e365a52874ec851d978b109e05ac66a80dc86ac",
+            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/30d46261b791a1a916e30e60814b39c7ee994a74/30d46261b791a1a916e30e60814b39c7ee994a74-with-models.tar.gz",
+            .hash = "aws-0.0.1-SbsFcLuV6gEkmY-mNp_x-V_GJ-zuJRqIljc4tAu60-g_",
         },
     },
 }
src/aws.zig | 146

@@ -263,9 +263,9 @@ pub fn Request(comptime request_action: anytype) type {
         fn callJson(request: ActionRequest, options: Options) !FullResponseType {
             const target =
                 try std.fmt.allocPrint(options.client.allocator, "{s}.{s}", .{
-                Self.service_meta.name,
-                action.action_name,
-            });
+                    Self.service_meta.name,
+                    action.action_name,
+                });
             defer options.client.allocator.free(target);

             var buffer = std.ArrayList(u8).init(options.client.allocator);

@@ -326,11 +326,11 @@ pub fn Request(comptime request_action: anytype) type {
             // originally?
             const body =
                 try std.fmt.allocPrint(options.client.allocator, "Action={s}&Version={s}{s}{s}", .{
-                action.action_name,
-                Self.service_meta.version,
-                continuation,
-                buffer.items,
-            });
+                    action.action_name,
+                    Self.service_meta.version,
+                    continuation,
+                    buffer.items,
+                });
             defer options.client.allocator.free(body);
             return try Self.callAws(.{
                 .query = query,

@@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type {
             // And the response property below will pull whatever is the ActionResult object
             // We can grab index [0] as structs are guaranteed by zig to be returned in the order
             // declared, and we're declaring in that order in ServerResponse().
-            const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name);
+            const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
             return FullResponseType{
-                .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name),
+                .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
                 .response_metadata = .{
                     .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
                 },
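This hunk and most of the remaining churn in src/aws.zig, src/json.zig, src/main.zig, and src/url.zig is the mechanical Zig 0.14 rename of the `std.builtin.Type` union tags from capitalized to lowercase, with keyword names quoted (`.Struct` → `.@"struct"`, `.Optional` → `.optional`, `.Pointer` → `.pointer`). A small sketch of the new spelling, assuming nothing beyond the standard library:

```zig
const std = @import("std");

// Zig 0.14 @typeInfo tag rename in action: lowercase tags, quoted keywords.
fn fieldCount(comptime T: type) usize {
    comptime {
        return switch (@typeInfo(T)) {
            .@"struct" => |s| s.fields.len, // was `.Struct => |s| ...`
            .optional => |o| fieldCount(o.child), // was `.Optional`
            else => 0,
        };
    }
}

test "tag rename example" {
    const Pair = struct { key: []const u8, value: []const u8 };
    try std.testing.expectEqual(@as(usize, 2), fieldCount(Pair));
    try std.testing.expectEqual(@as(usize, 2), fieldCount(?Pair));
}
```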
@@ -739,20 +739,20 @@ pub fn Request(comptime request_action: anytype) type {
                 errdefer options.client.allocator.destroy(ptr);
                 @field(ptr.*, std.meta.fields(action.Response)[0].name) =
                     json.parse(response_types.RawResponse, &stream, parser_options) catch |e| {
-                    log.err(
-                        \\Call successful, but unexpected response from service.
-                        \\This could be the result of a bug or a stale set of code generated
-                        \\service models.
-                        \\
-                        \\Model Type: {}
-                        \\
-                        \\Response from server:
-                        \\
-                        \\{s}
-                        \\
-                    , .{ action.Response, data });
-                    return e;
-                };
+                        log.err(
+                            \\Call successful, but unexpected response from service.
+                            \\This could be the result of a bug or a stale set of code generated
+                            \\service models.
+                            \\
+                            \\Model Type: {}
+                            \\
+                            \\Response from server:
+                            \\
+                            \\{s}
+                            \\
+                        , .{ action.Response, data });
+                        return e;
+                    };
                 break :blk ptr;
             };
             return ParsedJsonData(response_types.NormalResponse){

@@ -773,12 +773,16 @@ fn isOtherNormalResponse(comptime T: type, first_key: []const u8) bool {
     return std.mem.eql(u8, first_key, expected_key);
 }
 fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
-    if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val);
+    if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val);
     // TODO: This is terrible...fix it
     switch (T) {
         bool => return std.ascii.eqlIgnoreCase(val, "true"),
-        i64 => return parseInt(T, val) catch |e| {
-            log.err("Invalid string representing i64: {s}", .{val});
+        i64, i128 => return parseInt(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
+            return e;
+        },
+        f64, f128 => return std.fmt.parseFloat(T, val) catch |e| {
+            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
             return e;
         },
         else => return val,

@@ -806,8 +810,8 @@ fn parseInt(comptime T: type, val: []const u8) !T {

 fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
     switch (@typeInfo(@TypeOf(val))) {
-        .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
-        .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
+        .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
+        .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
         else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
     }
 }

@@ -926,20 +930,20 @@ fn ServerResponse(comptime action: anytype) type {
         RequestId: []u8,
     };
     const Result = @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
                     .name = action.action_name ++ "Result",
                     .type = T,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },
                 .{
                     .name = "ResponseMetadata",
                     .type = ResponseMetadata,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },

@@ -949,13 +953,13 @@ fn ServerResponse(comptime action: anytype) type {
         },
     });
     return @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
                     .name = action.action_name ++ "Response",
                     .type = Result,
-                    .default_value = null,
+                    .default_value_ptr = null,
                     .is_comptime = false,
                     .alignment = 0,
                 },

@@ -1015,8 +1019,8 @@ fn FullResponse(comptime action: anytype) type {
 }
 fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
     switch (@typeInfo(@TypeOf(obj))) {
-        .Pointer => allocator.free(obj),
-        .Optional => if (obj) |o| safeFree(allocator, o),
+        .pointer => allocator.free(obj),
+        .optional => if (obj) |o| safeFree(allocator, o),
         else => {},
     }
 }
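The `default_value` → `default_value_ptr` rename on `std.builtin.Type.StructField` is the other Zig 0.14 change at work in the `ServerResponse` hunks above. A hedged, self-contained sketch of reifying a struct with `@Type` under the new field name (names here are illustrative):

```zig
const std = @import("std");

// Reifying a one-field struct in Zig 0.14: the StructField member is now
// `default_value_ptr` (a ?*const anyopaque), not `default_value`.
fn SingleField(comptime name: [:0]const u8, comptime T: type) type {
    return @Type(.{
        .@"struct" = .{
            .layout = .auto,
            .fields = &[_]std.builtin.Type.StructField{.{
                .name = name,
                .type = T,
                .default_value_ptr = null, // was `.default_value` before 0.14
                .is_comptime = false,
                .alignment = 0,
            }},
            .decls = &[_]std.builtin.Type.Declaration{},
            .is_tuple = false,
        },
    });
}

test "reified struct has the field" {
    const S = SingleField("id", u32);
    const s = S{ .id = 7 };
    try std.testing.expectEqual(@as(u32, 7), s.id);
}
```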
@@ -1125,7 +1129,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
     var prefix = "?";
     if (@hasDecl(@TypeOf(request), "http_query")) {
         const query_arguments = @field(@TypeOf(request), "http_query");
-        inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
+        inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| {
             const val = @field(request, arg.name);
             if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer))
                 prefix = "&";

@@ -1136,14 +1140,14 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {

 fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
     switch (@typeInfo(@TypeOf(value))) {
-        .Optional => {
+        .optional => {
             if (value) |v|
                 return try addQueryArg(ValueType, prefix, key, v, writer);
             return false;
         },
         // if this is a pointer, we want to make sure it is more than just a string
-        .Pointer => |ptr| {
-            if (ptr.child == u8 or ptr.size != .Slice) {
+        .pointer => |ptr| {
+            if (ptr.child == u8 or ptr.size != .slice) {
                 // This is just a string
                 return try addBasicQueryArg(prefix, key, value, writer);
             }

@@ -1154,7 +1158,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
             }
             return std.mem.eql(u8, "&", p);
         },
-        .Array => |arr| {
+        .array => |arr| {
             if (arr.child == u8)
                 return try addBasicQueryArg(prefix, key, value, writer);
             var p = prefix;

@@ -1274,8 +1278,8 @@ fn reportTraffic(
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }

@@ -1289,7 +1293,7 @@ test "custom serialization for map objects" {
     const allocator = std.testing.allocator;
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });

@@ -1306,6 +1310,58 @@ test "custom serialization for map objects" {
     , buffer.items);
 }

+test "proper serialization for kms" {
+    // Github issue #8
+    // https://github.com/elerch/aws-sdk-for-zig/issues/8
+    const allocator = std.testing.allocator;
+    var buffer = std.ArrayList(u8).init(allocator);
+    defer buffer.deinit();
+    const req = services.kms.encrypt.Request{
+        .encryption_algorithm = "SYMMETRIC_DEFAULT",
+        // Since encryption_context is not null, we expect "{}" to be the value
+        // here, not "[]", because this is our special AWS map pattern
+        .encryption_context = &.{},
+        .key_id = "42",
+        .plaintext = "foo",
+        .dry_run = false,
+        .grant_tokens = &[_][]const u8{},
+    };
+    try json.stringify(req, .{ .whitespace = .{} }, buffer.writer());
+    try std.testing.expectEqualStrings(
+        \\{
+        \\ "KeyId": "42",
+        \\ "Plaintext": "foo",
+        \\ "EncryptionContext": {},
+        \\ "GrantTokens": [],
+        \\ "EncryptionAlgorithm": "SYMMETRIC_DEFAULT",
+        \\ "DryRun": false
+        \\}
+    , buffer.items);
+
+    var buffer_null = std.ArrayList(u8).init(allocator);
+    defer buffer_null.deinit();
+    const req_null = services.kms.encrypt.Request{
+        .encryption_algorithm = "SYMMETRIC_DEFAULT",
+        // Since encryption_context here *IS* null, we expect simply "null" to be the value
+        .encryption_context = null,
+        .key_id = "42",
+        .plaintext = "foo",
+        .dry_run = false,
+        .grant_tokens = &[_][]const u8{},
+    };
+    try json.stringify(req_null, .{ .whitespace = .{} }, buffer_null.writer());
+    try std.testing.expectEqualStrings(
+        \\{
+        \\ "KeyId": "42",
+        \\ "Plaintext": "foo",
+        \\ "EncryptionContext": null,
+        \\ "GrantTokens": [],
+        \\ "EncryptionAlgorithm": "SYMMETRIC_DEFAULT",
+        \\ "DryRun": false
+        \\}
+    , buffer_null.items);
+}
+
 test "REST Json v1 builds proper queries" {
     const allocator = std.testing.allocator;
     const svs = Services(.{.lambda}){};

@@ -2051,7 +2107,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
     defer test_harness.deinit();
     const options = try test_harness.start();
     const lambda = (Services(.{.lambda}){}).lambda;
-    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };
@@ -662,12 +662,12 @@ fn canonicalUri(allocator: std.mem.Allocator, path: []const u8, double_encode: b
     }
     defer allocator.free(encoded_once);
     var encoded_twice = try encodeUri(allocator, encoded_once);
+    defer allocator.free(encoded_twice);
     log.debug("encoded path (2): {s}", .{encoded_twice});
     if (std.mem.lastIndexOf(u8, encoded_twice, "?")) |i| {
-        _ = allocator.resize(encoded_twice, i);
-        return encoded_twice[0..i];
+        return try allocator.dupe(u8, encoded_twice[0..i]);
     }
-    return encoded_twice;
+    return try allocator.dupe(u8, encoded_twice);
 }

 fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {

@@ -936,6 +936,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
     const in_quote = false;
     var start: usize = 0;
     const rc = try allocator.alloc(u8, value.len);
+    defer allocator.free(rc);
     var rc_inx: usize = 0;
     for (value, 0..) |c, i| {
         if (!started and !std.ascii.isWhitespace(c)) {

@@ -953,8 +954,7 @@ fn canonicalHeaderValue(allocator: std.mem.Allocator, value: []const u8) ![]cons
     // Trim end
     while (std.ascii.isWhitespace(rc[rc_inx - 1]))
         rc_inx -= 1;
-    _ = allocator.resize(rc, rc_inx);
-    return rc[0..rc_inx];
+    return try allocator.dupe(u8, rc[0..rc_inx]);
 }
 fn lessThan(context: void, lhs: std.http.Header, rhs: std.http.Header) bool {
     _ = context;

@@ -986,6 +986,7 @@ test "canonical uri" {
     const path = "/documents and settings/?foo=bar";
     const expected = "/documents%2520and%2520settings/";
     const actual = try canonicalUri(allocator, path, true);
+    defer allocator.free(actual);
     try std.testing.expectEqualStrings(expected, actual);
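The pattern being removed relied on `Allocator.resize`, which is best-effort: it returns false when the allocation cannot be shrunk in place, so ignoring its result leaves the length bookkeeping ambiguous. Duplicating the prefix (and freeing the scratch buffer via `defer`) sidesteps that entirely; a sketch under those assumptions:

```zig
const std = @import("std");

// Why the diff swaps `_ = allocator.resize(buf, i)` for `dupe`: copying the
// wanted prefix into a fresh allocation is always correct, regardless of
// whether the allocator could have shrunk the original in place.
test "shrink by copying a prefix" {
    const allocator = std.testing.allocator;
    const buf = try allocator.alloc(u8, 16);
    defer allocator.free(buf); // scratch buffer always freed
    @memset(buf, 'a');

    const prefix = try allocator.dupe(u8, buf[0..4]);
    defer allocator.free(prefix);
    try std.testing.expectEqualStrings("aaaa", prefix);
}
```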
src/json.zig | 98

@@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {

 fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .Bool => {
+        .bool => {
             return switch (token) {
                 .True => true,
                 .False => false,
                 else => error.UnexpectedToken,
             };
         },
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
             };
             return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,

@@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             if (std.math.round(float) != float) return error.InvalidNumber;
             return @as(T, @intFromFloat(float));
         },
-        .Optional => |optionalInfo| {
+        .optional => |optionalInfo| {
             if (token == .Null) {
                 return null;
             } else {
                 return try parseInternal(optionalInfo.child, token, tokens, options);
             }
         },
-        .Enum => |enumInfo| {
+        .@"enum" => |enumInfo| {
             switch (token) {
                 .Number => |numberToken| {
                     if (!numberToken.is_integer) return error.UnexpectedToken;

@@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 else => return error.UnexpectedToken,
             }
         },
-        .Union => |unionInfo| {
+        .@"union" => |unionInfo| {
             if (unionInfo.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 inline for (unionInfo.fields) |u_field| {

@@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
             }
         },
-        .Struct => |structInfo| {
+        .@"struct" => |structInfo| {
             switch (token) {
                 .ObjectBegin => {},
                 else => return error.UnexpectedToken,

@@ -1723,7 +1723,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             }
             inline for (structInfo.fields, 0..) |field, i| {
                 if (!fields_seen[i]) {
-                    if (field.default_value) |default_value_ptr| {
+                    if (field.default_value_ptr) |default_value_ptr| {
                         if (!field.is_comptime) {
                             const default_value = @as(*align(1) const field.type, @ptrCast(default_value_ptr)).*;
                             @field(r, field.name) = default_value;

@@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             }
             return r;
         },
-        .Array => |arrayInfo| {
+        .array => |arrayInfo| {
             switch (token) {
                 .ArrayBegin => {
                     var r: T = undefined;

@@ -1770,21 +1770,21 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 else => return error.UnexpectedToken,
             }
         },
-        .Pointer => |ptrInfo| {
+        .pointer => |ptrInfo| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptrInfo.size) {
-                .One => {
+                .one => {
                     const r: T = try allocator.create(ptrInfo.child);
                     errdefer allocator.destroy(r);
                     r.* = try parseInternal(ptrInfo.child, token, tokens, options);
                     return r;
                 },
-                .Slice => {
+                .slice => {
                     switch (token) {
                         .ArrayBegin => {
                             var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                             errdefer {
-                                while (arraylist.popOrNull()) |v| {
+                                while (arraylist.pop()) |v| {
                                     parseFree(ptrInfo.child, v, options);
                                 }
                                 arraylist.deinit();

@@ -1829,7 +1829,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                             if (value_type == null) return error.UnexpectedToken;
                             var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                             errdefer {
-                                while (arraylist.popOrNull()) |v| {
+                                while (arraylist.pop()) |v| {
                                     parseFree(ptrInfo.child, v, options);
                                 }
                                 arraylist.deinit();

@@ -1863,8 +1863,8 @@
 fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }

@@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
     // We should be getting a type that is a pointer to a slice.
     // Let's just double check before proceeding
     const ti = @typeInfo(T);
-    if (ti != .Pointer) return false;
-    if (ti.Pointer.size != .Slice) return false;
-    const ti_child = @typeInfo(ti.Pointer.child);
-    if (ti_child != .Struct) return false;
-    if (ti_child.Struct.fields.len != 2) return false;
+    if (ti != .pointer) return false;
+    if (ti.pointer.size != .slice) return false;
+    const ti_child = @typeInfo(ti.pointer.child);
+    if (ti_child != .@"struct") return false;
+    if (ti_child.@"struct".fields.len != 2) return false;
     var key_found = false;
     var value_found = false;
-    inline for (ti_child.Struct.fields) |field| {
+    inline for (ti_child.@"struct".fields) |field| {
         if (std.mem.eql(u8, "key", field.name))
             key_found = true;
         if (std.mem.eql(u8, "value", field.name))

@@ -1904,13 +1904,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
 /// Should be called with the same type and `ParseOptions` that were passed to `parse`
 pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
     switch (@typeInfo(T)) {
-        .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
-        .Optional => {
+        .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
+        .optional => {
             if (value) |v| {
                 return parseFree(@TypeOf(v), v, options);
             }
         },
-        .Union => |unionInfo| {
+        .@"union" => |unionInfo| {
             if (unionInfo.tag_type) |UnionTagType| {
                 inline for (unionInfo.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {

@@ -1922,24 +1922,24 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
                 unreachable;
             }
         },
-        .Struct => |structInfo| {
+        .@"struct" => |structInfo| {
             inline for (structInfo.fields) |field| {
                 parseFree(field.type, @field(value, field.name), options);
             }
         },
-        .Array => |arrayInfo| {
+        .array => |arrayInfo| {
             for (value) |v| {
                 parseFree(arrayInfo.child, v, options);
             }
         },
-        .Pointer => |ptrInfo| {
+        .pointer => |ptrInfo| {
             const allocator = options.allocator orelse unreachable;
             switch (ptrInfo.size) {
-                .One => {
+                .one => {
                     parseFree(ptrInfo.child, value.*, options);
                     allocator.destroy(value);
                 },
-                .Slice => {
+                .slice => {
                     for (value) |v| {
                         parseFree(ptrInfo.child, v, options);
                     }

@@ -2284,7 +2284,7 @@ pub const Parser = struct {
                     return;
                 }

-                var value = p.stack.pop();
+                var value = p.stack.pop().?;
                 try p.pushToParent(&value);
             },
             .String => |s| {

@@ -2350,7 +2350,7 @@ pub const Parser = struct {
                     return;
                 }

-                var value = p.stack.pop();
+                var value = p.stack.pop().?;
                 try p.pushToParent(&value);
             },
             .ObjectBegin => {

@@ -2812,38 +2812,38 @@ pub fn stringify(
 ) !void {
     const T = @TypeOf(value);
     switch (@typeInfo(T)) {
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             return std.fmt.format(out_stream, "{e}", .{value});
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
         },
-        .Bool => {
+        .bool => {
             return out_stream.writeAll(if (value) "true" else "false");
         },
-        .Null => {
+        .null => {
             return out_stream.writeAll("null");
         },
-        .Optional => {
+        .optional => {
             if (value) |payload| {
                 return try stringify(payload, options, out_stream);
             } else {
                 return try stringify(null, options, out_stream);
             }
         },
-        .Enum => {
+        .@"enum" => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }

             @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
         },
-        .Union => {
+        .@"union" => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }

-            const info = @typeInfo(T).Union;
+            const info = @typeInfo(T).@"union";
             if (info.tag_type) |UnionTagType| {
                 inline for (info.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {

@@ -2854,7 +2854,7 @@ pub fn stringify(
             @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
         },
-        .Struct => |S| {
+        .@"struct" => |S| {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }

@@ -2870,7 +2870,7 @@ pub fn stringify(
                 if (Field.type == void) continue;

                 var output_this_field = true;
-                if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false;
+                if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;

                 const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                     value.fieldNameFor(Field.name)

@@ -2920,10 +2920,10 @@ pub fn stringify(
             try out_stream.writeByte('}');
             return;
         },
-        .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
-        .Pointer => |ptr_info| switch (ptr_info.size) {
-            .One => switch (@typeInfo(ptr_info.child)) {
-                .Array => {
+        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
+        .pointer => |ptr_info| switch (ptr_info.size) {
+            .one => switch (@typeInfo(ptr_info.child)) {
+                .array => {
                     const Slice = []const std.meta.Elem(ptr_info.child);
                     return stringify(@as(Slice, value), options, out_stream);
                 },

@@ -2933,7 +2933,7 @@ pub fn stringify(
                 },
             },
             // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
-            .Slice => {
+            .slice => {
                 if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
                     try out_stream.writeByte('\"');
                     var i: usize = 0;

@@ -3002,8 +3002,8 @@ pub fn stringify(
             },
             else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
         },
-        .Array => return stringify(&value, options, out_stream),
-        .Vector => |info| {
+        .array => return stringify(&value, options, out_stream),
+        .vector => |info| {
             const array: [info.len]info.child = value;
             return stringify(&array, options, out_stream);
         },
src/main.zig | 10

@@ -97,7 +97,7 @@ pub fn main() anyerror!void {
             }
             continue;
         }
-        inline for (@typeInfo(Tests).Enum.fields) |f| {
+        inline for (@typeInfo(Tests).@"enum".fields) |f| {
             if (std.mem.eql(u8, f.name, arg)) {
                 try tests.append(@field(Tests, f.name));
                 break;

@@ -105,7 +105,7 @@ pub fn main() anyerror!void {
         }
     }
     if (tests.items.len == 0) {
-        inline for (@typeInfo(Tests).Enum.fields) |f|
+        inline for (@typeInfo(Tests).@"enum".fields) |f|
             try tests.append(@field(Tests, f.name));
     }

@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
     const func = fns[0];
     const arn = func.function_arn.?;
     // This is a bit ugly. Maybe a helper function in the library would help?
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };

@@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1,5 +1,5 @@
 const std = @import("std");
-const service_list = @import("models/service_manifest.zig");
+const service_list = @import("service_manifest");
 const expectEqualStrings = std.testing.expectEqualStrings;

 pub fn Services(comptime service_imports: anytype) type {

@@ -12,7 +12,7 @@ pub fn Services(comptime service_imports: anytype) type {
         item.* = .{
             .name = @tagName(service_imports[i]),
             .type = @TypeOf(import_field),
-            .default_value = &import_field,
+            .default_value_ptr = &import_field,
             .is_comptime = false,
             .alignment = 0,
         };

@@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type {

     // finally, generate the type
     return @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &fields,
             .decls = &[_]std.builtin.Type.Declaration{},
19
src/url.zig
19
src/url.zig
@@ -24,10 +24,11 @@ fn encodeStruct(
     comptime options: EncodingOptions,
 ) !bool {
     var rc = first;
-    inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
-        const field_name = try options.field_name_transformer(allocator, field.name);
-        defer if (options.field_name_transformer.* != defaultTransformer)
-            allocator.free(field_name);
+    var arena = std.heap.ArenaAllocator.init(allocator);
+    defer arena.deinit();
+    const arena_alloc = arena.allocator();
+    inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
+        const field_name = try options.field_name_transformer(arena_alloc, field.name);
         // @compileLog(@typeInfo(field.field_type).Pointer);
         rc = try encodeInternal(allocator, parent, field_name, rc, @field(obj, field.name), writer, options);
     }

@@ -47,10 +48,10 @@ pub fn encodeInternal(
     // @compileLog(@typeInfo(@TypeOf(obj)));
     var rc = first;
     switch (@typeInfo(@TypeOf(obj))) {
-        .Optional => if (obj) |o| {
+        .optional => if (obj) |o| {
             rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
         },
-        .Pointer => |ti| if (ti.size == .One) {
+        .pointer => |ti| if (ti.size == .one) {
             rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
         } else {
             if (!first) _ = try writer.write("&");

@@ -61,7 +62,7 @@ pub fn encodeInternal(
             try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
             rc = false;
         },
-        .Struct => if (std.mem.eql(u8, "", field_name)) {
+        .@"struct" => if (std.mem.eql(u8, "", field_name)) {
             rc = try encodeStruct(allocator, parent, first, obj, writer, options);
         } else {
             // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime

@@ -73,12 +74,12 @@ pub fn encodeInternal(
             rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
             // try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options);
         },
-        .Array => {
+        .array => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
             rc = false;
         },
-        .Int, .ComptimeInt, .Float, .ComptimeFloat => {
+        .int, .comptime_int, .float, .comptime_float => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
             rc = false;
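The `encodeStruct` hunk replaces per-name `defer`d frees with an arena: every transformed field name is allocated from the arena and released by a single `deinit` when the function returns. A generic sketch of that lifetime pattern (hypothetical `buildGreetings` helper, not the repo's API):

```zig
const std = @import("std");

// Arena pattern from the encodeStruct hunk: allocate any number of
// short-lived strings, then free them all with one deinit.
fn buildGreetings(parent_allocator: std.mem.Allocator, names: []const []const u8) !usize {
    var arena = std.heap.ArenaAllocator.init(parent_allocator);
    defer arena.deinit(); // frees every allocation below at once
    const alloc = arena.allocator();

    var total: usize = 0;
    for (names) |name| {
        const greeting = try std.fmt.allocPrint(alloc, "hello, {s}", .{name});
        total += greeting.len; // no individual free needed
    }
    return total;
}

test "buildGreetings" {
    const n = try buildGreetings(std.testing.allocator, &.{ "a", "bb" });
    try std.testing.expectEqual(@as(usize, 17), n);
}
```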
@@ -653,7 +653,10 @@ fn dupeAndUnescape(alloc: Allocator, text: []const u8) ![]const u8 {

     // This error is not strictly true, but we need to match one of the items
     // from the error set provided by the other stdlib calls at the calling site
-    if (!alloc.resize(str, j)) return error.OutOfMemory;
+    if (!alloc.resize(str, j)) {
+        defer alloc.free(str);
+        return alloc.dupe(u8, str[0..j]) catch return error.OutOfMemory;
+    }
     return str[0..j];
 }
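The `dupeAndUnescape` hunk stops treating a failed in-place `resize` as `OutOfMemory`: when the allocator cannot shrink the buffer, the valid prefix is copied to a right-sized allocation and the original is freed. A sketch of that shrink-or-copy pattern (hypothetical `shrinkOrCopy` helper):

```zig
const std = @import("std");

// Shrink a buffer to `used` bytes: try an in-place resize first, and
// fall back to dupe-and-free when the allocator refuses — the same
// strategy as the dupeAndUnescape hunk above.
fn shrinkOrCopy(alloc: std.mem.Allocator, buf: []u8, used: usize) ![]u8 {
    if (alloc.resize(buf, used)) return buf[0..used];
    defer alloc.free(buf);
    return try alloc.dupe(u8, buf[0..used]);
}

test "shrinkOrCopy" {
    const alloc = std.testing.allocator;
    const buf = try alloc.alloc(u8, 16);
    @memcpy(buf[0..5], "hello");
    const out = try shrinkOrCopy(alloc, buf, 5);
    defer alloc.free(out);
    try std.testing.expectEqualStrings("hello", out);
}
```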
@@ -96,14 +96,14 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse

 fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .Bool => {
+        .bool => {
             if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
                 return true;
             if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
                 return false;
             return error.UnexpectedToken;
         },
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
                     // We have an iso8601 in an integer field (we think)

@@ -127,7 +127,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return e;
             };
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             // 2021-10-05T16:39:45.000Z
             return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {

@@ -152,7 +152,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return e;
             };
         },
-        .Optional => |optional_info| {
+        .optional => |optional_info| {
             if (element.children.items.len == 0) {
                 // This is almost certainly incomplete. Empty strings? xsi:nil?
                 return null;

@@ -162,7 +162,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return try parseInternal(optional_info.child, element, options);
             }
         },
-        .Enum => |enum_info| {
+        .@"enum" => |enum_info| {
             _ = enum_info;
             // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
             // if (numeric) |num| {

@@ -172,7 +172,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
             // return std.meta.stringToEnum(T, element.CharData);
             // }
         },
-        .Union => |union_info| {
+        .@"union" => |union_info| {
             if (union_info.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 // inline for (union_info.fields) |u_field| {

@@ -195,7 +195,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
             }
             @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
         },
-        .Struct => |struct_info| {
+        .@"struct" => |struct_info| {
             var r: T = undefined;
             var fields_seen = [_]bool{false} ** struct_info.fields.len;
             var fields_set: u64 = 0;

@@ -250,7 +250,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                     fields_set = fields_set + 1;
                     found_value = true;
                 }
-                if (@typeInfo(field.type) == .Optional) {
+                if (@typeInfo(field.type) == .optional) {
                     // Test "compiler assertion failure 2"
                     // Zig compiler bug circa 0.9.0. Using "and !found_value"
                     // in the if statement above will trigger assertion failure

@@ -275,7 +275,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return error.FieldElementMismatch; // see fields_seen for details
             return r;
         },
-        .Array => //|array_info| {
+        .array => //|array_info| {
             return error.ArrayNotImplemented,
         // switch (token) {
         //     .ArrayBegin => {

@@ -310,16 +310,16 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
         //         else => return error.UnexpectedToken,
         //     }
         // },
-        .Pointer => |ptr_info| {
+        .pointer => |ptr_info| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptr_info.size) {
-                .One => {
+                .one => {
                     const r: T = try allocator.create(ptr_info.child);
                     errdefer allocator.free(r);
                     r.* = try parseInternal(ptr_info.child, element, options);
                     return r;
                 },
-                .Slice => {
+                .slice => {
                     // TODO: Detect and deal with arrays. This will require two
                     // passes through the element children - one to
                     // determine if it is an array, one to parse the elements

@@ -348,10 +348,10 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                     }
                     return try allocator.dupe(u8, element.children.items[0].CharData);
                 },
-                .Many => {
+                .many => {
                     return error.ManyPointerSizeNotImplemented;
                 },
-                .C => {
+                .c => {
                     return error.CPointerSizeNotImplemented;
                 },
             }
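`std.builtin.Type.Pointer.Size` variants were lowercased as well (`.One` → `.one`, `.Slice` → `.slice`, `.Many` → `.many`, `.C` → `.c`). A minimal comptime switch over pointer size under the 0.14 names, for illustration:

```zig
const std = @import("std");

// 0.14 spelling of std.builtin.Type.Pointer.Size variants.
fn pointerKind(comptime T: type) []const u8 {
    return switch (@typeInfo(T)) {
        .pointer => |ptr_info| switch (ptr_info.size) {
            .one => "single-item pointer",
            .slice => "slice",
            .many => "many-item pointer",
            .c => "C pointer",
        },
        else => "not a pointer",
    };
}

test "pointerKind" {
    try std.testing.expectEqualStrings("slice", pointerKind([]u8));
    try std.testing.expectEqualStrings("single-item pointer", pointerKind(*u32));
}
```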