Compare commits
4 commits: zig-develop ... master

Commits: 0cda404b0a · 5aa191c415 · 370011eb1e · 0adebe10da

.github/workflows/build.yaml

@@ -1,5 +1,4 @@
 name: AWS-Zig Build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   push:
     branches:
@@ -17,9 +16,9 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Zig
-        uses: https://git.lerch.org/lobo/setup-zig@v3
+        uses: mlugg/setup-zig@v1.2.1
         with:
           version: 0.13.0
       - name: Run tests

.github/workflows/zig-mach.yaml

@@ -1,5 +1,4 @@
 name: aws-zig mach nominated build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   schedule:
     - cron: '0 12 * * *' # noon UTC, 4AM Pacific
@@ -22,18 +21,14 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           ref: zig-develop
       - name: Setup Zig
         uses: mlugg/setup-zig@v1.2.1
         with:
           version: mach-latest
-      - name: Run gen
-        run: zig build gen --verbose
-      - name: Run smoke test
-        run: zig build smoke-test --verbose
-      - name: Run full tests
+      - name: Run tests
         run: zig build test --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory

.github/workflows/zig-nightly.yaml

@@ -1,5 +1,4 @@
 name: aws-zig nightly build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   schedule:
     - cron: '30 12 * * *' # 12:30 UTC, 4:30AM Pacific
@@ -22,11 +21,11 @@ jobs:
       # image: alpine:3.15.0
     steps:
       - name: Check out repository code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           ref: zig-develop
       - name: Setup Zig
-        uses: https://git.lerch.org/lobo/setup-zig@v3
+        uses: mlugg/setup-zig@v1.2.1
         with:
           version: master
       - name: Run tests

.github/workflows/build.yaml (vendored) · 18 changed lines
@@ -1,5 +1,4 @@
 name: AWS-Zig Build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   push:
     branches:
@@ -8,23 +7,14 @@ on:
 jobs:
   build-zig-0-12-0-amd64:
     runs-on: ubuntu-latest
-    env:
-      ZIG_VERSION: 0.13.0
-      ARCH: x86_64
     if: ${{ github.env.GITEA_ACTIONS != 'true' }}
     steps:
       - name: Check out repository code
         uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
-          sudo tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
-          sudo ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
+      - name: Setup Zig
+        uses: mlugg/setup-zig@v1.2.1
+        with:
+          version: 0.13.0
       - name: Run tests
         run: zig build test -Dbroken-windows --verbose # Github runners try to run the windows tests despite disabling foreign checks
       - name: Build example

.github/workflows/zig-mach.yaml (vendored) · 24 changed lines
@@ -1,5 +1,4 @@
 name: aws-zig mach nominated build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   push:
     branches:
@@ -7,29 +6,14 @@ on:
 jobs:
   build-zig-mach-latest:
     runs-on: ubuntu-latest
-    # Need to use the default container with node and all that, so we can
-    # use JS-based actions like actions/checkout@v3...
-    # container:
-    #   image: alpine:3.15.0
-    env:
-      ZIG_VERSION: mach-latest
-      ARCH: x86_64
     if: ${{ github.env.GITEA_ACTIONS != 'true' }}
     steps:
       - name: Check out repository code
         uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          apt-get update && apt-get install -y jq
-          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://machengine.org/zig/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
-          sudo tar x -C /usr/local -f "${file}"
-          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
-          zig version
+      - name: Setup Zig
+        uses: mlugg/setup-zig@v1.2.1
+        with:
+          version: mach-latest
       - name: Run tests
         run: zig build test -Dbroken-windows --verbose
       - name: Build example

.github/workflows/zig-nightly.yaml (vendored) · 24 changed lines
@@ -1,5 +1,4 @@
 name: aws-zig nightly build
-run-name: ${{ github.actor }} building AWS Zig SDK
 on:
   push:
     branches:
@@ -7,29 +6,14 @@ on:
 jobs:
   build-zig-nightly:
     runs-on: ubuntu-latest
-    # Need to use the default container with node and all that, so we can
-    # use JS-based actions like actions/checkout@v3...
-    # container:
-    #   image: alpine:3.15.0
-    env:
-      ZIG_VERSION: master
-      ARCH: x86_64
     if: ${{ github.env.GITEA_ACTIONS != 'true' }}
     steps:
       - name: Check out repository code
         uses: actions/checkout@v4
-      # ARCH is fine, but we can't substitute directly because zig
-      # uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
-      #
-      # However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
-      # TODO: https://github.com/ziglang/zig/issues/2443
-      - name: Install zig
-        run: |
-          apt-get update && apt-get install -y jq
-          file="$(curl -Osw '%{filename_effective}' "$(curl -s https://ziglang.org/download/index.json |jq -r '."'${ZIG_VERSION}'"."x86_64-linux".tarball')")"
-          sudo tar x -C /usr/local -f "${file}"
-          sudo ln -s /usr/local/"${file%%.tar.xz}"/zig /usr/local/bin/zig
-          zig version
+      - name: Setup Zig
+        uses: mlugg/setup-zig@v1.2.1
+        with:
+          version: master
       - name: Run tests
         run: zig build test -Dbroken-windows --verbose
       - name: Build example

README.md

@@ -13,15 +13,11 @@ AWS SDK for Zig

 [![Build Status: Zig Nightly](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-nightly.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)

-**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
-panics under CI, but I have not yet reproduced this panic. Running manually on
-multiple machines appears to be working properly
-
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
 in x86_linux, and will vary based on services used. Tested targets:

 * x86_64-linux
-* riscv64-linux
+* riscv64-linux\*
 * aarch64-linux
 * x86_64-windows
 * arm-linux
@@ -30,6 +26,9 @@ in x86_linux, and will vary based on services used. Tested targets:

 Tested targets are built, but not continuously tested, by CI.

+\* On Zig 0.12/0.13, riscv64-linux is disabled due to [LLVM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872)
+
 Zig-Develop Branch
 ------------------

build.zig

@@ -10,7 +10,11 @@ const test_targets = [_]std.Target.Query{
     .{}, // native
     .{ .cpu_arch = .x86_64, .os_tag = .linux },
     .{ .cpu_arch = .aarch64, .os_tag = .linux },
-    .{ .cpu_arch = .riscv64, .os_tag = .linux },
+    // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024.
+    // This is likely an LLVM problem unlikely to be fixed in zig 0.13.
+    // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440
+    // Zig tracker: https://github.com/ziglang/zig/issues/18872
+    // .{ .cpu_arch = .riscv64, .os_tag = .linux },
     .{ .cpu_arch = .arm, .os_tag = .linux },
     .{ .cpu_arch = .x86_64, .os_tag = .windows },
     .{ .cpu_arch = .aarch64, .os_tag = .macos },
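
For context, here is a minimal sketch of how a `test_targets` list like the one above is typically consumed in a `build.zig`. The consuming loop is an assumption (it is not part of this diff), but it shows why commenting out a query removes that target from `zig build test`:

```zig
const std = @import("std");

// Assumed consuming loop for the test_targets list above; the project's real
// build.zig may differ. Each query is resolved to a concrete target and a
// per-target test run hangs off one aggregate "test" step.
const test_targets = [_]std.Target.Query{
    .{}, // native
    .{ .cpu_arch = .x86_64, .os_tag = .linux },
};

pub fn build(b: *std.Build) void {
    const test_step = b.step("test", "Run tests across all targets");
    for (test_targets) |query| {
        const t = b.addTest(.{
            .root_source_file = b.path("src/aws.zig"),
            .target = b.resolveTargetQuery(query),
        });
        test_step.dependOn(&b.addRunArtifact(t).step);
    }
}
```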

@@ -2,7 +2,7 @@ const std = @import("std");
 // options is a json.Options, but since we're using our hacked json.zig we don't want to
 // specifically call this out
 pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
-    if (@typeInfo(@TypeOf(map)) == .optional) {
+    if (@typeInfo(@TypeOf(map)) == .Optional) {
         if (map == null)
             return true
         else
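
The bulk of the source changes in this compare are mechanical renames driven by a `std.builtin.Type` change in the compiler: on Zig 0.13 (the master branch here) the type-info tags are capitalized, while the 0.14-dev compilers tracked by zig-develop use lowercase tags, quoting names that collide with keywords (`.Struct` becomes `.@"struct"`). A minimal sketch of the same check in both spellings, written for Zig 0.13:

```zig
const std = @import("std");

// Zig 0.13 spelling, as used on master. On zig-develop (0.14-dev) the
// comparison would read `@typeInfo(T) == .optional` instead.
fn isOptional(comptime T: type) bool {
    return @typeInfo(T) == .Optional;
}

test "optional detection" {
    try std.testing.expect(isOptional(?u32));
    try std.testing.expect(!isOptional(u32));
}
```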

build.zig.zon

@@ -5,8 +5,8 @@
     .dependencies = .{
         .aws = .{
-            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/6240225db28c759d3977b3bd62896e7eb319479d/6240225db28c759d3977b3bd62896e7eb319479dnominated-zig-with-models.tar.gz",
-            .hash = "1220a8398a1040f731c02741639192c68bf911ac56640650329329b6e8d9a77ef278",
+            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e5b662873a6745a7e761643b1ca3d8637bf1222f/e5b662873a6745a7e761643b1ca3d8637bf1222f-with-models.tar.gz",
+            .hash = "12206394d50a9df1bf3fa6390cd5525bf97448d0f74a85113ef70c3bb60dcf4b7292",
         },
     },
 }
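
(As a general Zig packaging note, not something shown in this diff: when a pinned archive URL changes like this, the matching `.hash` value can be obtained by running `zig fetch <url>`, which downloads the archive and prints the package hash to use in build.zig.zon.)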

src/aws.zig · 34 changed lines
@@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type {
             // And the response property below will pull whatever is the ActionResult object
             // We can grab index [0] as structs are guaranteed by zig to be returned in the order
             // declared, and we're declaring in that order in ServerResponse().
-            const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
+            const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name);
             return FullResponseType{
-                .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
+                .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name),
                 .response_metadata = .{
                     .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
                 },
@@ -762,7 +762,7 @@ pub fn Request(comptime request_action: anytype) type {
 }

 fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
-    if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val);
+    if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val);
     // TODO: This is terrible...fix it
     switch (T) {
         bool => return std.ascii.eqlIgnoreCase(val, "true"),
@@ -789,8 +789,8 @@ fn parseInt(comptime T: type, val: []const u8) !T {

 fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
     switch (@typeInfo(@TypeOf(val))) {
-        .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
-        .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
+        .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
+        .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
         else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
     }
 }
@@ -909,7 +909,7 @@ fn ServerResponse(comptime action: anytype) type {
         RequestId: []u8,
     };
     const Result = @Type(.{
-        .@"struct" = .{
+        .Struct = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
@@ -932,7 +932,7 @@ fn ServerResponse(comptime action: anytype) type {
         },
     });
     return @Type(.{
-        .@"struct" = .{
+        .Struct = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
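
`ServerResponse()` builds its result types at comptime via `@Type`. A self-contained sketch of the same reflection pattern in the Zig 0.13 spelling (the `PairOf` helper is hypothetical, not part of the SDK); on zig-develop the initializer tag becomes `.@"struct"`:

```zig
const std = @import("std");

// Reify a struct type from a field list, the way ServerResponse() does.
fn PairOf(comptime T: type) type {
    return @Type(.{
        .Struct = .{
            .layout = .auto,
            .fields = &[_]std.builtin.Type.StructField{
                .{ .name = "first", .type = T, .default_value = null, .is_comptime = false, .alignment = @alignOf(T) },
                .{ .name = "second", .type = T, .default_value = null, .is_comptime = false, .alignment = @alignOf(T) },
            },
            .decls = &[_]std.builtin.Type.Declaration{},
            .is_tuple = false,
        },
    });
}

test "reified struct behaves like a normal struct" {
    const p = PairOf(u8){ .first = 1, .second = 2 };
    try std.testing.expectEqual(@as(u8, 3), p.first + p.second);
}
```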
@@ -998,8 +998,8 @@ fn FullResponse(comptime action: anytype) type {
 }
 fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
     switch (@typeInfo(@TypeOf(obj))) {
-        .pointer => allocator.free(obj),
-        .optional => if (obj) |o| safeFree(allocator, o),
+        .Pointer => allocator.free(obj),
+        .Optional => if (obj) |o| safeFree(allocator, o),
         else => {},
     }
 }
@@ -1108,7 +1108,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
     var prefix = "?";
     if (@hasDecl(@TypeOf(request), "http_query")) {
         const query_arguments = @field(@TypeOf(request), "http_query");
-        inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| {
+        inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
             const val = @field(request, arg.name);
             if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer))
                 prefix = "&";
@@ -1119,13 +1119,13 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {

 fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
     switch (@typeInfo(@TypeOf(value))) {
-        .optional => {
+        .Optional => {
             if (value) |v|
                 return try addQueryArg(ValueType, prefix, key, v, writer);
             return false;
         },
         // if this is a pointer, we want to make sure it is more than just a string
-        .pointer => |ptr| {
+        .Pointer => |ptr| {
             if (ptr.child == u8 or ptr.size != .Slice) {
                 // This is just a string
                 return try addBasicQueryArg(prefix, key, value, writer);
@@ -1137,7 +1137,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
             }
             return std.mem.eql(u8, "&", p);
         },
-        .array => |arr| {
+        .Array => |arr| {
             if (arr.child == u8)
                 return try addBasicQueryArg(prefix, key, value, writer);
             var p = prefix;
@@ -1257,8 +1257,8 @@ fn reportTraffic(
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .@"struct" => {
-            inline for (ti.@"struct".fields) |field| {
+        .Struct => {
+            inline for (ti.Struct.fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1272,7 +1272,7 @@ test "custom serialization for map objects" {
     const allocator = std.testing.allocator;
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
@@ -2034,7 +2034,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
     defer test_harness.deinit();
     const options = try test_harness.start();
     const lambda = (Services(.{.lambda}){}).lambda;
-    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };

src/json.zig · 76 changed lines
@@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {

 fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .bool => {
+        .Bool => {
             return switch (token) {
                 .True => true,
                 .False => false,
                 else => error.UnexpectedToken,
             };
         },
-        .float, .comptime_float => {
+        .Float, .ComptimeFloat => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
             };
             return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
         },
-        .int, .comptime_int => {
+        .Int, .ComptimeInt => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
@@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
             if (std.math.round(float) != float) return error.InvalidNumber;
             return @as(T, @intFromFloat(float));
         },
-        .optional => |optionalInfo| {
+        .Optional => |optionalInfo| {
             if (token == .Null) {
                 return null;
             } else {
                 return try parseInternal(optionalInfo.child, token, tokens, options);
             }
         },
-        .@"enum" => |enumInfo| {
+        .Enum => |enumInfo| {
             switch (token) {
                 .Number => |numberToken| {
                     if (!numberToken.is_integer) return error.UnexpectedToken;
@@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
                 else => return error.UnexpectedToken,
             }
         },
-        .@"union" => |unionInfo| {
+        .Union => |unionInfo| {
             if (unionInfo.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 inline for (unionInfo.fields) |u_field| {
@@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
                 @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
             }
         },
-        .@"struct" => |structInfo| {
+        .Struct => |structInfo| {
             switch (token) {
                 .ObjectBegin => {},
                 else => return error.UnexpectedToken,
@@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
             }
             return r;
         },
-        .array => |arrayInfo| {
+        .Array => |arrayInfo| {
             switch (token) {
                 .ArrayBegin => {
                     var r: T = undefined;
@@ -1770,7 +1770,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
                 else => return error.UnexpectedToken,
             }
         },
-        .pointer => |ptrInfo| {
+        .Pointer => |ptrInfo| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptrInfo.size) {
                 .One => {
@@ -1863,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
 fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .@"struct" => {
-            inline for (ti.@"struct".fields) |field| {
+        .Struct => {
+            inline for (ti.Struct.fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
     // We should be getting a type that is a pointer to a slice.
     // Let's just double check before proceeding
     const ti = @typeInfo(T);
-    if (ti != .pointer) return false;
-    if (ti.pointer.size != .Slice) return false;
-    const ti_child = @typeInfo(ti.pointer.child);
-    if (ti_child != .@"struct") return false;
-    if (ti_child.@"struct".fields.len != 2) return false;
+    if (ti != .Pointer) return false;
+    if (ti.Pointer.size != .Slice) return false;
+    const ti_child = @typeInfo(ti.Pointer.child);
+    if (ti_child != .Struct) return false;
+    if (ti_child.Struct.fields.len != 2) return false;
     var key_found = false;
     var value_found = false;
-    inline for (ti_child.@"struct".fields) |field| {
+    inline for (ti_child.Struct.fields) |field| {
         if (std.mem.eql(u8, "key", field.name))
             key_found = true;
         if (std.mem.eql(u8, "value", field.name))
@@ -1903,13 +1903,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
 /// Should be called with the same type and `ParseOptions` that were passed to `parse`
 pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
     switch (@typeInfo(T)) {
-        .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
-        .optional => {
+        .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
+        .Optional => {
             if (value) |v| {
                 return parseFree(@TypeOf(v), v, options);
             }
         },
-        .@"union" => |unionInfo| {
+        .Union => |unionInfo| {
             if (unionInfo.tag_type) |UnionTagType| {
                 inline for (unionInfo.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {
@@ -1921,17 +1921,17 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
                 unreachable;
             }
         },
-        .@"struct" => |structInfo| {
+        .Struct => |structInfo| {
             inline for (structInfo.fields) |field| {
                 parseFree(field.type, @field(value, field.name), options);
             }
         },
-        .array => |arrayInfo| {
+        .Array => |arrayInfo| {
             for (value) |v| {
                 parseFree(arrayInfo.child, v, options);
             }
         },
-        .pointer => |ptrInfo| {
+        .Pointer => |ptrInfo| {
             const allocator = options.allocator orelse unreachable;
             switch (ptrInfo.size) {
                 .One => {
@@ -2811,38 +2811,38 @@ pub fn stringify(
 ) !void {
     const T = @TypeOf(value);
     switch (@typeInfo(T)) {
-        .float, .comptime_float => {
+        .Float, .ComptimeFloat => {
             return std.fmt.format(out_stream, "{e}", .{value});
         },
-        .int, .comptime_int => {
+        .Int, .ComptimeInt => {
             return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
         },
-        .bool => {
+        .Bool => {
             return out_stream.writeAll(if (value) "true" else "false");
         },
-        .null => {
+        .Null => {
             return out_stream.writeAll("null");
         },
-        .optional => {
+        .Optional => {
             if (value) |payload| {
                 return try stringify(payload, options, out_stream);
             } else {
                 return try stringify(null, options, out_stream);
             }
         },
-        .@"enum" => {
+        .Enum => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }

             @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
         },
-        .@"union" => {
+        .Union => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }

-            const info = @typeInfo(T).@"union";
+            const info = @typeInfo(T).Union;
             if (info.tag_type) |UnionTagType| {
                 inline for (info.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {
@@ -2853,7 +2853,7 @@ pub fn stringify(
             @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
             }
         },
-        .@"struct" => |S| {
+        .Struct => |S| {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }
@@ -2869,7 +2869,7 @@ pub fn stringify(
             if (Field.type == void) continue;

             var output_this_field = true;
-            if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;
+            if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false;

             const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                 value.fieldNameFor(Field.name)
@@ -2919,10 +2919,10 @@ pub fn stringify(
             try out_stream.writeByte('}');
             return;
         },
-        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
-        .pointer => |ptr_info| switch (ptr_info.size) {
+        .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
+        .Pointer => |ptr_info| switch (ptr_info.size) {
             .One => switch (@typeInfo(ptr_info.child)) {
-                .array => {
+                .Array => {
                     const Slice = []const std.meta.Elem(ptr_info.child);
                     return stringify(@as(Slice, value), options, out_stream);
                 },
@@ -3001,8 +3001,8 @@ pub fn stringify(
             },
             else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
         },
-        .array => return stringify(&value, options, out_stream),
-        .vector => |info| {
+        .Array => return stringify(&value, options, out_stream),
+        .Vector => |info| {
             const array: [info.len]info.child = value;
             return stringify(&array, options, out_stream);
         },
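
One way to avoid maintaining these parallel spellings across the two branches (a sketch of an alternative, not something this diff does) is to compare the type-info tag by name rather than by enum literal, which compiles on both 0.13 and 0.14-dev:

```zig
const std = @import("std");

// Version-agnostic check: @tagName on the @typeInfo union yields "Struct" on
// Zig 0.13 and "struct" on 0.14-dev, so a case-insensitive compare covers both.
fn isStruct(comptime T: type) bool {
    return std.ascii.eqlIgnoreCase(@tagName(@typeInfo(T)), "struct");
}

test "tag-name comparison works on either compiler" {
    try std.testing.expect(isStruct(struct { a: u8 }));
    try std.testing.expect(!isStruct(u32));
}
```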

src/main.zig · 10 changed lines
@@ -97,7 +97,7 @@ pub fn main() anyerror!void {
         }
         continue;
     }
-    inline for (@typeInfo(Tests).@"enum".fields) |f| {
+    inline for (@typeInfo(Tests).Enum.fields) |f| {
         if (std.mem.eql(u8, f.name, arg)) {
             try tests.append(@field(Tests, f.name));
             break;
@@ -105,7 +105,7 @@ pub fn main() anyerror!void {
         }
     }
     if (tests.items.len == 0) {
-        inline for (@typeInfo(Tests).@"enum".fields) |f|
+        inline for (@typeInfo(Tests).Enum.fields) |f|
             try tests.append(@field(Tests, f.name));
     }

@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
     const func = fns[0];
     const arn = func.function_arn.?;
     // This is a bit ugly. Maybe a helper function in the library would help?
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
@@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .@"struct" => {
-            inline for (ti.@"struct".fields) |field| {
+        .Struct => {
+            inline for (ti.Struct.fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type {

     // finally, generate the type
     return @Type(.{
-        .@"struct" = .{
+        .Struct = .{
             .layout = .auto,
             .fields = &fields,
             .decls = &[_]std.builtin.Type.Declaration{},

src/url.zig · 12 changed lines
@@ -24,7 +24,7 @@ fn encodeStruct(
     comptime options: EncodingOptions,
 ) !bool {
     var rc = first;
-    inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
+    inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
         const field_name = try options.field_name_transformer(allocator, field.name);
         defer if (options.field_name_transformer.* != defaultTransformer)
             allocator.free(field_name);
@@ -47,10 +47,10 @@ pub fn encodeInternal(
     // @compileLog(@typeInfo(@TypeOf(obj)));
     var rc = first;
     switch (@typeInfo(@TypeOf(obj))) {
-        .optional => if (obj) |o| {
+        .Optional => if (obj) |o| {
             rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
         },
-        .pointer => |ti| if (ti.size == .One) {
+        .Pointer => |ti| if (ti.size == .One) {
             rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
         } else {
             if (!first) _ = try writer.write("&");
@@ -61,7 +61,7 @@ pub fn encodeInternal(
             try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
             rc = false;
         },
-        .@"struct" => if (std.mem.eql(u8, "", field_name)) {
+        .Struct => if (std.mem.eql(u8, "", field_name)) {
             rc = try encodeStruct(allocator, parent, first, obj, writer, options);
         } else {
             // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
@@ -73,12 +73,12 @@ pub fn encodeInternal(
             rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
             // try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options);
         },
-        .array => {
+        .Array => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
             rc = false;
         },
-        .int, .comptime_int, .float, .comptime_float => {
+        .Int, .ComptimeInt, .Float, .ComptimeFloat => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
             rc = false;
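
The url.zig encoder above recurses on type info to turn request structs into query strings. A stripped-down, self-contained sketch of the idea (the `encode` function here is hypothetical and uses the Zig 0.13 spellings; the real module also threads an allocator, parent prefixes, and field-name transformers through):

```zig
const std = @import("std");

// Hypothetical miniature of the type-driven encoding in src/url.zig:
// unwrap optionals and single-item pointers, then emit key=value pairs.
// Returns the updated "no pairs written yet" flag.
fn encode(writer: anytype, key: []const u8, value: anytype, first: bool) !bool {
    switch (@typeInfo(@TypeOf(value))) {
        .Optional => return if (value) |v| try encode(writer, key, v, first) else first,
        .Pointer => |info| if (info.size == .One) return try encode(writer, key, value.*, first),
        else => {},
    }
    if (!first) try writer.writeAll("&");
    try writer.print("{s}={any}", .{ key, value });
    return false;
}

test "encode skips null and separates pairs" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    var first = true;
    first = try encode(buf.writer(), "a", @as(?u32, null), first);
    first = try encode(buf.writer(), "b", @as(u32, 2), first);
    first = try encode(buf.writer(), "c", @as(u32, 3), first);
    try std.testing.expectEqualStrings("b=2&c=3", buf.items);
}
```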

@@ -96,14 +96,14 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse

 fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .bool => {
+        .Bool => {
             if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
                 return true;
             if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
                 return false;
             return error.UnexpectedToken;
         },
-        .float, .comptime_float => {
+        .Float, .ComptimeFloat => {
             return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
                 if (log_parse_traces) {
                     std.log.err(
@@ -121,7 +121,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return e;
             };
         },
-        .int, .comptime_int => {
+        .Int, .ComptimeInt => {
             // 2021-10-05T16:39:45.000Z
             return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
@@ -146,7 +146,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return e;
             };
         },
-        .optional => |optional_info| {
+        .Optional => |optional_info| {
             if (element.children.items.len == 0) {
                 // This is almost certainly incomplete. Empty strings? xsi:nil?
                 return null;
@@ -156,7 +156,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return try parseInternal(optional_info.child, element, options);
             }
         },
-        .@"enum" => |enum_info| {
+        .Enum => |enum_info| {
             _ = enum_info;
             // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
             // if (numeric) |num| {
@@ -166,7 +166,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
             // return std.meta.stringToEnum(T, element.CharData);
             // }
         },
-        .@"union" => |union_info| {
+        .Union => |union_info| {
             if (union_info.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 // inline for (union_info.fields) |u_field| {
@@ -189,7 +189,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
             }
             @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
         },
-        .@"struct" => |struct_info| {
+        .Struct => |struct_info| {
             var r: T = undefined;
             var fields_seen = [_]bool{false} ** struct_info.fields.len;
             var fields_set: u64 = 0;
@@ -244,7 +244,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 fields_set = fields_set + 1;
                 found_value = true;
             }
-            if (@typeInfo(field.type) == .optional) {
+            if (@typeInfo(field.type) == .Optional) {
                 // Test "compiler assertion failure 2"
                 // Zig compiler bug circa 0.9.0. Using "and !found_value"
                 // in the if statement above will trigger assertion failure
@@ -269,7 +269,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
                 return error.FieldElementMismatch; // see fields_seen for details
             return r;
         },
-        .array => //|array_info| {
+        .Array => //|array_info| {
             return error.ArrayNotImplemented,
         // switch (token) {
         //     .ArrayBegin => {
@@ -304,7 +304,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
         //     else => return error.UnexpectedToken,
         //     }
         // },
-        .pointer => |ptr_info| {
+        .Pointer => |ptr_info| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptr_info.size) {
                 .One => {