zig 0.14.0 upgrade: merge zig-develop branch
Commit 12e24b01ad

12 changed files with 109 additions and 90 deletions

.envrc (new file, +8)
@@ -0,0 +1,8 @@
+# vi: ft=sh
+# shellcheck shell=bash
+
+if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then
+  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ="
+fi
+
+use zig 2024.11.0-mach
@@ -28,7 +28,11 @@ jobs:
         uses: mlugg/setup-zig@v1.2.1
         with:
           version: mach-latest
-      - name: Run tests
+      - name: Run gen
+        run: zig build gen --verbose
+      - name: Run smoke test
+        run: zig build smoke-test --verbose
+      - name: Run full tests
         run: zig build test --verbose
       # Zig package manager expects everything to be inside a directory in the archive,
       # which it then strips out on download. So we need to shove everything inside a directory
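The workflow now runs code generation and a smoke test before the full test suite. Those targets are top-level steps declared in build.zig; below is a minimal sketch of how steps with these names are typically registered (the paths, descriptions, and local names are assumptions for illustration, not necessarily the repository's exact build script):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        // "gen" becomes invokable as `zig build gen`.
        const cg_exe = b.addExecutable(.{
            .name = "codegen",
            .root_source_file = b.path("codegen/src/main.zig"), // assumed path
            .target = target,
            .optimize = optimize,
        });
        const gen_step = b.step("gen", "Generate the AWS service model code");
        gen_step.dependOn(&b.addRunArtifact(cg_exe).step);

        // "smoke-test" becomes invokable as `zig build smoke-test`.
        const smoke_test = b.addTest(.{
            .root_source_file = b.path("src/aws.zig"),
            .target = target,
            .optimize = optimize,
        });
        const smoke_step = b.step("smoke-test", "Run a reduced set of tests");
        smoke_step.dependOn(&b.addRunArtifact(smoke_test).step);
    }

With steps registered like that, the workflow's `zig build gen --verbose` and `zig build smoke-test --verbose` invocations map directly onto them.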
@@ -13,11 +13,15 @@ AWS SDK for Zig
 
 [](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)
 
+**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
+panics under CI, but I have not yet reproduced this panic. Running manually on
+multiple machines appears to be working properly
+
 Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
 in x86_linux, and will vary based on services used. Tested targets:
 
 * x86_64-linux
-* riscv64-linux\*
+* riscv64-linux
 * aarch64-linux
 * x86_64-windows
 * arm-linux
@@ -26,9 +30,6 @@ in x86_linux, and will vary based on services used. Tested targets:
 
 Tested targets are built, but not continuously tested, by CI.
 
-\* On Zig 0.12/0.13, riscv64-linux disabled due to [LLLM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872)
-
-
 Zig-Develop Branch
 ------------------
 

build.zig (16 changed lines)
@@ -10,11 +10,7 @@ const test_targets = [_]std.Target.Query{
     .{}, // native
     .{ .cpu_arch = .x86_64, .os_tag = .linux },
     .{ .cpu_arch = .aarch64, .os_tag = .linux },
-    // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024
-    // This is likely a LLVM problem unlikely to be fixed in zig 0.13
-    // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440
-    // Zig tracker: https://github.com/ziglang/zig/issues/18872
-    // .{ .cpu_arch = .riscv64, .os_tag = .linux },
+    .{ .cpu_arch = .riscv64, .os_tag = .linux },
     .{ .cpu_arch = .arm, .os_tag = .linux },
     .{ .cpu_arch = .x86_64, .os_tag = .windows },
     .{ .cpu_arch = .aarch64, .os_tag = .macos },
@@ -33,6 +29,12 @@ pub fn build(b: *Builder) !void {
     // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
     const optimize = b.standardOptimizeOption(.{});
 
+    const no_llvm = b.option(
+        bool,
+        "no-llvm",
+        "Disable LLVM",
+    ) orelse false;
+
     const broken_windows = b.option(
         bool,
         "broken-windows",
@@ -56,6 +58,7 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
+    exe.use_llvm = !no_llvm;
     const smithy_dep = b.dependency("smithy", .{
         // These are the arguments to the dependency. It expects a target and optimization level.
         .target = target,
@@ -107,6 +110,7 @@ pub fn build(b: *Builder) !void {
             .target = b.graph.host,
             .optimize = if (b.verbose) .Debug else .ReleaseSafe,
         });
+        cg_exe.use_llvm = !no_llvm;
         cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
         var cg_cmd = b.addRunArtifact(cg_exe);
         cg_cmd.addArg("--models");
@@ -183,6 +187,7 @@ pub fn build(b: *Builder) !void {
         });
         unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
         unit_tests.step.dependOn(gen_step);
+        unit_tests.use_llvm = !no_llvm;
 
         const run_unit_tests = b.addRunArtifact(unit_tests);
         run_unit_tests.skip_foreign_checks = true;
@@ -204,6 +209,7 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
+    smoke_test.use_llvm = !no_llvm;
     smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
     smoke_test.step.dependOn(gen_step);
 
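The new -Dno-llvm option is ordinary build-option plumbing: a single boolean read with b.option and assigned to use_llvm on each compile artifact, so CI can exercise the self-hosted backend. A condensed sketch of just that pattern (the artifact name and source path below are illustrative):

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        // `zig build -Dno-llvm` (or -Dno-llvm=true) disables the LLVM backend.
        const no_llvm = b.option(bool, "no-llvm", "Disable LLVM") orelse false;

        const exe = b.addExecutable(.{
            .name = "demo", // illustrative name
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
        });
        // Every artifact that should honor the flag gets the same assignment.
        exe.use_llvm = !no_llvm;
        b.installArtifact(exe);
    }

In the diff above, the same `use_llvm = !no_llvm;` assignment is applied to the demo executable, the codegen executable, the unit tests, and the smoke test, and the flag would be passed as `zig build test -Dno-llvm` when wanted.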
@@ -2,7 +2,7 @@ const std = @import("std");
 // options is a json.Options, but since we're using our hacked json.zig we don't want to
 // specifically call this out
 pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
-    if (@typeInfo(@TypeOf(map)) == .Optional) {
+    if (@typeInfo(@TypeOf(map)) == .optional) {
         if (map == null)
             return true
         else
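This one-line change is the pattern repeated through the rest of the commit: Zig 0.14 renamed the fields of std.builtin.Type from capitalized names (.Optional, .Struct, .Enum, .Union) to lower-case ones (.optional, .@"struct", .@"enum", .@"union"), quoting the ones that collide with keywords. A small self-contained sketch of the new spelling, separate from the SDK's code:

    const std = @import("std");

    // Returns true only when the value's type is optional and the value is null,
    // mirroring the kind of check serializeMap performs, with 0.14 tag names.
    fn isNullOptional(value: anytype) bool {
        if (@typeInfo(@TypeOf(value)) == .optional) {
            return value == null;
        }
        return false;
    }

    test "optional detection with lower-case type-info tags" {
        const absent: ?u32 = null;
        const present: ?u32 = 42;
        try std.testing.expect(isNullOptional(absent));
        try std.testing.expect(!isNullOptional(present));
        try std.testing.expect(!isNullOptional(@as(u32, 7)));
    }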
@@ -5,8 +5,8 @@
 
     .dependencies = .{
         .aws = .{
-            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/dfda8e77d624dfb776e3a70471501a7c610fbac1/dfda8e77d624dfb776e3a70471501a7c610fbac1-with-models.tar.gz",
-            .hash = "122000ad704234e68fee82a52e3b4e365a52874ec851d978b109e05ac66a80dc86ac",
+            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz",
+            .hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968",
         },
     },
 }

src/aws.zig (34 changed lines)
@@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type {
                 // And the response property below will pull whatever is the ActionResult object
                 // We can grab index [0] as structs are guaranteed by zig to be returned in the order
                 // declared, and we're declaring in that order in ServerResponse().
-                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name);
+                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
                 return FullResponseType{
-                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name),
+                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
                     .response_metadata = .{
                         .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
                     },
@@ -773,7 +773,7 @@ fn isOtherNormalResponse(comptime T: type, first_key: []const u8) bool {
     return std.mem.eql(u8, first_key, expected_key);
 }
 fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
-    if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val);
+    if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val);
     // TODO: This is terrible...fix it
     switch (T) {
         bool => return std.ascii.eqlIgnoreCase(val, "true"),
@@ -806,8 +806,8 @@ fn parseInt(comptime T: type, val: []const u8) !T {
 
 fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
     switch (@typeInfo(@TypeOf(val))) {
-        .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
-        .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
+        .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
+        .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
         else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
     }
 }
@@ -926,7 +926,7 @@ fn ServerResponse(comptime action: anytype) type {
         RequestId: []u8,
     };
     const Result = @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
@@ -949,7 +949,7 @@ fn ServerResponse(comptime action: anytype) type {
         },
     });
     return @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &[_]std.builtin.Type.StructField{
                 .{
@@ -1015,8 +1015,8 @@ fn FullResponse(comptime action: anytype) type {
 }
 fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
     switch (@typeInfo(@TypeOf(obj))) {
-        .Pointer => allocator.free(obj),
-        .Optional => if (obj) |o| safeFree(allocator, o),
+        .pointer => allocator.free(obj),
+        .optional => if (obj) |o| safeFree(allocator, o),
         else => {},
     }
 }
@@ -1125,7 +1125,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
     var prefix = "?";
     if (@hasDecl(@TypeOf(request), "http_query")) {
         const query_arguments = @field(@TypeOf(request), "http_query");
-        inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
+        inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| {
             const val = @field(request, arg.name);
             if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer))
                 prefix = "&";
@@ -1136,13 +1136,13 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
 
 fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
     switch (@typeInfo(@TypeOf(value))) {
-        .Optional => {
+        .optional => {
             if (value) |v|
                 return try addQueryArg(ValueType, prefix, key, v, writer);
             return false;
         },
         // if this is a pointer, we want to make sure it is more than just a string
-        .Pointer => |ptr| {
+        .pointer => |ptr| {
             if (ptr.child == u8 or ptr.size != .Slice) {
                 // This is just a string
                 return try addBasicQueryArg(prefix, key, value, writer);
@@ -1154,7 +1154,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
             }
             return std.mem.eql(u8, "&", p);
         },
-        .Array => |arr| {
+        .array => |arr| {
             if (arr.child == u8)
                 return try addBasicQueryArg(prefix, key, value, writer);
             var p = prefix;
@@ -1274,8 +1274,8 @@ fn reportTraffic(
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1289,7 +1289,7 @@ test "custom serialization for map objects" {
     const allocator = std.testing.allocator;
     var buffer = std.ArrayList(u8).init(allocator);
     defer buffer.deinit();
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
@@ -2051,7 +2051,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
     defer test_harness.deinit();
     const options = try test_harness.start();
     const lambda = (Services(.{.lambda}){}).lambda;
-    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };
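Nearly all of the churn in src/aws.zig is the same mechanical rename, and reflection helpers such as typeForField now spell the tag .@"struct" because struct is a keyword. A standalone sketch of that field-reflection pattern (not the SDK's own helper):

    const std = @import("std");

    // Comptime field lookup over a struct type, written with 0.14 tag names.
    fn hasField(comptime T: type, comptime name: []const u8) bool {
        switch (@typeInfo(T)) {
            .@"struct" => |s| {
                inline for (s.fields) |field| {
                    if (std.mem.eql(u8, field.name, name)) return true;
                }
                return false;
            },
            else => return false,
        }
    }

    test "field lookup with lower-case struct tag" {
        const Tag = struct { key: []const u8, value: []const u8 };
        try std.testing.expect(hasField(Tag, "key"));
        try std.testing.expect(!hasField(Tag, "resource_arn"));
    }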
src/json.zig (76 changed lines)
@@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {
 
 fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .Bool => {
+        .bool => {
             return switch (token) {
                 .True => true,
                 .False => false,
                 else => error.UnexpectedToken,
             };
         },
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
             };
             return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             const numberToken = switch (token) {
                 .Number => |n| n,
                 else => return error.UnexpectedToken,
@@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             if (std.math.round(float) != float) return error.InvalidNumber;
             return @as(T, @intFromFloat(float));
         },
-        .Optional => |optionalInfo| {
+        .optional => |optionalInfo| {
             if (token == .Null) {
                 return null;
             } else {
                 return try parseInternal(optionalInfo.child, token, tokens, options);
             }
         },
-        .Enum => |enumInfo| {
+        .@"enum" => |enumInfo| {
             switch (token) {
                 .Number => |numberToken| {
                     if (!numberToken.is_integer) return error.UnexpectedToken;
@@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 else => return error.UnexpectedToken,
             }
         },
-        .Union => |unionInfo| {
+        .@"union" => |unionInfo| {
             if (unionInfo.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 inline for (unionInfo.fields) |u_field| {
@@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
             }
         },
-        .Struct => |structInfo| {
+        .@"struct" => |structInfo| {
             switch (token) {
                 .ObjectBegin => {},
                 else => return error.UnexpectedToken,
@@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
             }
             return r;
         },
-        .Array => |arrayInfo| {
+        .array => |arrayInfo| {
             switch (token) {
                 .ArrayBegin => {
                     var r: T = undefined;
@@ -1770,7 +1770,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                 else => return error.UnexpectedToken,
             }
         },
-        .Pointer => |ptrInfo| {
+        .pointer => |ptrInfo| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptrInfo.size) {
                 .One => {
@@ -1863,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
 fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
@@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
     // We should be getting a type that is a pointer to a slice.
     // Let's just double check before proceeding
     const ti = @typeInfo(T);
-    if (ti != .Pointer) return false;
-    if (ti.Pointer.size != .Slice) return false;
-    const ti_child = @typeInfo(ti.Pointer.child);
-    if (ti_child != .Struct) return false;
-    if (ti_child.Struct.fields.len != 2) return false;
+    if (ti != .pointer) return false;
+    if (ti.pointer.size != .Slice) return false;
+    const ti_child = @typeInfo(ti.pointer.child);
+    if (ti_child != .@"struct") return false;
+    if (ti_child.@"struct".fields.len != 2) return false;
     var key_found = false;
     var value_found = false;
-    inline for (ti_child.Struct.fields) |field| {
+    inline for (ti_child.@"struct".fields) |field| {
         if (std.mem.eql(u8, "key", field.name))
             key_found = true;
         if (std.mem.eql(u8, "value", field.name))
@@ -1904,13 +1904,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
 /// Should be called with the same type and `ParseOptions` that were passed to `parse`
 pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
     switch (@typeInfo(T)) {
-        .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
-        .Optional => {
+        .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
+        .optional => {
             if (value) |v| {
                 return parseFree(@TypeOf(v), v, options);
             }
         },
-        .Union => |unionInfo| {
+        .@"union" => |unionInfo| {
             if (unionInfo.tag_type) |UnionTagType| {
                 inline for (unionInfo.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {
@@ -1922,17 +1922,17 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
                 unreachable;
             }
         },
-        .Struct => |structInfo| {
+        .@"struct" => |structInfo| {
             inline for (structInfo.fields) |field| {
                 parseFree(field.type, @field(value, field.name), options);
             }
         },
-        .Array => |arrayInfo| {
+        .array => |arrayInfo| {
             for (value) |v| {
                 parseFree(arrayInfo.child, v, options);
             }
         },
-        .Pointer => |ptrInfo| {
+        .pointer => |ptrInfo| {
             const allocator = options.allocator orelse unreachable;
             switch (ptrInfo.size) {
                 .One => {
@@ -2812,38 +2812,38 @@ pub fn stringify(
 ) !void {
     const T = @TypeOf(value);
     switch (@typeInfo(T)) {
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             return std.fmt.format(out_stream, "{e}", .{value});
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
         },
-        .Bool => {
+        .bool => {
             return out_stream.writeAll(if (value) "true" else "false");
         },
-        .Null => {
+        .null => {
             return out_stream.writeAll("null");
         },
-        .Optional => {
+        .optional => {
             if (value) |payload| {
                 return try stringify(payload, options, out_stream);
             } else {
                 return try stringify(null, options, out_stream);
             }
         },
-        .Enum => {
+        .@"enum" => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }
 
             @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
         },
-        .Union => {
+        .@"union" => {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }
 
-            const info = @typeInfo(T).Union;
+            const info = @typeInfo(T).@"union";
             if (info.tag_type) |UnionTagType| {
                 inline for (info.fields) |u_field| {
                     if (value == @field(UnionTagType, u_field.name)) {
@@ -2854,7 +2854,7 @@ pub fn stringify(
                 @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
             }
         },
-        .Struct => |S| {
+        .@"struct" => |S| {
             if (comptime std.meta.hasFn(T, "jsonStringify")) {
                 return value.jsonStringify(options, out_stream);
             }
@@ -2870,7 +2870,7 @@ pub fn stringify(
                 if (Field.type == void) continue;
 
                 var output_this_field = true;
-                if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false;
+                if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;
 
                 const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                     value.fieldNameFor(Field.name)
@@ -2920,10 +2920,10 @@ pub fn stringify(
             try out_stream.writeByte('}');
             return;
         },
-        .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
-        .Pointer => |ptr_info| switch (ptr_info.size) {
+        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
+        .pointer => |ptr_info| switch (ptr_info.size) {
             .One => switch (@typeInfo(ptr_info.child)) {
-                .Array => {
+                .array => {
                     const Slice = []const std.meta.Elem(ptr_info.child);
                     return stringify(@as(Slice, value), options, out_stream);
                 },
@@ -3002,8 +3002,8 @@ pub fn stringify(
             },
             else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
         },
-        .Array => return stringify(&value, options, out_stream),
-        .Vector => |info| {
+        .array => return stringify(&value, options, out_stream),
+        .vector => |info| {
             const array: [info.len]info.child = value;
             return stringify(&array, options, out_stream);
         },
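src/json.zig is a vendored and locally modified copy of an older std.json, so it carries the largest batch of renames: every branch of parseInternal, parseFree, and stringify now switches on the lower-case tags. A compressed illustration of that switch shape (a sketch written for this note, not code from the vendored file):

    const std = @import("std");

    // Classify a type the way a JSON serializer would, using 0.14 tag names.
    fn jsonKind(comptime T: type) []const u8 {
        return switch (@typeInfo(T)) {
            .bool => "boolean",
            .int, .comptime_int, .float, .comptime_float => "number",
            .optional => |o| jsonKind(o.child),
            .pointer, .array => "string or array",
            .@"struct" => "object",
            .@"enum", .@"union" => "tagged value",
            else => "unsupported",
        };
    }

    test "type switch with 0.14 tag names" {
        try std.testing.expectEqualStrings("number", jsonKind(u32));
        try std.testing.expectEqualStrings("object", jsonKind(struct { a: u8 }));
        try std.testing.expectEqualStrings("number", jsonKind(?f64));
    }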
src/main.zig (10 changed lines)
@@ -97,7 +97,7 @@ pub fn main() anyerror!void {
             }
             continue;
         }
-        inline for (@typeInfo(Tests).Enum.fields) |f| {
+        inline for (@typeInfo(Tests).@"enum".fields) |f| {
             if (std.mem.eql(u8, f.name, arg)) {
                 try tests.append(@field(Tests, f.name));
                 break;
@@ -105,7 +105,7 @@ pub fn main() anyerror!void {
         }
     }
     if (tests.items.len == 0) {
-        inline for (@typeInfo(Tests).Enum.fields) |f|
+        inline for (@typeInfo(Tests).@"enum".fields) |f|
             try tests.append(@field(Tests, f.name));
     }
 
@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
                         const func = fns[0];
                         const arn = func.function_arn.?;
                         // This is a bit ugly. Maybe a helper function in the library would help?
-                        var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+                        var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
                         defer tags.deinit();
                         tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
                         const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
@@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
 fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
     const ti = @typeInfo(T);
     switch (ti) {
-        .Struct => {
-            inline for (ti.Struct.fields) |field| {
+        .@"struct" => {
+            inline for (ti.@"struct".fields) |field| {
                 if (std.mem.eql(u8, field.name, field_name))
                     return field.type;
             }
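main.zig selects demo tests by matching command-line arguments against an enum, iterating @typeInfo(Tests).@"enum".fields (previously .Enum.fields). A minimal sketch of that lookup; the enum values below are placeholders rather than the demo's actual test names:

    const std = @import("std");

    // Placeholder enum; the demo's real test list lives in src/main.zig.
    const Tests = enum { query_no_input, query_with_input, rest_json_1_query };

    fn testFromArg(arg: []const u8) ?Tests {
        inline for (@typeInfo(Tests).@"enum".fields) |f| {
            if (std.mem.eql(u8, f.name, arg)) return @field(Tests, f.name);
        }
        return null;
    }

    test "argument to enum mapping with lower-case enum tag" {
        try std.testing.expectEqual(Tests.query_no_input, testFromArg("query_no_input").?);
        try std.testing.expect(testFromArg("not_a_test") == null);
    }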
@@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type {
 
     // finally, generate the type
     return @Type(.{
-        .Struct = .{
+        .@"struct" = .{
             .layout = .auto,
             .fields = &fields,
             .decls = &[_]std.builtin.Type.Declaration{},

src/url.zig (12 changed lines)
@@ -24,7 +24,7 @@ fn encodeStruct(
     comptime options: EncodingOptions,
 ) !bool {
     var rc = first;
-    inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
+    inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
         const field_name = try options.field_name_transformer(allocator, field.name);
         defer if (options.field_name_transformer.* != defaultTransformer)
             allocator.free(field_name);
@@ -47,10 +47,10 @@ pub fn encodeInternal(
     // @compileLog(@typeInfo(@TypeOf(obj)));
     var rc = first;
     switch (@typeInfo(@TypeOf(obj))) {
-        .Optional => if (obj) |o| {
+        .optional => if (obj) |o| {
             rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
         },
-        .Pointer => |ti| if (ti.size == .One) {
+        .pointer => |ti| if (ti.size == .One) {
             rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
         } else {
             if (!first) _ = try writer.write("&");
@@ -61,7 +61,7 @@ pub fn encodeInternal(
                 try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
             rc = false;
         },
-        .Struct => if (std.mem.eql(u8, "", field_name)) {
+        .@"struct" => if (std.mem.eql(u8, "", field_name)) {
             rc = try encodeStruct(allocator, parent, first, obj, writer, options);
         } else {
             // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
@@ -73,12 +73,12 @@ pub fn encodeInternal(
             rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
             // try encodeStruct(parent ++ field_name ++ ".", first, obj,  writer, options);
         },
-        .Array => {
+        .array => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
             rc = false;
         },
-        .Int, .ComptimeInt, .Float, .ComptimeFloat => {
+        .int, .comptime_int, .float, .comptime_float => {
             if (!first) _ = try writer.write("&");
             try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
             rc = false;
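The url.zig encoder keeps its recursive shape: optionals and single-item pointers are unwrapped before a value is written to the query string. A rough sketch of that unwrapping under the same compiler this commit targets (the payload tags are lower-case while the pointer-size values are still spelled .One and .Slice here, as in the code above); the helper below is invented for illustration:

    const std = @import("std");

    // Peel off optionals and single-item pointers, then print the value.
    fn writeQueryValue(value: anytype, writer: anytype) !void {
        switch (@typeInfo(@TypeOf(value))) {
            .optional => if (value) |v| try writeQueryValue(v, writer),
            .pointer => |ti| if (ti.size == .One) {
                try writeQueryValue(value.*, writer);
            } else {
                try writer.print("{s}", .{value});
            },
            .int, .comptime_int, .float, .comptime_float => try writer.print("{d}", .{value}),
            else => try writer.print("{any}", .{value}),
        }
    }

    test "unwrap optional before encoding" {
        var buf: [32]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buf);
        const maybe_port: ?u16 = 8080;
        try writeQueryValue(maybe_port, fbs.writer());
        try std.testing.expectEqualStrings("8080", fbs.getWritten());
    }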
@@ -96,14 +96,14 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse
 
 fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
     switch (@typeInfo(T)) {
-        .Bool => {
+        .bool => {
             if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
                 return true;
             if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
                 return false;
             return error.UnexpectedToken;
         },
-        .Float, .ComptimeFloat => {
+        .float, .comptime_float => {
             return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
                     // We have an iso8601 in an integer field (we think)
@@ -127,7 +127,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return e;
             };
         },
-        .Int, .ComptimeInt => {
+        .int, .comptime_int => {
             // 2021-10-05T16:39:45.000Z
             return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
                 if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
@@ -152,7 +152,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return e;
             };
         },
-        .Optional => |optional_info| {
+        .optional => |optional_info| {
             if (element.children.items.len == 0) {
                 // This is almost certainly incomplete. Empty strings? xsi:nil?
                 return null;
@@ -162,7 +162,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return try parseInternal(optional_info.child, element, options);
             }
         },
-        .Enum => |enum_info| {
+        .@"enum" => |enum_info| {
             _ = enum_info;
             // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
             // if (numeric) |num| {
@@ -172,7 +172,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
             //     return std.meta.stringToEnum(T, element.CharData);
             // }
         },
-        .Union => |union_info| {
+        .@"union" => |union_info| {
             if (union_info.tag_type) |_| {
                 // try each of the union fields until we find one that matches
                 // inline for (union_info.fields) |u_field| {
@@ -195,7 +195,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
             }
             @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
         },
-        .Struct => |struct_info| {
+        .@"struct" => |struct_info| {
             var r: T = undefined;
             var fields_seen = [_]bool{false} ** struct_info.fields.len;
             var fields_set: u64 = 0;
@@ -250,7 +250,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                     fields_set = fields_set + 1;
                     found_value = true;
                 }
-                if (@typeInfo(field.type) == .Optional) {
+                if (@typeInfo(field.type) == .optional) {
                     // Test "compiler assertion failure 2"
                     // Zig compiler bug circa 0.9.0. Using "and !found_value"
                     // in the if statement above will trigger assertion failure
@@ -275,7 +275,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return error.FieldElementMismatch; // see fields_seen for details
             return r;
         },
-        .Array => //|array_info| {
+        .array => //|array_info| {
         return error.ArrayNotImplemented,
         // switch (token) {
         //     .ArrayBegin => {
@@ -310,7 +310,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
         //     else => return error.UnexpectedToken,
         // }
         // },
-        .Pointer => |ptr_info| {
+        .pointer => |ptr_info| {
             const allocator = options.allocator orelse return error.AllocatorRequired;
             switch (ptr_info.size) {
                 .One => {