zig 0.14.0 upgrade: merge zig-develop branch
Commit 12e24b01ad

12 changed files with 109 additions and 90 deletions

.envrc (new file, 8 changes)
@@ -0,0 +1,8 @@
# vi: ft=sh
# shellcheck shell=bash

if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then
  source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ="
fi

use zig 2024.11.0-mach
@@ -28,7 +28,11 @@ jobs:
        uses: mlugg/setup-zig@v1.2.1
        with:
          version: mach-latest
      - name: Run tests
      - name: Run gen
        run: zig build gen --verbose
      - name: Run smoke test
        run: zig build smoke-test --verbose
      - name: Run full tests
        run: zig build test --verbose
      # Zig package manager expects everything to be inside a directory in the archive,
      # which it then strips out on download. So we need to shove everything inside a directory
@@ -13,11 +13,15 @@ AWS SDK for Zig

[](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed)

**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently
panics under CI, but I have not yet reproduced this panic. Running manually on
multiple machines appears to be working properly.

Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall
in x86_linux, and will vary based on services used. Tested targets:

* x86_64-linux
* riscv64-linux\*
* riscv64-linux
* aarch64-linux
* x86_64-windows
* arm-linux
@@ -26,9 +30,6 @@ in x86_linux, and will vary based on services used. Tested targets:

Tested targets are built, but not continuously tested, by CI.

\* On Zig 0.12/0.13, riscv64-linux disabled due to [LLLM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872)


Zig-Develop Branch
------------------


build.zig (16 changes)
@@ -10,11 +10,7 @@ const test_targets = [_]std.Target.Query{
    .{}, // native
    .{ .cpu_arch = .x86_64, .os_tag = .linux },
    .{ .cpu_arch = .aarch64, .os_tag = .linux },
    // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024
    // This is likely a LLVM problem unlikely to be fixed in zig 0.13
    // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440
    // Zig tracker: https://github.com/ziglang/zig/issues/18872
    // .{ .cpu_arch = .riscv64, .os_tag = .linux },
    .{ .cpu_arch = .riscv64, .os_tag = .linux },
    .{ .cpu_arch = .arm, .os_tag = .linux },
    .{ .cpu_arch = .x86_64, .os_tag = .windows },
    .{ .cpu_arch = .aarch64, .os_tag = .macos },
@@ -33,6 +29,12 @@ pub fn build(b: *Builder) !void {
    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
    const optimize = b.standardOptimizeOption(.{});

    const no_llvm = b.option(
        bool,
        "no-llvm",
        "Disable LLVM",
    ) orelse false;

    const broken_windows = b.option(
        bool,
        "broken-windows",
@@ -56,6 +58,7 @@ pub fn build(b: *Builder) !void {
        .target = target,
        .optimize = optimize,
    });
    exe.use_llvm = !no_llvm;
    const smithy_dep = b.dependency("smithy", .{
        // These are the arguments to the dependency. It expects a target and optimization level.
        .target = target,
@@ -107,6 +110,7 @@ pub fn build(b: *Builder) !void {
            .target = b.graph.host,
            .optimize = if (b.verbose) .Debug else .ReleaseSafe,
        });
        cg_exe.use_llvm = !no_llvm;
        cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy"));
        var cg_cmd = b.addRunArtifact(cg_exe);
        cg_cmd.addArg("--models");
@@ -183,6 +187,7 @@ pub fn build(b: *Builder) !void {
        });
        unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy"));
        unit_tests.step.dependOn(gen_step);
        unit_tests.use_llvm = !no_llvm;

        const run_unit_tests = b.addRunArtifact(unit_tests);
        run_unit_tests.skip_foreign_checks = true;
@@ -204,6 +209,7 @@ pub fn build(b: *Builder) !void {
        .target = target,
        .optimize = optimize,
    });
    smoke_test.use_llvm = !no_llvm;
    smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy"));
    smoke_test.step.dependOn(gen_step);

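The new option follows the standard Zig build-option pattern: `b.option` surfaces a `-D` flag on the `zig build` command line, and the resulting bool is wired into the `use_llvm` field of each compile step. A minimal standalone sketch of that pattern, with a hypothetical project layout rather than this repo's build graph:

const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Surfaces on the command line as -Dno-llvm (or -Dno-llvm=true/false).
    const no_llvm = b.option(bool, "no-llvm", "Disable LLVM") orelse false;

    const exe = b.addExecutable(.{
        .name = "demo",
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });
    // use_llvm is an optional bool on the compile step; setting it to false
    // asks for the self-hosted backend where one exists for the target.
    exe.use_llvm = !no_llvm;
    b.installArtifact(exe);
}

With that wiring, something like `zig build test -Dno-llvm=true` would exercise the non-LLVM code paths that the hunks above hook up.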
@@ -2,7 +2,7 @@ const std = @import("std");
// options is a json.Options, but since we're using our hacked json.zig we don't want to
// specifically call this out
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    if (@typeInfo(@TypeOf(map)) == .Optional) {
    if (@typeInfo(@TypeOf(map)) == .optional) {
        if (map == null)
            return true
        else
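This hunk is the first of many in this diff that apply the main mechanical change of the 0.14.0 upgrade: the `std.builtin.Type` union fields returned by `@typeInfo` are renamed from capitalized identifiers (`.Optional`, `.Pointer`, `.Struct`) to lowercase or `@"..."`-quoted ones. A minimal sketch of the new spelling, using hypothetical types rather than code from this repo:

const std = @import("std");

// Zig 0.14 spelling of @typeInfo tags: .optional, .pointer, .@"struct", ...
// (Zig 0.13 and earlier used .Optional, .Pointer, .Struct.)
fn isOptional(comptime T: type) bool {
    return @typeInfo(T) == .optional;
}

fn firstFieldName(comptime T: type) []const u8 {
    // Field access on the type-info union now uses the quoted identifier.
    return @typeInfo(T).@"struct".fields[0].name;
}

test "lowercase type-info tags" {
    try std.testing.expect(isOptional(?u32));
    try std.testing.expect(!isOptional(u32));
    try std.testing.expectEqualStrings("id", firstFieldName(struct { id: u8 }));
}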
@@ -5,8 +5,8 @@

    .dependencies = .{
        .aws = .{
            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/dfda8e77d624dfb776e3a70471501a7c610fbac1/dfda8e77d624dfb776e3a70471501a7c610fbac1-with-models.tar.gz",
            .hash = "122000ad704234e68fee82a52e3b4e365a52874ec851d978b109e05ac66a80dc86ac",
            .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz",
            .hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968",
        },
    },
}

src/aws.zig (34 changes)
@@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type {
                // And the response property below will pull whatever is the ActionResult object
                // We can grab index [0] as structs are guaranteed by zig to be returned in the order
                // declared, and we're declaring in that order in ServerResponse().
                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name);
                const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
                return FullResponseType{
                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name),
                    .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
                    .response_metadata = .{
                        .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
                    },
@@ -773,7 +773,7 @@ fn isOtherNormalResponse(comptime T: type, first_key: []const u8) bool {
    return std.mem.eql(u8, first_key, expected_key);
}
fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
    if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val);
    if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val);
    // TODO: This is terrible...fix it
    switch (T) {
        bool => return std.ascii.eqlIgnoreCase(val, "true"),
@@ -806,8 +806,8 @@ fn parseInt(comptime T: type, val: []const u8) !T {

fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
    switch (@typeInfo(@TypeOf(val))) {
        .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
        .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
        .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
        .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
        else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
    }
}
@@ -926,7 +926,7 @@ fn ServerResponse(comptime action: anytype) type {
        RequestId: []u8,
    };
    const Result = @Type(.{
        .Struct = .{
        .@"struct" = .{
            .layout = .auto,
            .fields = &[_]std.builtin.Type.StructField{
                .{
@@ -949,7 +949,7 @@ fn ServerResponse(comptime action: anytype) type {
        },
    });
    return @Type(.{
        .Struct = .{
        .@"struct" = .{
            .layout = .auto,
            .fields = &[_]std.builtin.Type.StructField{
                .{
@@ -1015,8 +1015,8 @@ fn FullResponse(comptime action: anytype) type {
}
fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
    switch (@typeInfo(@TypeOf(obj))) {
        .Pointer => allocator.free(obj),
        .Optional => if (obj) |o| safeFree(allocator, o),
        .pointer => allocator.free(obj),
        .optional => if (obj) |o| safeFree(allocator, o),
        else => {},
    }
}
@@ -1125,7 +1125,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
    var prefix = "?";
    if (@hasDecl(@TypeOf(request), "http_query")) {
        const query_arguments = @field(@TypeOf(request), "http_query");
        inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
        inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| {
            const val = @field(request, arg.name);
            if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer))
                prefix = "&";
@@ -1136,13 +1136,13 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {

fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
    switch (@typeInfo(@TypeOf(value))) {
        .Optional => {
        .optional => {
            if (value) |v|
                return try addQueryArg(ValueType, prefix, key, v, writer);
            return false;
        },
        // if this is a pointer, we want to make sure it is more than just a string
        .Pointer => |ptr| {
        .pointer => |ptr| {
            if (ptr.child == u8 or ptr.size != .Slice) {
                // This is just a string
                return try addBasicQueryArg(prefix, key, value, writer);
@@ -1154,7 +1154,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va
            }
            return std.mem.eql(u8, "&", p);
        },
        .Array => |arr| {
        .array => |arr| {
            if (arr.child == u8)
                return try addBasicQueryArg(prefix, key, value, writer);
            var p = prefix;
@@ -1274,8 +1274,8 @@ fn reportTraffic(
fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
    const ti = @typeInfo(T);
    switch (ti) {
        .Struct => {
            inline for (ti.Struct.fields) |field| {
        .@"struct" => {
            inline for (ti.@"struct".fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.type;
            }
@@ -1289,7 +1289,7 @@ test "custom serialization for map objects" {
    const allocator = std.testing.allocator;
    var buffer = std.ArrayList(u8).init(allocator);
    defer buffer.deinit();
    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2);
    defer tags.deinit();
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
@@ -2051,7 +2051,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
    defer test_harness.deinit();
    const options = try test_harness.start();
    const lambda = (Services(.{.lambda}){}).lambda;
    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
    var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
    defer tags.deinit();
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };

src/json.zig (76 changes)
@@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void {

fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
    switch (@typeInfo(T)) {
        .Bool => {
        .bool => {
            return switch (token) {
                .True => true,
                .False => false,
                else => error.UnexpectedToken,
            };
        },
        .Float, .ComptimeFloat => {
        .float, .comptime_float => {
            const numberToken = switch (token) {
                .Number => |n| n,
                else => return error.UnexpectedToken,
            };
            return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1));
        },
        .Int, .ComptimeInt => {
        .int, .comptime_int => {
            const numberToken = switch (token) {
                .Number => |n| n,
                else => return error.UnexpectedToken,
@@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
            if (std.math.round(float) != float) return error.InvalidNumber;
            return @as(T, @intFromFloat(float));
        },
        .Optional => |optionalInfo| {
        .optional => |optionalInfo| {
            if (token == .Null) {
                return null;
            } else {
                return try parseInternal(optionalInfo.child, token, tokens, options);
            }
        },
        .Enum => |enumInfo| {
        .@"enum" => |enumInfo| {
            switch (token) {
                .Number => |numberToken| {
                    if (!numberToken.is_integer) return error.UnexpectedToken;
@@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                else => return error.UnexpectedToken,
            }
        },
        .Union => |unionInfo| {
        .@"union" => |unionInfo| {
            if (unionInfo.tag_type) |_| {
                // try each of the union fields until we find one that matches
                inline for (unionInfo.fields) |u_field| {
@@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .Struct => |structInfo| {
        .@"struct" => |structInfo| {
            switch (token) {
                .ObjectBegin => {},
                else => return error.UnexpectedToken,
@@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
            }
            return r;
        },
        .Array => |arrayInfo| {
        .array => |arrayInfo| {
            switch (token) {
                .ArrayBegin => {
                    var r: T = undefined;
@@ -1770,7 +1770,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
                else => return error.UnexpectedToken,
            }
        },
        .Pointer => |ptrInfo| {
        .pointer => |ptrInfo| {
            const allocator = options.allocator orelse return error.AllocatorRequired;
            switch (ptrInfo.size) {
                .One => {
@@ -1863,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
fn typeForField(comptime T: type, comptime field_name: []const u8) ?type {
    const ti = @typeInfo(T);
    switch (ti) {
        .Struct => {
            inline for (ti.Struct.fields) |field| {
        .@"struct" => {
            inline for (ti.@"struct".fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.type;
            }
@@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool {
    // We should be getting a type that is a pointer to a slice.
    // Let's just double check before proceeding
    const ti = @typeInfo(T);
    if (ti != .Pointer) return false;
    if (ti.Pointer.size != .Slice) return false;
    const ti_child = @typeInfo(ti.Pointer.child);
    if (ti_child != .Struct) return false;
    if (ti_child.Struct.fields.len != 2) return false;
    if (ti != .pointer) return false;
    if (ti.pointer.size != .Slice) return false;
    const ti_child = @typeInfo(ti.pointer.child);
    if (ti_child != .@"struct") return false;
    if (ti_child.@"struct".fields.len != 2) return false;
    var key_found = false;
    var value_found = false;
    inline for (ti_child.Struct.fields) |field| {
    inline for (ti_child.@"struct".fields) |field| {
        if (std.mem.eql(u8, "key", field.name))
            key_found = true;
        if (std.mem.eql(u8, "value", field.name))
@@ -1904,13 +1904,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
/// Should be called with the same type and `ParseOptions` that were passed to `parse`
pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
    switch (@typeInfo(T)) {
        .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {},
        .Optional => {
        .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {},
        .optional => {
            if (value) |v| {
                return parseFree(@TypeOf(v), v, options);
            }
        },
        .Union => |unionInfo| {
        .@"union" => |unionInfo| {
            if (unionInfo.tag_type) |UnionTagType| {
                inline for (unionInfo.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
@@ -1922,17 +1922,17 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void {
                unreachable;
            }
        },
        .Struct => |structInfo| {
        .@"struct" => |structInfo| {
            inline for (structInfo.fields) |field| {
                parseFree(field.type, @field(value, field.name), options);
            }
        },
        .Array => |arrayInfo| {
        .array => |arrayInfo| {
            for (value) |v| {
                parseFree(arrayInfo.child, v, options);
            }
        },
        .Pointer => |ptrInfo| {
        .pointer => |ptrInfo| {
            const allocator = options.allocator orelse unreachable;
            switch (ptrInfo.size) {
                .One => {
@@ -2812,38 +2812,38 @@ pub fn stringify(
) !void {
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .Float, .ComptimeFloat => {
        .float, .comptime_float => {
            return std.fmt.format(out_stream, "{e}", .{value});
        },
        .Int, .ComptimeInt => {
        .int, .comptime_int => {
            return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
        },
        .Bool => {
        .bool => {
            return out_stream.writeAll(if (value) "true" else "false");
        },
        .Null => {
        .null => {
            return out_stream.writeAll("null");
        },
        .Optional => {
        .optional => {
            if (value) |payload| {
                return try stringify(payload, options, out_stream);
            } else {
                return try stringify(null, options, out_stream);
            }
        },
        .Enum => {
        .@"enum" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
        },
        .Union => {
        .@"union" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            const info = @typeInfo(T).Union;
            const info = @typeInfo(T).@"union";
            if (info.tag_type) |UnionTagType| {
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
@@ -2854,7 +2854,7 @@ pub fn stringify(
                @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .Struct => |S| {
        .@"struct" => |S| {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }
@@ -2870,7 +2870,7 @@ pub fn stringify(
                if (Field.type == void) continue;

                var output_this_field = true;
                if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false;
                if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;

                const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                    value.fieldNameFor(Field.name)
@@ -2920,10 +2920,10 @@ pub fn stringify(
            try out_stream.writeByte('}');
            return;
        },
        .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
        .Pointer => |ptr_info| switch (ptr_info.size) {
        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
        .pointer => |ptr_info| switch (ptr_info.size) {
            .One => switch (@typeInfo(ptr_info.child)) {
                .Array => {
                .array => {
                    const Slice = []const std.meta.Elem(ptr_info.child);
                    return stringify(@as(Slice, value), options, out_stream);
                },
@@ -3002,8 +3002,8 @@ pub fn stringify(
            },
            else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
        },
        .Array => return stringify(&value, options, out_stream),
        .Vector => |info| {
        .array => return stringify(&value, options, out_stream),
        .vector => |info| {
            const array: [info.len]info.child = value;
            return stringify(&array, options, out_stream);
        },

src/main.zig (10 changes)
@@ -97,7 +97,7 @@ pub fn main() anyerror!void {
            }
            continue;
        }
        inline for (@typeInfo(Tests).Enum.fields) |f| {
        inline for (@typeInfo(Tests).@"enum".fields) |f| {
            if (std.mem.eql(u8, f.name, arg)) {
                try tests.append(@field(Tests, f.name));
                break;
@@ -105,7 +105,7 @@ pub fn main() anyerror!void {
        }
    }
    if (tests.items.len == 0) {
        inline for (@typeInfo(Tests).Enum.fields) |f|
        inline for (@typeInfo(Tests).@"enum".fields) |f|
            try tests.append(@field(Tests, f.name));
    }

@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
                        const func = fns[0];
                        const arn = func.function_arn.?;
                        // This is a bit ugly. Maybe a helper function in the library would help?
                        var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
                        var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
                        defer tags.deinit();
                        tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
                        const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
@@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy {
fn typeForField(comptime T: type, comptime field_name: []const u8) !type {
    const ti = @typeInfo(T);
    switch (ti) {
        .Struct => {
            inline for (ti.Struct.fields) |field| {
        .@"struct" => {
            inline for (ti.@"struct".fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.type;
            }
@@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type {

    // finally, generate the type
    return @Type(.{
        .Struct = .{
        .@"struct" = .{
            .layout = .auto,
            .fields = &fields,
            .decls = &[_]std.builtin.Type.Declaration{},

src/url.zig (12 changes)
@@ -24,7 +24,7 @@ fn encodeStruct(
    comptime options: EncodingOptions,
) !bool {
    var rc = first;
    inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
    inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| {
        const field_name = try options.field_name_transformer(allocator, field.name);
        defer if (options.field_name_transformer.* != defaultTransformer)
            allocator.free(field_name);
@@ -47,10 +47,10 @@ pub fn encodeInternal(
    // @compileLog(@typeInfo(@TypeOf(obj)));
    var rc = first;
    switch (@typeInfo(@TypeOf(obj))) {
        .Optional => if (obj) |o| {
        .optional => if (obj) |o| {
            rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options);
        },
        .Pointer => |ti| if (ti.size == .One) {
        .pointer => |ti| if (ti.size == .One) {
            rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options);
        } else {
            if (!first) _ = try writer.write("&");
@@ -61,7 +61,7 @@ pub fn encodeInternal(
                try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
            rc = false;
        },
        .Struct => if (std.mem.eql(u8, "", field_name)) {
        .@"struct" => if (std.mem.eql(u8, "", field_name)) {
            rc = try encodeStruct(allocator, parent, first, obj, writer, options);
        } else {
            // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
@@ -73,12 +73,12 @@ pub fn encodeInternal(
            rc = try encodeStruct(allocator, new_parent, first, obj, writer, options);
            // try encodeStruct(parent ++ field_name ++ ".", first, obj,  writer, options);
        },
        .Array => {
        .array => {
            if (!first) _ = try writer.write("&");
            try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
            rc = false;
        },
        .Int, .ComptimeInt, .Float, .ComptimeFloat => {
        .int, .comptime_int, .float, .comptime_float => {
            if (!first) _ = try writer.write("&");
            try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
            rc = false;
@@ -96,14 +96,14 @@ pub fn parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse

fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T {
    switch (@typeInfo(T)) {
        .Bool => {
        .bool => {
            if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData))
                return true;
            if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData))
                return false;
            return error.UnexpectedToken;
        },
        .Float, .ComptimeFloat => {
        .float, .comptime_float => {
            return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| {
                if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
                    // We have an iso8601 in an integer field (we think)
@@ -127,7 +127,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                return e;
            };
        },
        .Int, .ComptimeInt => {
        .int, .comptime_int => {
            // 2021-10-05T16:39:45.000Z
            return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| {
                if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') {
@@ -152,7 +152,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                return e;
            };
        },
        .Optional => |optional_info| {
        .optional => |optional_info| {
            if (element.children.items.len == 0) {
                // This is almost certainly incomplete. Empty strings? xsi:nil?
                return null;
@@ -162,7 +162,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                return try parseInternal(optional_info.child, element, options);
            }
        },
        .Enum => |enum_info| {
        .@"enum" => |enum_info| {
            _ = enum_info;
            // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
            // if (numeric) |num| {
@@ -172,7 +172,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
            //     return std.meta.stringToEnum(T, element.CharData);
            // }
        },
        .Union => |union_info| {
        .@"union" => |union_info| {
            if (union_info.tag_type) |_| {
                // try each of the union fields until we find one that matches
                // inline for (union_info.fields) |u_field| {
@@ -195,7 +195,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
            }
            @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'");
        },
        .Struct => |struct_info| {
        .@"struct" => |struct_info| {
            var r: T = undefined;
            var fields_seen = [_]bool{false} ** struct_info.fields.len;
            var fields_set: u64 = 0;
@@ -250,7 +250,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                    fields_set = fields_set + 1;
                    found_value = true;
                }
                if (@typeInfo(field.type) == .Optional) {
                if (@typeInfo(field.type) == .optional) {
                    // Test "compiler assertion failure 2"
                    // Zig compiler bug circa 0.9.0. Using "and !found_value"
                    // in the if statement above will trigger assertion failure
@@ -275,7 +275,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                return error.FieldElementMismatch; // see fields_seen for details
            return r;
        },
        .Array => //|array_info| {
        .array => //|array_info| {
        return error.ArrayNotImplemented,
        // switch (token) {
        //     .ArrayBegin => {
@@ -310,7 +310,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
        //     else => return error.UnexpectedToken,
        // }
        // },
        .Pointer => |ptr_info| {
        .pointer => |ptr_info| {
            const allocator = options.allocator orelse return error.AllocatorRequired;
            switch (ptr_info.size) {
                .One => {