From 3d78705ea5b94d7b4fa250ea51b14c46a9620092 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 17 Oct 2024 10:54:01 -0700 Subject: [PATCH 01/14] update to latest zig nominated --- codegen/src/json.zig | 2 +- src/aws.zig | 34 ++++++++++---------- src/json.zig | 76 ++++++++++++++++++++++---------------------- src/servicemodel.zig | 2 +- src/url.zig | 12 +++---- src/xml_shaper.zig | 20 ++++++------ 6 files changed, 73 insertions(+), 73 deletions(-) diff --git a/codegen/src/json.zig b/codegen/src/json.zig index f9bfcf8..f3f4f47 100644 --- a/codegen/src/json.zig +++ b/codegen/src/json.zig @@ -2,7 +2,7 @@ const std = @import("std"); // options is a json.Options, but since we're using our hacked json.zig we don't want to // specifically call this out pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool { - if (@typeInfo(@TypeOf(map)) == .Optional) { + if (@typeInfo(@TypeOf(map)) == .optional) { if (map == null) return true else diff --git a/src/aws.zig b/src/aws.zig index 25745a7..06756ab 100644 --- a/src/aws.zig +++ b/src/aws.zig @@ -517,9 +517,9 @@ pub fn Request(comptime request_action: anytype) type { // And the response property below will pull whatever is the ActionResult object // We can grab index [0] as structs are guaranteed by zig to be returned in the order // declared, and we're declaring in that order in ServerResponse(). 
- const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).Struct.fields[0].name); + const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name); return FullResponseType{ - .response = @field(real_response, @typeInfo(@TypeOf(real_response)).Struct.fields[0].name), + .response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name), .response_metadata = .{ .request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId), }, @@ -762,7 +762,7 @@ pub fn Request(comptime request_action: anytype) type { } fn coerceFromString(comptime T: type, val: []const u8) anyerror!T { - if (@typeInfo(T) == .Optional) return try coerceFromString(@typeInfo(T).Optional.child, val); + if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val); // TODO: This is terrible...fix it switch (T) { bool => return std.ascii.eqlIgnoreCase(val, "true"), @@ -789,8 +789,8 @@ fn parseInt(comptime T: type, val: []const u8) !T { fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 { switch (@typeInfo(@TypeOf(val))) { - .Optional => if (val) |v| return generalAllocPrint(allocator, v) else return null, - .Array, .Pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}), + .optional => if (val) |v| return generalAllocPrint(allocator, v) else return null, + .array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}), else => return try std.fmt.allocPrint(allocator, "{any}", .{val}), } } @@ -909,7 +909,7 @@ fn ServerResponse(comptime action: anytype) type { RequestId: []u8, }; const Result = @Type(.{ - .Struct = .{ + .@"struct" = .{ .layout = .auto, .fields = &[_]std.builtin.Type.StructField{ .{ @@ -932,7 +932,7 @@ fn ServerResponse(comptime action: anytype) type { }, }); return @Type(.{ - .Struct = .{ + .@"struct" = .{ .layout = .auto, .fields = &[_]std.builtin.Type.StructField{ .{ @@ 
-998,8 +998,8 @@ fn FullResponse(comptime action: anytype) type { } fn safeFree(allocator: std.mem.Allocator, obj: anytype) void { switch (@typeInfo(@TypeOf(obj))) { - .Pointer => allocator.free(obj), - .Optional => if (obj) |o| safeFree(allocator, o), + .pointer => allocator.free(obj), + .optional => if (obj) |o| safeFree(allocator, o), else => {}, } } @@ -1108,7 +1108,7 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 { var prefix = "?"; if (@hasDecl(@TypeOf(request), "http_query")) { const query_arguments = @field(@TypeOf(request), "http_query"); - inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| { + inline for (@typeInfo(@TypeOf(query_arguments)).@"struct".fields) |arg| { const val = @field(request, arg.name); if (try addQueryArg(arg.type, prefix, @field(query_arguments, arg.name), val, writer)) prefix = "&"; @@ -1119,13 +1119,13 @@ fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 { fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool { switch (@typeInfo(@TypeOf(value))) { - .Optional => { + .optional => { if (value) |v| return try addQueryArg(ValueType, prefix, key, v, writer); return false; }, // if this is a pointer, we want to make sure it is more than just a string - .Pointer => |ptr| { + .pointer => |ptr| { if (ptr.child == u8 or ptr.size != .Slice) { // This is just a string return try addBasicQueryArg(prefix, key, value, writer); @@ -1137,7 +1137,7 @@ fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, va } return std.mem.eql(u8, "&", p); }, - .Array => |arr| { + .array => |arr| { if (arr.child == u8) return try addBasicQueryArg(prefix, key, value, writer); var p = prefix; @@ -1257,8 +1257,8 @@ fn reportTraffic( fn typeForField(comptime T: type, comptime field_name: []const u8) !type { const ti = @typeInfo(T); switch (ti) { - .Struct => { - inline for (ti.Struct.fields) |field| { + .@"struct" 
=> { + inline for (ti.@"struct".fields) |field| { if (std.mem.eql(u8, field.name, field_name)) return field.type; } @@ -1272,7 +1272,7 @@ test "custom serialization for map objects" { const allocator = std.testing.allocator; var buffer = std.ArrayList(u8).init(allocator); defer buffer.deinit(); - var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2); + var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 2); defer tags.deinit(); tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" }); tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" }); @@ -2034,7 +2034,7 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig defer test_harness.deinit(); const options = try test_harness.start(); const lambda = (Services(.{.lambda}){}).lambda; - var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1); + var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1); defer tags.deinit(); tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" }); const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items }; diff --git a/src/json.zig b/src/json.zig index b29343e..e0f4c08 100644 --- a/src/json.zig +++ b/src/json.zig @@ -1560,21 +1560,21 @@ fn skipValue(tokens: *TokenStream) SkipValueError!void { fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T { switch (@typeInfo(T)) { - .Bool => { + .bool => { return switch (token) { .True => true, .False => false, else => error.UnexpectedToken, }; }, - .Float, .ComptimeFloat => { + .float, .comptime_float => { const 
numberToken = switch (token) { .Number => |n| n, else => return error.UnexpectedToken, }; return try std.fmt.parseFloat(T, numberToken.slice(tokens.slice, tokens.i - 1)); }, - .Int, .ComptimeInt => { + .int, .comptime_int => { const numberToken = switch (token) { .Number => |n| n, else => return error.UnexpectedToken, @@ -1587,14 +1587,14 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: if (std.math.round(float) != float) return error.InvalidNumber; return @as(T, @intFromFloat(float)); }, - .Optional => |optionalInfo| { + .optional => |optionalInfo| { if (token == .Null) { return null; } else { return try parseInternal(optionalInfo.child, token, tokens, options); } }, - .Enum => |enumInfo| { + .@"enum" => |enumInfo| { switch (token) { .Number => |numberToken| { if (!numberToken.is_integer) return error.UnexpectedToken; @@ -1618,7 +1618,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: else => return error.UnexpectedToken, } }, - .Union => |unionInfo| { + .@"union" => |unionInfo| { if (unionInfo.tag_type) |_| { // try each of the union fields until we find one that matches inline for (unionInfo.fields) |u_field| { @@ -1642,7 +1642,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'"); } }, - .Struct => |structInfo| { + .@"struct" => |structInfo| { switch (token) { .ObjectBegin => {}, else => return error.UnexpectedToken, @@ -1736,7 +1736,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: } return r; }, - .Array => |arrayInfo| { + .array => |arrayInfo| { switch (token) { .ArrayBegin => { var r: T = undefined; @@ -1770,7 +1770,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: else => return error.UnexpectedToken, } }, - .Pointer => |ptrInfo| { + .pointer => |ptrInfo| { const allocator = options.allocator orelse return 
error.AllocatorRequired; switch (ptrInfo.size) { .One => { @@ -1863,8 +1863,8 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: fn typeForField(comptime T: type, comptime field_name: []const u8) ?type { const ti = @typeInfo(T); switch (ti) { - .Struct => { - inline for (ti.Struct.fields) |field| { + .@"struct" => { + inline for (ti.@"struct".fields) |field| { if (std.mem.eql(u8, field.name, field_name)) return field.type; } @@ -1878,14 +1878,14 @@ fn isMapPattern(comptime T: type) bool { // We should be getting a type that is a pointer to a slice. // Let's just double check before proceeding const ti = @typeInfo(T); - if (ti != .Pointer) return false; - if (ti.Pointer.size != .Slice) return false; - const ti_child = @typeInfo(ti.Pointer.child); - if (ti_child != .Struct) return false; - if (ti_child.Struct.fields.len != 2) return false; + if (ti != .pointer) return false; + if (ti.pointer.size != .Slice) return false; + const ti_child = @typeInfo(ti.pointer.child); + if (ti_child != .@"struct") return false; + if (ti_child.@"struct".fields.len != 2) return false; var key_found = false; var value_found = false; - inline for (ti_child.Struct.fields) |field| { + inline for (ti_child.@"struct".fields) |field| { if (std.mem.eql(u8, "key", field.name)) key_found = true; if (std.mem.eql(u8, "value", field.name)) @@ -1903,13 +1903,13 @@ pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T { /// Should be called with the same type and `ParseOptions` that were passed to `parse` pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void { switch (@typeInfo(T)) { - .Bool, .Float, .ComptimeFloat, .Int, .ComptimeInt, .Enum => {}, - .Optional => { + .bool, .float, .comptime_float, .int, .comptime_int, .@"enum" => {}, + .optional => { if (value) |v| { return parseFree(@TypeOf(v), v, options); } }, - .Union => |unionInfo| { + .@"union" => |unionInfo| { if (unionInfo.tag_type) |UnionTagType| { inline for 
(unionInfo.fields) |u_field| { if (value == @field(UnionTagType, u_field.name)) { @@ -1921,17 +1921,17 @@ pub fn parseFree(comptime T: type, value: T, options: ParseOptions) void { unreachable; } }, - .Struct => |structInfo| { + .@"struct" => |structInfo| { inline for (structInfo.fields) |field| { parseFree(field.type, @field(value, field.name), options); } }, - .Array => |arrayInfo| { + .array => |arrayInfo| { for (value) |v| { parseFree(arrayInfo.child, v, options); } }, - .Pointer => |ptrInfo| { + .pointer => |ptrInfo| { const allocator = options.allocator orelse unreachable; switch (ptrInfo.size) { .One => { @@ -2811,38 +2811,38 @@ pub fn stringify( ) !void { const T = @TypeOf(value); switch (@typeInfo(T)) { - .Float, .ComptimeFloat => { + .float, .comptime_float => { return std.fmt.format(out_stream, "{e}", .{value}); }, - .Int, .ComptimeInt => { + .int, .comptime_int => { return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream); }, - .Bool => { + .bool => { return out_stream.writeAll(if (value) "true" else "false"); }, - .Null => { + .null => { return out_stream.writeAll("null"); }, - .Optional => { + .optional => { if (value) |payload| { return try stringify(payload, options, out_stream); } else { return try stringify(null, options, out_stream); } }, - .Enum => { + .@"enum" => { if (comptime std.meta.hasFn(T, "jsonStringify")) { return value.jsonStringify(options, out_stream); } @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'"); }, - .Union => { + .@"union" => { if (comptime std.meta.hasFn(T, "jsonStringify")) { return value.jsonStringify(options, out_stream); } - const info = @typeInfo(T).Union; + const info = @typeInfo(T).@"union"; if (info.tag_type) |UnionTagType| { inline for (info.fields) |u_field| { if (value == @field(UnionTagType, u_field.name)) { @@ -2853,7 +2853,7 @@ pub fn stringify( @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'"); } }, - .Struct => |S| { + .@"struct" => |S| { if 
(comptime std.meta.hasFn(T, "jsonStringify")) { return value.jsonStringify(options, out_stream); } @@ -2869,7 +2869,7 @@ pub fn stringify( if (Field.type == void) continue; var output_this_field = true; - if (!options.emit_null and @typeInfo(Field.type) == .Optional and @field(value, Field.name) == null) output_this_field = false; + if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false; const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor")) value.fieldNameFor(Field.name) @@ -2919,10 +2919,10 @@ pub fn stringify( try out_stream.writeByte('}'); return; }, - .ErrorSet => return stringify(@as([]const u8, @errorName(value)), options, out_stream), - .Pointer => |ptr_info| switch (ptr_info.size) { + .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream), + .pointer => |ptr_info| switch (ptr_info.size) { .One => switch (@typeInfo(ptr_info.child)) { - .Array => { + .array => { const Slice = []const std.meta.Elem(ptr_info.child); return stringify(@as(Slice, value), options, out_stream); }, @@ -3001,8 +3001,8 @@ pub fn stringify( }, else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"), }, - .Array => return stringify(&value, options, out_stream), - .Vector => |info| { + .array => return stringify(&value, options, out_stream), + .vector => |info| { const array: [info.len]info.child = value; return stringify(&array, options, out_stream); }, diff --git a/src/servicemodel.zig b/src/servicemodel.zig index 0f26886..93cf6db 100644 --- a/src/servicemodel.zig +++ b/src/servicemodel.zig @@ -20,7 +20,7 @@ pub fn Services(comptime service_imports: anytype) type { // finally, generate the type return @Type(.{ - .Struct = .{ + .@"struct" = .{ .layout = .auto, .fields = &fields, .decls = &[_]std.builtin.Type.Declaration{}, diff --git a/src/url.zig b/src/url.zig index b2e1500..ed8e61e 100644 --- a/src/url.zig +++ b/src/url.zig @@ -24,7 +24,7 @@ fn 
encodeStruct( comptime options: EncodingOptions, ) !bool { var rc = first; - inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| { + inline for (@typeInfo(@TypeOf(obj)).@"struct".fields) |field| { const field_name = try options.field_name_transformer(allocator, field.name); defer if (options.field_name_transformer.* != defaultTransformer) allocator.free(field_name); @@ -47,10 +47,10 @@ pub fn encodeInternal( // @compileLog(@typeInfo(@TypeOf(obj))); var rc = first; switch (@typeInfo(@TypeOf(obj))) { - .Optional => if (obj) |o| { + .optional => if (obj) |o| { rc = try encodeInternal(allocator, parent, field_name, first, o, writer, options); }, - .Pointer => |ti| if (ti.size == .One) { + .pointer => |ti| if (ti.size == .One) { rc = try encodeInternal(allocator, parent, field_name, first, obj.*, writer, options); } else { if (!first) _ = try writer.write("&"); @@ -61,7 +61,7 @@ pub fn encodeInternal( try writer.print("{s}{s}={any}", .{ parent, field_name, obj }); rc = false; }, - .Struct => if (std.mem.eql(u8, "", field_name)) { + .@"struct" => if (std.mem.eql(u8, "", field_name)) { rc = try encodeStruct(allocator, parent, first, obj, writer, options); } else { // TODO: It would be lovely if we could concat at compile time or allocPrint at runtime @@ -73,12 +73,12 @@ pub fn encodeInternal( rc = try encodeStruct(allocator, new_parent, first, obj, writer, options); // try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options); }, - .Array => { + .array => { if (!first) _ = try writer.write("&"); try writer.print("{s}{s}={s}", .{ parent, field_name, obj }); rc = false; }, - .Int, .ComptimeInt, .Float, .ComptimeFloat => { + .int, .comptime_int, .float, .comptime_float => { if (!first) _ = try writer.write("&"); try writer.print("{s}{s}={d}", .{ parent, field_name, obj }); rc = false; diff --git a/src/xml_shaper.zig b/src/xml_shaper.zig index e4febc6..0acd482 100644 --- a/src/xml_shaper.zig +++ b/src/xml_shaper.zig @@ -96,14 +96,14 @@ pub fn 
parse(comptime T: type, source: []const u8, options: ParseOptions) !Parse fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) !T { switch (@typeInfo(T)) { - .Bool => { + .bool => { if (std.ascii.eqlIgnoreCase("true", element.children.items[0].CharData)) return true; if (std.ascii.eqlIgnoreCase("false", element.children.items[0].CharData)) return false; return error.UnexpectedToken; }, - .Float, .ComptimeFloat => { + .float, .comptime_float => { return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| { if (log_parse_traces) { std.log.err( @@ -121,7 +121,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) return e; }; }, - .Int, .ComptimeInt => { + .int, .comptime_int => { // 2021-10-05T16:39:45.000Z return std.fmt.parseInt(T, element.children.items[0].CharData, 10) catch |e| { if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') { @@ -146,7 +146,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) return e; }; }, - .Optional => |optional_info| { + .optional => |optional_info| { if (element.children.items.len == 0) { // This is almost certainly incomplete. Empty strings? xsi:nil? 
return null; @@ -156,7 +156,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) return try parseInternal(optional_info.child, element, options); } }, - .Enum => |enum_info| { + .@"enum" => |enum_info| { _ = enum_info; // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null; // if (numeric) |num| { @@ -166,7 +166,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) // return std.meta.stringToEnum(T, element.CharData); // } }, - .Union => |union_info| { + .@"union" => |union_info| { if (union_info.tag_type) |_| { // try each of the union fields until we find one that matches // inline for (union_info.fields) |u_field| { @@ -189,7 +189,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) } @compileError("Unable to parse into untagged union '" ++ @typeName(T) ++ "'"); }, - .Struct => |struct_info| { + .@"struct" => |struct_info| { var r: T = undefined; var fields_seen = [_]bool{false} ** struct_info.fields.len; var fields_set: u64 = 0; @@ -244,7 +244,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) fields_set = fields_set + 1; found_value = true; } - if (@typeInfo(field.type) == .Optional) { + if (@typeInfo(field.type) == .optional) { // Test "compiler assertion failure 2" // Zig compiler bug circa 0.9.0. 
Using "and !found_value" // in the if statement above will trigger assertion failure @@ -269,7 +269,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) return error.FieldElementMismatch; // see fields_seen for details return r; }, - .Array => //|array_info| { + .array => //|array_info| { return error.ArrayNotImplemented, // switch (token) { // .ArrayBegin => { @@ -304,7 +304,7 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) // else => return error.UnexpectedToken, // } // }, - .Pointer => |ptr_info| { + .pointer => |ptr_info| { const allocator = options.allocator orelse return error.AllocatorRequired; switch (ptr_info.size) { .One => { From 9497db373c2ed181ad7d17ee13171179fd021709 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 17 Oct 2024 11:08:14 -0700 Subject: [PATCH 02/14] ci failing with panic that does not happen locally --- .gitea/workflows/zig-mach.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.gitea/workflows/zig-mach.yaml b/.gitea/workflows/zig-mach.yaml index 458c5d7..297e7e0 100644 --- a/.gitea/workflows/zig-mach.yaml +++ b/.gitea/workflows/zig-mach.yaml @@ -21,10 +21,12 @@ jobs: with: ref: zig-develop - name: Setup Zig - uses: https://git.lerch.org/lobo/setup-zig@v3 + uses: mlugg/setup-zig@v1.2.1 with: version: mach-latest - - name: Run tests + - name: Run smoke test + run: zig build smoke-test --verbose + - name: Run full tests run: zig build test --verbose # Zig package manager expects everything to be inside a directory in the archive, # which it then strips out on download. 
So we need to shove everything inside a directory From 4fa30a70cc615880adc54f1ce9c240ccd32dbe08 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 17 Oct 2024 11:28:54 -0700 Subject: [PATCH 03/14] more ci stuff --- .gitea/workflows/zig-mach.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitea/workflows/zig-mach.yaml b/.gitea/workflows/zig-mach.yaml index 297e7e0..292e393 100644 --- a/.gitea/workflows/zig-mach.yaml +++ b/.gitea/workflows/zig-mach.yaml @@ -24,6 +24,8 @@ jobs: uses: mlugg/setup-zig@v1.2.1 with: version: mach-latest + - name: Run gen + run: zig build gen --verbose - name: Run smoke test run: zig build smoke-test --verbose - name: Run full tests From 97b784f8e315f380075086d05f069dfdf6a65d91 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 17 Oct 2024 12:00:20 -0700 Subject: [PATCH 04/14] cleanup main, re-enable riscv64-linux --- README.md | 5 +---- build.zig | 6 +----- src/main.zig | 10 +++++----- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 2d4b993..a7f6964 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Current executable size for the demo is 980k after compiling with -Doptimize=Rel in x86_linux, and will vary based on services used. Tested targets: * x86_64-linux -* riscv64-linux\* +* riscv64-linux * aarch64-linux * x86_64-windows * arm-linux @@ -26,9 +26,6 @@ in x86_linux, and will vary based on services used. Tested targets: Tested targets are built, but not continuously tested, by CI. 
-\* On Zig 0.12/0.13, riscv64-linux disabled due to [LLLM's O(N^2) codegen](https://github.com/ziglang/zig/issues/18872) - - Zig-Develop Branch ------------------ diff --git a/build.zig b/build.zig index a5bbc14..1c910fa 100644 --- a/build.zig +++ b/build.zig @@ -10,11 +10,7 @@ const test_targets = [_]std.Target.Query{ .{}, // native .{ .cpu_arch = .x86_64, .os_tag = .linux }, .{ .cpu_arch = .aarch64, .os_tag = .linux }, - // The test executable linking process just spins forever in LLVM using nominated zig 0.13 May 2024 - // This is likely a LLVM problem unlikely to be fixed in zig 0.13 - // Potentially this issue: https://github.com/llvm/llvm-project/issues/81440 - // Zig tracker: https://github.com/ziglang/zig/issues/18872 - // .{ .cpu_arch = .riscv64, .os_tag = .linux }, + .{ .cpu_arch = .riscv64, .os_tag = .linux }, .{ .cpu_arch = .arm, .os_tag = .linux }, .{ .cpu_arch = .x86_64, .os_tag = .windows }, .{ .cpu_arch = .aarch64, .os_tag = .macos }, diff --git a/src/main.zig b/src/main.zig index 6eb2fdc..ed3024a 100644 --- a/src/main.zig +++ b/src/main.zig @@ -97,7 +97,7 @@ pub fn main() anyerror!void { } continue; } - inline for (@typeInfo(Tests).Enum.fields) |f| { + inline for (@typeInfo(Tests).@"enum".fields) |f| { if (std.mem.eql(u8, f.name, arg)) { try tests.append(@field(Tests, f.name)); break; @@ -105,7 +105,7 @@ pub fn main() anyerror!void { } } if (tests.items.len == 0) { - inline for (@typeInfo(Tests).Enum.fields) |f| + inline for (@typeInfo(Tests).@"enum".fields) |f| try tests.append(@field(Tests, f.name)); } @@ -192,7 +192,7 @@ pub fn main() anyerror!void { const func = fns[0]; const arn = func.function_arn.?; // This is a bit ugly. Maybe a helper function in the library would help? 
- var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1); + var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1); defer tags.deinit(); tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" }); const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items }; @@ -380,8 +380,8 @@ fn proxyFromString(string: []const u8) !std.http.Client.Proxy { fn typeForField(comptime T: type, comptime field_name: []const u8) !type { const ti = @typeInfo(T); switch (ti) { - .Struct => { - inline for (ti.Struct.fields) |field| { + .@"struct" => { + inline for (ti.@"struct".fields) |field| { if (std.mem.eql(u8, field.name, field_name)) return field.type; } From 0892914c5b4f796b59bd90f0179a65f4209c9054 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 17 Oct 2024 12:00:42 -0700 Subject: [PATCH 05/14] add build status note to readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index a7f6964..cfb703a 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,10 @@ AWS SDK for Zig [![Build Status: Zig Nightly](https://git.lerch.org/lobo/aws-sdk-for-zig/actions/workflows/zig-nightly.yaml/badge.svg)](https://git.lerch.org/lobo/aws-sdk-for-zig/actions?workflow=zig-nightly.yaml&state=closed) +**NOTE ON BUILD STATUS**: The nightly/mach nominated version of this currently +panics under CI, but I have not yet reproduced this panic. Running manually on +multiple machines appears to be working properly + Current executable size for the demo is 980k after compiling with -Doptimize=ReleaseSmall in x86_linux, and will vary based on services used. 
Tested targets: From 6240225db28c759d3977b3bd62896e7eb319479d Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Sat, 26 Oct 2024 18:33:30 -0700 Subject: [PATCH 06/14] workaround for zig issue 21815 --- .gitea/workflows/zig-mach.yaml | 5 +++++ .gitea/workflows/zig-nightly.yaml | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/.gitea/workflows/zig-mach.yaml b/.gitea/workflows/zig-mach.yaml index 292e393..e8ecb3d 100644 --- a/.gitea/workflows/zig-mach.yaml +++ b/.gitea/workflows/zig-mach.yaml @@ -10,6 +10,11 @@ env: PKG_PREFIX: nominated-zig jobs: build-zig-nominated-mach-latest: + container: + # We need CAP_SYS_PTRACE for stack traces due to a regression in 0.14.0 + # TODO: Remove this after https://github.com/ziglang/zig/issues/21815 is + # addressed + options: --cap-add CAP_SYS_PTRACE runs-on: ubuntu-latest # Need to use the default container with node and all that, so we can # use JS-based actions like actions/checkout@v3... diff --git a/.gitea/workflows/zig-nightly.yaml b/.gitea/workflows/zig-nightly.yaml index 71bf5b5..98605e6 100644 --- a/.gitea/workflows/zig-nightly.yaml +++ b/.gitea/workflows/zig-nightly.yaml @@ -10,6 +10,11 @@ env: PKG_PREFIX: nightly-zig jobs: build-zig-nightly: + container: + # We need CAP_SYS_PTRACE for stack traces due to a regression in 0.14.0 + # TODO: Remove this after https://github.com/ziglang/zig/issues/21815 is + # addressed + options: --cap-add CAP_SYS_PTRACE runs-on: ubuntu-latest # Need to use the default container with node and all that, so we can # use JS-based actions like actions/checkout@v3... 
From debb4dab60333da10676faab34cf0ad6d8622cf0 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Sat, 26 Oct 2024 18:50:58 -0700 Subject: [PATCH 07/14] update example dependencies --- example/build.zig.zon | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/example/build.zig.zon b/example/build.zig.zon index 3b47a9e..e14b3b7 100644 --- a/example/build.zig.zon +++ b/example/build.zig.zon @@ -5,8 +5,8 @@ .dependencies = .{ .aws = .{ - .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e5b662873a6745a7e761643b1ca3d8637bf1222f/e5b662873a6745a7e761643b1ca3d8637bf1222f-with-models.tar.gz", - .hash = "12206394d50a9df1bf3fa6390cd5525bf97448d0f74a85113ef70c3bb60dcf4b7292", + .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/6240225db28c759d3977b3bd62896e7eb319479d/6240225db28c759d3977b3bd62896e7eb319479dnominated-zig-with-models.tar.gz", + .hash = "1220a8398a1040f731c02741639192c68bf911ac56640650329329b6e8d9a77ef278", }, }, } From 88d7e99d6b8cd8114a8690d1e55c8fd2113deea5 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 19 Dec 2024 08:43:25 -0800 Subject: [PATCH 08/14] add a build option to disable LLVM --- build.zig | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/build.zig b/build.zig index 1c910fa..7946a7e 100644 --- a/build.zig +++ b/build.zig @@ -29,6 +29,12 @@ pub fn build(b: *Builder) !void { // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. const optimize = b.standardOptimizeOption(.{}); + const no_llvm = b.option( + bool, + "no-llvm", + "Disable LLVM", + ) orelse false; + const broken_windows = b.option( bool, "broken-windows", @@ -52,6 +58,7 @@ pub fn build(b: *Builder) !void { .target = target, .optimize = optimize, }); + exe.use_llvm = !no_llvm; const smithy_dep = b.dependency("smithy", .{ // These are the arguments to the dependency. It expects a target and optimization level. 
.target = target, @@ -103,6 +110,7 @@ pub fn build(b: *Builder) !void { .target = b.host, .optimize = if (b.verbose) .Debug else .ReleaseSafe, }); + cg_exe.use_llvm = !no_llvm; cg_exe.root_module.addImport("smithy", smithy_dep.module("smithy")); var cg_cmd = b.addRunArtifact(cg_exe); cg_cmd.addArg("--models"); @@ -179,6 +187,7 @@ pub fn build(b: *Builder) !void { }); unit_tests.root_module.addImport("smithy", smithy_dep.module("smithy")); unit_tests.step.dependOn(gen_step); + unit_tests.use_llvm = !no_llvm; const run_unit_tests = b.addRunArtifact(unit_tests); run_unit_tests.skip_foreign_checks = true; @@ -200,6 +209,7 @@ pub fn build(b: *Builder) !void { .target = target, .optimize = optimize, }); + smoke_test.use_llvm = !no_llvm; smoke_test.root_module.addImport("smithy", smithy_dep.module("smithy")); smoke_test.step.dependOn(gen_step); From 35fad85c13fb0fe1eca9229d02ff0b253c7631ab Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 19 Dec 2024 08:48:37 -0800 Subject: [PATCH 09/14] add .envrc --- .envrc | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 .envrc diff --git a/.envrc b/.envrc new file mode 100644 index 0000000..75960b7 --- /dev/null +++ b/.envrc @@ -0,0 +1,8 @@ +# vi: ft=sh +# shellcheck shell=bash + +if ! has zvm_direnv_version || ! 
zvm_direnv_version 1.0.0; then + source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ=" +fi + +use zig mach-latest From e02fb699fc47f19d19cad99209bd480ca6963295 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 19 Dec 2024 08:53:57 -0800 Subject: [PATCH 10/14] move away from deprecated API --- build.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.zig b/build.zig index 7946a7e..658934e 100644 --- a/build.zig +++ b/build.zig @@ -107,7 +107,7 @@ pub fn build(b: *Builder) !void { .name = "codegen", .root_source_file = b.path("codegen/src/main.zig"), // We need this generated for the host, not the real target - .target = b.host, + .target = b.graph.host, .optimize = if (b.verbose) .Debug else .ReleaseSafe, }); cg_exe.use_llvm = !no_llvm; From e3bb4142d64c38aa4a240ca3a6c567b8bc3f3435 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Thu, 19 Dec 2024 09:04:42 -0800 Subject: [PATCH 11/14] update example dependency --- example/build.zig.zon | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/example/build.zig.zon b/example/build.zig.zon index e14b3b7..56555c2 100644 --- a/example/build.zig.zon +++ b/example/build.zig.zon @@ -5,8 +5,8 @@ .dependencies = .{ .aws = .{ - .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/6240225db28c759d3977b3bd62896e7eb319479d/6240225db28c759d3977b3bd62896e7eb319479dnominated-zig-with-models.tar.gz", - .hash = "1220a8398a1040f731c02741639192c68bf911ac56640650329329b6e8d9a77ef278", + .url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/e02fb699fc47f19d19cad99209bd480ca6963295/e02fb699fc47f19d19cad99209bd480ca6963295nominated-zig-with-models.tar.gz", + .hash = "1220fa9b39c985449936f0e3f02bbb6fdafa64435e502eb78fd47d457b96876b7968", }, }, } From b369c29e84523763d2e3c9021a55c9f0e9bbf8e7 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Wed, 5 Feb 2025 13:14:10 -0800 Subject: [PATCH 
12/14] manually set latest mach to remove confusion --- .envrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.envrc b/.envrc index 75960b7..7296f66 100644 --- a/.envrc +++ b/.envrc @@ -5,4 +5,4 @@ if ! has zvm_direnv_version || ! zvm_direnv_version 1.0.0; then source_url "https://git.lerch.org/lobo/zvm-direnv/raw/tag/1.0.0/direnvrc" "sha256-Gtddvcr6aJsrjKd53uChxA1reQmJgEBpmPUWmMdtDIQ=" fi -use zig mach-latest +use zig 2024.11.0-mach From 78b36e2316de9017d3aa1a440e7afe7064338b13 Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Wed, 5 Feb 2025 13:21:53 -0800 Subject: [PATCH 13/14] support for timestamp as f128 (more) f128 is not really the correct data type long term. More information on the exact details is available at https://smithy.io/2.0/aws/protocols/aws-json-1_1-protocol.html and https://smithy.io/2.0/spec/protocol-traits.html#timestampformat-trait But...it will hold all our values and parses correctly, so we can use it for now --- codegen/src/main.zig | 2 +- src/aws.zig | 6 ++++++ src/xml_shaper.zig | 6 ++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/codegen/src/main.zig b/codegen/src/main.zig index 1556204..36f12af 100644 --- a/codegen/src/main.zig +++ b/codegen/src/main.zig @@ -716,7 +716,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState // The serializer will have to deal with the idea we might be an array return try generateTypeFor(shape.set.member_target, writer, state, true); }, - .timestamp => |s| try generateSimpleTypeFor(s, "i64", writer), + .timestamp => |s| try generateSimpleTypeFor(s, "f128", writer), + .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer), .boolean => |s| try generateSimpleTypeFor(s, "bool", writer), .double => |s| try generateSimpleTypeFor(s, "f64", writer), diff --git a/src/aws.zig b/src/aws.zig index 06756ab..a527cc2 100644 --- a/src/aws.zig +++ b/src/aws.zig @@ -783,6 +783,12 @@ fn parseInt(comptime T: type, val: []const u8) !T { return e; }; } + if (T
== f128) { + return @as(f128, date.parseEnglishToTimestamp(val)) catch |e| { + log.err("Error coercing date string '{s}' to timestamp value", .{val}); + return e; + }; + } log.err("Error parsing string '{s}' to integer", .{val}); return rc; } diff --git a/src/xml_shaper.zig b/src/xml_shaper.zig index 0acd482..4a6c29d 100644 --- a/src/xml_shaper.zig +++ b/src/xml_shaper.zig @@ -105,6 +105,12 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions) }, .float, .comptime_float => { return std.fmt.parseFloat(T, element.children.items[0].CharData) catch |e| { + if (element.children.items[0].CharData[element.children.items[0].CharData.len - 1] == 'Z') { + // We have an iso8601 in an integer field (we think) + // Try to coerce this into our type + const timestamp = try date.parseIso8601ToTimestamp(element.children.items[0].CharData); + return @floatFromInt(timestamp); + } if (log_parse_traces) { std.log.err( "Could not parse '{s}' as float in element '{s}': {any}", From acd658990964325959a458f2d6c08629bb76026f Mon Sep 17 00:00:00 2001 From: Emil Lerch Date: Wed, 5 Feb 2025 13:22:52 -0800 Subject: [PATCH 14/14] add support for raw responses beginning with name of single field in response struct --- src/aws.zig | 71 ++++++++++++++++++++++++++++++++++++++++++++++++++-- src/json.zig | 1 + 2 files changed, 70 insertions(+), 2 deletions(-) diff --git a/src/aws.zig b/src/aws.zig index a527cc2..2f07896 100644 --- a/src/aws.zig +++ b/src/aws.zig @@ -709,8 +709,10 @@ pub fn Request(comptime request_action: anytype) type { // Extract the first json key const key = firstJsonKey(data); - const found_normal_json_response = std.mem.eql(u8, key, action.action_name ++ "Response") or - std.mem.eql(u8, key, action.action_name ++ "Result"); + const found_normal_json_response = + std.mem.eql(u8, key, action.action_name ++ "Response") or + std.mem.eql(u8, key, action.action_name ++ "Result") or + isOtherNormalResponse(response_types.NormalResponse, key); var 
raw_response_parsed = false; var stream = json.TokenStream.init(data); const parsed_response_ptr = blk: { @@ -734,6 +736,7 @@ pub fn Request(comptime request_action: anytype) type { log.debug("Appears server has provided a raw response", .{}); raw_response_parsed = true; const ptr = try options.client.allocator.create(response_types.NormalResponse); + errdefer options.client.allocator.destroy(ptr); @field(ptr.*, std.meta.fields(action.Response)[0].name) = json.parse(response_types.RawResponse, &stream, parser_options) catch |e| { log.err( @@ -761,6 +764,14 @@ pub fn Request(comptime request_action: anytype) type { }; } +fn isOtherNormalResponse(comptime T: type, first_key: []const u8) bool { + const fields = std.meta.fields(T); + if (fields.len != 1) return false; + const first_field = fields[0]; + if (!@hasDecl(T, "fieldNameFor")) return false; + const expected_key = T.fieldNameFor(undefined, first_field.name); + return std.mem.eql(u8, first_key, expected_key); +} fn coerceFromString(comptime T: type, val: []const u8) anyerror!T { if (@typeInfo(T) == .optional) return try coerceFromString(@typeInfo(T).optional.child, val); // TODO: This is terrible...fix it @@ -2270,3 +2281,59 @@ test "rest_xml_with_input: S3 put object" { try std.testing.expectEqualStrings("AES256", result.response.server_side_encryption.?); try std.testing.expectEqualStrings("37b51d194a7513e45b56f6524f2d51f2", result.response.e_tag.?); } +test "raw ECR timestamps" { + // This is a way to test the json parsing. 
Ultimately the more robust tests + // should be preferred, but in this case we were tracking down an issue + // for which the root cause was the incorrect type being passed to the parse + // routine + const allocator = std.testing.allocator; + const ecr = (Services(.{.ecr}){}).ecr; + const options = json.ParseOptions{ + .allocator = allocator, + .allow_camel_case_conversion = true, // new option + .allow_snake_case_conversion = true, // new option + .allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though + .allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though + }; + var stream = json.TokenStream.init( + \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.7385984915E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]} + ); + const ptr = try json.parse(ecr.get_authorization_token.Response, &stream, options); + defer json.parseFree(ecr.get_authorization_token.Response, ptr, options); +} +test "json_1_1: ECR timestamps" { + // See: https://github.com/elerch/aws-sdk-for-zig/issues/5 + // const old = std.testing.log_level; + // defer std.testing.log_level = old; + // std.testing.log_level = .debug; + const allocator = std.testing.allocator; + var test_harness = TestSetup.init(.{ + .allocator = allocator, + .server_response = + \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.7385984915E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]} + // \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.738598491557E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]} + , + .server_response_headers = &.{ + .{ .name = "Content-Type", .value = "application/json" }, + .{ .name = "x-amzn-RequestId", .value = "QBI72OUIN8U9M9AG6PCSADJL4JVV4KQNSO5AEMVJF66Q9ASUAAJG" }, + }, + }); + defer test_harness.deinit(); + const options = try test_harness.start(); + const ecr = (Services(.{.ecr}){}).ecr; + 
std.log.debug("Typeof response {}", .{@TypeOf(ecr.get_authorization_token.Response{})}); + const call = try test_harness.client.call(ecr.get_authorization_token.Request{}, options); + defer call.deinit(); + test_harness.stop(); + // Request expectations + try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method); + try std.testing.expectEqualStrings("/", test_harness.request_options.request_target); + try test_harness.request_options.expectHeader("X-Amz-Target", "AmazonEC2ContainerRegistry_V20150921.GetAuthorizationToken"); + // Response expectations + try std.testing.expectEqualStrings("QBI72OUIN8U9M9AG6PCSADJL4JVV4KQNSO5AEMVJF66Q9ASUAAJG", call.response_metadata.request_id); + try std.testing.expectEqual(@as(usize, 1), call.response.authorization_data.?.len); + try std.testing.expectEqualStrings("***", call.response.authorization_data.?[0].authorization_token.?); + try std.testing.expectEqualStrings("https://146325435496.dkr.ecr.us-west-2.amazonaws.com", call.response.authorization_data.?[0].proxy_endpoint.?); + // try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?); + try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?); +} diff --git a/src/json.zig b/src/json.zig index e0f4c08..637beef 100644 --- a/src/json.zig +++ b/src/json.zig @@ -1895,6 +1895,7 @@ fn isMapPattern(comptime T: type) bool { } pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T { + // std.log.debug("parsing {s} into type {s}", .{ tokens.slice, @typeName(T) }); const token = (try tokens.next()) orelse return error.UnexpectedEndOfJson; return parseInternal(T, token, tokens, options); }