refactor: handle empty response bodies and multiple types of timestamps

Simon Hartcher 2025-04-22 16:06:50 +10:00
parent f4c306a2df
commit 3fea7d9f0e
8 changed files with 175 additions and 59 deletions

build.zig

@@ -65,26 +65,22 @@ pub fn build(b: *Builder) !void {
         .optimize = optimize,
     });
     exe.use_llvm = !no_llvm;

-    const smithy_dep = b.dependency("smithy", .{
-        // These are the arguments to the dependency. It expects a target and optimization level.
+    // External dependencies
+    const dep_smithy = b.dependency("smithy", .{
         .target = target,
         .optimize = optimize,
     });
-    const smithy_module = smithy_dep.module("smithy");
-    exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
+    const mod_smithy = dep_smithy.module("smithy");
+    exe.root_module.addImport("smithy", mod_smithy); // not sure this should be here...

-    // TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
-    //
-    // We are working here with kind of a weird dependency though. So we can do this
-    // another way
-    //
-    // TODO: These target/optimize are not correct, as we need to run the thing
-    // const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
-    //     .target = target,
-    //     .optimize = optimize,
-    // });
-    // const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
-    // exe.step.dependOn(&codegen_cmd.step);
+    const dep_zeit = b.dependency("zeit", .{
+        .target = target,
+        .optimize = optimize,
+    });
+    const mod_zeit = dep_zeit.module("zeit");
+    exe.root_module.addImport("zeit", mod_zeit);
+    // End External dependencies

     const run_cmd = b.addRunArtifact(exe);
     run_cmd.step.dependOn(b.getInstallStep());
@@ -104,7 +100,7 @@ pub fn build(b: *Builder) !void {
         .target = b.graph.host,
         .optimize = if (b.verbose) .Debug else .ReleaseSafe,
     });
-    cg_exe.root_module.addImport("smithy", smithy_module);
+    cg_exe.root_module.addImport("smithy", mod_smithy);
     var cg_cmd = b.addRunArtifact(cg_exe);
     cg_cmd.addArg("--models");
     cg_cmd.addArg(try std.fs.path.join(
@@ -137,6 +133,21 @@ pub fn build(b: *Builder) !void {
     exe.step.dependOn(cg);

+    // Codegen private modules
+    const mod_json = b.createModule(.{
+        .root_source_file = b.path("codegen/src/json.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+
+    const mod_date = b.createModule(.{
+        .root_source_file = b.path("codegen/src/date.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+    mod_date.addImport("zeit", mod_zeit);
+    // End codegen private modules
+
     // This allows us to have each module depend on the
     // generated service manifest.
     const service_manifest_module = b.createModule(.{
@@ -144,23 +155,25 @@ pub fn build(b: *Builder) !void {
         .target = target,
         .optimize = optimize,
     });
-    service_manifest_module.addImport("smithy", smithy_module);
+    service_manifest_module.addImport("smithy", mod_smithy);
+    service_manifest_module.addImport("date", mod_date);
+    service_manifest_module.addImport("json", mod_json);
+    service_manifest_module.addImport("zeit", mod_zeit);
     exe.root_module.addImport("service_manifest", service_manifest_module);

     // Expose module to others
-    _ = b.addModule("aws", .{
+    const mod_aws = b.addModule("aws", .{
         .root_source_file = b.path("src/aws.zig"),
-        .imports = &.{
-            .{ .name = "smithy", .module = smithy_module },
-            .{ .name = "service_manifest", .module = service_manifest_module },
-        },
     });
+    mod_aws.addImport("smithy", mod_smithy);
+    mod_aws.addImport("service_manifest", service_manifest_module);
+    mod_aws.addImport("date", mod_date);

     // Expose module to others
     _ = b.addModule("aws-signing", .{
         .root_source_file = b.path("src/aws_signing.zig"),
-        .imports = &.{.{ .name = "smithy", .module = smithy_module }},
+        .imports = &.{.{ .name = "smithy", .module = mod_smithy }},
     });

     // Similar to creating the run step earlier, this exposes a `test` step to
@@ -184,16 +197,24 @@ pub fn build(b: *Builder) !void {
     // test_step.dependOn(&run_unit_tests.step);
     for (test_targets) |t| {
         if (broken_windows and t.os_tag == .windows) continue;
-        // Creates a step for unit testing. This only builds the test executable
-        // but does not run it.
-        const unit_tests = b.addTest(.{
+        const mod_unit_tests = b.createModule(.{
             .root_source_file = b.path("src/aws.zig"),
             .target = b.resolveTargetQuery(t),
             .optimize = optimize,
+        });
+        mod_unit_tests.addImport("smithy", mod_smithy);
+        mod_unit_tests.addImport("service_manifest", service_manifest_module);
+        mod_unit_tests.addImport("date", mod_date);
+        mod_unit_tests.addImport("zeit", mod_zeit);
+
+        // Creates a step for unit testing. This only builds the test executable
+        // but does not run it.
+        const unit_tests = b.addTest(.{
+            .root_module = mod_unit_tests,
             .filters = test_filters,
         });
-        unit_tests.root_module.addImport("smithy", smithy_module);
-        unit_tests.root_module.addImport("service_manifest", service_manifest_module);
         unit_tests.step.dependOn(cg);
         unit_tests.use_llvm = !no_llvm;
@@ -219,7 +240,7 @@ pub fn build(b: *Builder) !void {
         .filters = test_filters,
     });
     smoke_test.use_llvm = !no_llvm;
-    smoke_test.root_module.addImport("smithy", smithy_module);
+    smoke_test.root_module.addImport("smithy", mod_smithy);
     smoke_test.root_module.addImport("service_manifest", service_manifest_module);
     smoke_test.step.dependOn(cg);

build.zig.zon

@@ -20,5 +20,9 @@
             .url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
             .hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
         },
+        .zeit = .{
+            .url = "https://github.com/deevus/zeit/archive/refs/heads/rfc1123.tar.gz",
+            .hash = "zeit-0.6.0-5I6bk29nAgDhK6AVMtXMWhkKTYgUncrWjnlI_8X9DPSd",
+        },
     },
 }
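Aside: a dependency pinned like this can be added or re-pinned with zig fetch, which computes the .hash field automatically. Illustrative invocation (assumes a Zig toolchain new enough to support --save):

    zig fetch --save=zeit https://github.com/deevus/zeit/archive/refs/heads/rfc1123.tar.gz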

codegen/src/date.zig (new file)

@@ -0,0 +1,53 @@
const std = @import("std");
const zeit = @import("zeit");

const DateFormat = enum {
    rfc1123,
    iso8601,
};

pub const Timestamp = enum(zeit.Nanoseconds) {
    _,

    pub fn jsonStringify(value: Timestamp, options: anytype, out_stream: anytype) !void {
        _ = options;

        const instant = try zeit.instant(.{
            .source = .{
                .unix_nano = @intFromEnum(value),
            },
        });

        try out_stream.writeAll("\"");
        try instant.time().gofmt(out_stream, "Mon, 02 Jan 2006 15:04:05 GMT");
        try out_stream.writeAll("\"");
    }

    pub fn parse(val: []const u8) !Timestamp {
        const date_format = blk: {
            if (std.ascii.isDigit(val[0])) {
                break :blk DateFormat.iso8601;
            } else {
                break :blk DateFormat.rfc1123;
            }
        };

        const ins = try zeit.instant(.{
            .source = switch (date_format) {
                DateFormat.iso8601 => .{
                    .iso8601 = val,
                },
                DateFormat.rfc1123 => .{
                    .rfc1123 = val,
                },
            },
        });

        return @enumFromInt(ins.timestamp);
    }
};

test Timestamp {
    const http_date = try Timestamp.parse("Mon, 02 Jan 2006 15:04:05 GMT");
    try std.testing.expectEqual(1136214245000, http_date);
}
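Aside: parse keys off the first byte of the input, so a leading digit routes to the ISO 8601 branch and anything else falls through to RFC 1123. A minimal usage sketch (the test name and the assumption that zeit resolves both strings to the same instant are illustrative, not part of the commit):

    test "Timestamp.parse detects the input format" {
        // "Mon, 02 Jan 2006 15:04:05 GMT" and "2006-01-02T15:04:05Z" name the same instant.
        const from_http = try Timestamp.parse("Mon, 02 Jan 2006 15:04:05 GMT");
        const from_iso = try Timestamp.parse("2006-01-02T15:04:05Z");
        try std.testing.expectEqual(from_http, from_iso);
    }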

codegen/src/main.zig

@@ -2,7 +2,6 @@ const std = @import("std");
 const smithy = @import("smithy");
 const snake = @import("snake.zig");
 const Hasher = @import("Hasher.zig");
-const json_zig = @embedFile("json.zig");

 var verbose = false;
@@ -33,8 +32,6 @@ pub fn main() anyerror!void {
         if (std.mem.eql(u8, "--models", arg))
             models_dir = try std.fs.cwd().openDir(args[i + 1], .{ .iterate = true });
     }
-    // TODO: Seems like we should remove this in favor of a package
-    try output_dir.writeFile(.{ .sub_path = "json.zig", .data = json_zig });

     // TODO: We need a different way to handle this file...
     const manifest_file_started = false;
@@ -186,8 +183,13 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
     defer arena.deinit();
     const allocator = arena.allocator();
     _ = try writer.write("const std = @import(\"std\");\n");
-    _ = try writer.write("const serializeMap = @import(\"json.zig\").serializeMap;\n");
-    _ = try writer.write("const smithy = @import(\"smithy\");\n\n");
+    _ = try writer.write("const smithy = @import(\"smithy\");\n");
+    _ = try writer.write("const json = @import(\"json\");\n");
+    _ = try writer.write("const date = @import(\"date\");\n");
+    _ = try writer.write("const zeit = @import(\"zeit\");\n");
+    _ = try writer.write("\n");
+    _ = try writer.write("const serializeMap = json.serializeMap;\n");
+    _ = try writer.write("\n");
     if (verbose) std.log.info("Processing file: {s}", .{file_name});
     const service_names = generateServicesForFilePath(allocator, ";", file_name, writer) catch |err| {
         std.log.err("Error processing file: {s}", .{file_name});
@@ -716,7 +718,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
             // The serializer will have to deal with the idea we might be an array
             return try generateTypeFor(shape.set.member_target, writer, state, true);
         },
-        .timestamp => |s| try generateSimpleTypeFor(s, "f128", writer),
+        .timestamp => |s| try generateSimpleTypeFor(s, "date.Timestamp", writer),
        .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
        .boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
        .double => |s| try generateSimpleTypeFor(s, "f64", writer),
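Aside: the practical effect on generated service code is that timestamp-shaped members change type. Approximate before/after for a member like ECR's expiresAt (rendering assumed from the generator's conventions, not copied from actual output):

    // before: expires_at: ?f128 = null,
    // after:  expires_at: ?date.Timestamp = null,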

src/aws.zig

@@ -1,5 +1,6 @@
 const builtin = @import("builtin");
 const std = @import("std");
+const zeit = @import("zeit");
 const awshttp = @import("aws_http.zig");
 const json = @import("json.zig");
@@ -396,6 +397,7 @@ pub fn Request(comptime request_action: anytype) type {
                 },
             );
             defer response.deinit();
+
             if (response.response_code != options.success_http_code) {
                 try reportTraffic(options.client.allocator, "Call Failed", aws_request, response, log.err);
                 if (options.diagnostics) |d| {
@@ -484,8 +486,11 @@ pub fn Request(comptime request_action: anytype) type {
             // If the expected result has no fields, there's no sense in
             // doing any more work. Let's bail early
             comptime var expected_body_field_len = std.meta.fields(action.Response).len;
-            if (@hasDecl(action.Response, "http_header"))
+
+            if (@hasDecl(action.Response, "http_header")) {
                 expected_body_field_len -= std.meta.fields(@TypeOf(action.Response.http_header)).len;
+            }
+
             if (@hasDecl(action.Response, "http_payload")) {
                 var rc = FullResponseType{
                     .response = .{},
@@ -508,21 +513,23 @@ pub fn Request(comptime request_action: anytype) type {
             }

             // We don't care about the body if there are no fields we expect there...
-            if (std.meta.fields(action.Response).len == 0 or expected_body_field_len == 0) {
+            if (std.meta.fields(action.Response).len == 0 or expected_body_field_len == 0 or response.body.len == 0) {
                 // Do we care if an unexpected body comes in?
                 return FullResponseType{
-                    .response = .{},
+                    .response = undefined,
                     .response_metadata = .{
                         .request_id = try requestIdFromHeaders(aws_request, response, options),
                     },
                     .parser_options = .{ .json = .{} },
-                    .raw_parsed = .{ .raw = .{} },
+                    .raw_parsed = .{ .raw = undefined },
                     .allocator = options.client.allocator,
                 };
             }
-            const isJson = try isJsonResponse(response.headers);
-            if (!isJson) return try xmlReturn(aws_request, options, response);
-            return try jsonReturn(aws_request, options, response);
+
+            return switch (try getContentType(response.headers)) {
+                .json => try jsonReturn(aws_request, options, response),
+                .xml => try xmlReturn(aws_request, options, response),
+            };
         }

         fn jsonReturn(aws_request: awshttp.HttpRequest, options: Options, response: awshttp.HttpResult) !FullResponseType {
@@ -739,8 +746,6 @@ pub fn Request(comptime request_action: anytype) type {
             const MySelf = @This();

             pub fn deinit(self: MySelf) void {
-                // This feels like it should result in a use after free, but it
-                // seems to be working?
                 self.allocator.destroy(self.parsed_response_ptr);
             }
         };
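Aside: modelling the response format as a ContentType enum rather than the old bool makes the jsonReturn/xmlReturn dispatch an exhaustive switch, so a third format later becomes a compile error at the call site instead of a silent misroute. A reduced sketch of the pattern (the dispatch function and its return strings are illustrative only):

    const ContentType = enum { json, xml };

    fn dispatch(ct: ContentType) []const u8 {
        // Exhaustive switch: adding a ContentType member without updating
        // this switch fails to compile.
        return switch (ct) {
            .json => "jsonReturn",
            .xml => "xmlReturn",
        };
    }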
@@ -829,6 +834,10 @@ fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
            log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
            return e;
        },
+        date.Timestamp => return date.Timestamp.parse(val) catch |e| {
+            log.debug("Failed to parse timestamp from string '{s}': {}", .{ val, e });
+            return e;
+        },
        else => return val,
    }
}
@@ -910,23 +919,28 @@ fn firstJsonKey(data: []const u8) []const u8 {
     log.debug("First json key: {s}", .{key});
     return key;
 }
-fn isJsonResponse(headers: []const awshttp.Header) !bool {
+
+pub const ContentType = enum {
+    json,
+    xml,
+};
+
+fn getContentType(headers: []const awshttp.Header) !ContentType {
     // EC2 ignores our accept type, but technically query protocol only
     // returns XML as well. So, we'll ignore the protocol here and just
     // look at the return type
-    var isJson: ?bool = null;
     for (headers) |h| {
         if (std.ascii.eqlIgnoreCase("Content-Type", h.name)) {
             if (std.mem.startsWith(u8, h.value, "application/json")) {
-                isJson = true;
+                return .json;
             } else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.0")) {
-                isJson = true;
+                return .json;
             } else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.1")) {
-                isJson = true;
+                return .json;
             } else if (std.mem.startsWith(u8, h.value, "text/xml")) {
-                isJson = false;
+                return .xml;
             } else if (std.mem.startsWith(u8, h.value, "application/xml")) {
-                isJson = false;
+                return .xml;
             } else {
                 log.err("Unexpected content type: {s}", .{h.value});
                 return error.UnexpectedContentType;
@@ -934,8 +948,8 @@ fn isJsonResponse(headers: []const awshttp.Header) !bool {
             break;
         }
     }
-    if (isJson == null) return error.ContentTypeNotFound;
-    return isJson.?;
+
+    return error.ContentTypeNotFound;
 }

 /// Get request ID from headers. Caller responsible for freeing memory
 fn requestIdFromHeaders(request: awshttp.HttpRequest, response: awshttp.HttpResult, options: Options) ![]u8 {
@@ -2475,10 +2489,11 @@ test "json_1_1: ECR timestamps" {
     // defer std.testing.log_level = old;
     // std.testing.log_level = .debug;
     const allocator = std.testing.allocator;
+
     var test_harness = TestSetup.init(.{
         .allocator = allocator,
         .server_response =
-        \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.7385984915E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
+        \\{"authorizationData":[{"authorizationToken":"***","expiresAt":"2022-05-17T06:56:13.652000+00:00","proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
         // \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.738598491557E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
         ,
         .server_response_headers = &.{
@@ -2503,7 +2518,13 @@ test "json_1_1: ECR timestamps" {
     try std.testing.expectEqualStrings("***", call.response.authorization_data.?[0].authorization_token.?);
     try std.testing.expectEqualStrings("https://146325435496.dkr.ecr.us-west-2.amazonaws.com", call.response.authorization_data.?[0].proxy_endpoint.?);
     // try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?);
-    try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?);
+
+    const expected_ins = try zeit.instant(.{
+        .source = .{ .iso8601 = "2022-05-17T06:56:13.652000+00:00" },
+    });
+    const expected_ts: date.Timestamp = @enumFromInt(expected_ins.timestamp);
+
+    try std.testing.expectEqual(expected_ts, call.response.authorization_data.?[0].expires_at.?);
 }

 var test_error_log_enabled = true;
 test "test server timeout works" {

src/date.zig

@@ -3,9 +3,12 @@
 // really requires the TZ DB.

 const std = @import("std");
+const codegen_date = @import("date");

 const log = std.log.scoped(.date);

+pub const Timestamp = codegen_date.Timestamp;
+
 pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };

 const SECONDS_PER_DAY = 86400; //* 24* 60 * 60 */

src/json.zig

@@ -1597,12 +1597,22 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
         .@"enum" => |enumInfo| {
             switch (token) {
                 .Number => |numberToken| {
-                    if (!numberToken.is_integer) return error.UnexpectedToken;
+                    if (!numberToken.is_integer) {
+                        // probably is in scientific notation
+                        const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
+                        return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
+                    }
+
                     const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
                     return try std.meta.intToEnum(T, n);
                 },
                 .String => |stringToken| {
                     const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
+
+                    if (std.meta.hasFn(T, "parse")) {
+                        return try T.parse(source_slice);
+                    }
+
                     switch (stringToken.escapes) {
                         .None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
                         .Some => {
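Aside: the new numeric branch is what lets services that still send expiresAt as scientific-notation epoch seconds coerce into the timestamp enum. A standalone sketch of just that conversion (the Ts enum and helper are hypothetical stand-ins for date.Timestamp and the parser branch above):

    const std = @import("std");

    const Ts = enum(i128) { _ };

    fn tsFromJsonNumber(slice: []const u8, is_integer: bool) !Ts {
        if (!is_integer) {
            // Scientific notation, e.g. "1.7385984915E9": parse as float,
            // truncate toward zero, then convert to the enum tag.
            const n = try std.fmt.parseFloat(f128, slice);
            return try std.meta.intToEnum(Ts, @as(i128, @intFromFloat(n)));
        }
        return try std.meta.intToEnum(Ts, try std.fmt.parseInt(i128, slice, 10));
    }

    test tsFromJsonNumber {
        const ts = try tsFromJsonNumber("1.7385984915E9", false);
        try std.testing.expectEqual(@as(i128, 1738598491), @intFromEnum(ts));
    }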

src/xml_shaper.zig

@@ -162,8 +162,10 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
                 return try parseInternal(optional_info.child, element, options);
             }
         },
-        .@"enum" => |enum_info| {
-            _ = enum_info;
+        .@"enum" => {
+            if (T == date.Timestamp) {
+                return try date.Timestamp.parse(element.children.items[0].CharData);
+            }
+
             // const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
             // if (numeric) |num| {
             //     return std.meta.intToEnum(T, num);