Merge PR 15: Handle empty response bodies and multiple types of timestamps
All checks were successful
AWS-Zig Build / build-zig-amd64-host (push) Successful in 7m18s

This commit is contained in:
Emil Lerch 2025-04-28 20:15:12 -07:00
commit 68a77db6d9
Signed by: lobo
GPG key ID: A7B62D657EF764F8
17 changed files with 818 additions and 769 deletions

125
build.zig
View file

@ -19,14 +19,7 @@ const test_targets = [_]std.Target.Query{
};
pub fn build(b: *Builder) !void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
// for restricting supported target set are available.
const target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
const optimize = b.standardOptimizeOption(.{});
const no_llvm = b.option(
@ -46,6 +39,7 @@ pub fn build(b: *Builder) !void {
"test-filter",
"Skip tests that do not match any of the specified filters",
) orelse &.{};
// TODO: Embed the current git version in the code. We can do this
// by looking for .git/HEAD (if it exists, follow the ref to /ref/heads/whatevs,
// grab that commit, and use b.addOptions/exe.addOptions to generate the
@ -58,33 +52,49 @@ pub fn build(b: *Builder) !void {
// executable
// TODO: This executable should not be built when importing as a package.
// It relies on code gen and is all fouled up when getting imported
const exe = b.addExecutable(.{
.name = "demo",
const mod_exe = b.createModule(.{
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
exe.use_llvm = !no_llvm;
const smithy_dep = b.dependency("smithy", .{
// These are the arguments to the dependency. It expects a target and optimization level.
const exe = b.addExecutable(.{
.name = "demo",
.root_module = mod_exe,
.use_llvm = !no_llvm,
});
// External dependencies
const dep_smithy = b.dependency("smithy", .{
.target = target,
.optimize = optimize,
});
const smithy_module = smithy_dep.module("smithy");
exe.root_module.addImport("smithy", smithy_module); // not sure this should be here...
const mod_smithy = dep_smithy.module("smithy");
mod_exe.addImport("smithy", mod_smithy); // not sure this should be here...
// TODO: This does not work correctly due to https://github.com/ziglang/zig/issues/16354
//
// We are working here with kind of a weird dependency though. So we can do this
// another way
//
// TODO: These target/optimize are not correct, as we need to run the thing
// const codegen = b.anonymousDependency("codegen/", @import("codegen/build.zig"), .{
// .target = target,
// .optimize = optimize,
// });
// const codegen_cmd = b.addRunArtifact(codegen.artifact("codegen"));
// exe.step.dependOn(&codegen_cmd.step);
const dep_zeit = b.dependency("zeit", .{
.target = target,
.optimize = optimize,
});
const mod_zeit = dep_zeit.module("zeit");
mod_exe.addImport("zeit", mod_zeit);
// End External dependencies
// Private modules/dependencies
const dep_json = b.dependency("json", .{
.target = target,
.optimize = optimize,
});
const mod_json = dep_json.module("json");
mod_exe.addImport("json", mod_json);
const dep_date = b.dependency("date", .{
.target = target,
.optimize = optimize,
});
const mod_date = dep_date.module("date");
mod_exe.addImport("date", mod_date);
// End private modules/dependencies
const run_cmd = b.addRunArtifact(exe);
run_cmd.step.dependOn(b.getInstallStep());
@ -97,14 +107,20 @@ pub fn build(b: *Builder) !void {
const cg = b.step("gen", "Generate zig service code from smithy models");
const cg_exe = b.addExecutable(.{
.name = "codegen",
const cg_mod = b.createModule(.{
.root_source_file = b.path("codegen/src/main.zig"),
// We need this generated for the host, not the real target
.target = b.graph.host,
.optimize = if (b.verbose) .Debug else .ReleaseSafe,
});
cg_exe.root_module.addImport("smithy", smithy_module);
cg_mod.addImport("smithy", mod_smithy);
cg_mod.addImport("date", mod_date);
cg_mod.addImport("json", mod_json);
const cg_exe = b.addExecutable(.{
.name = "codegen",
.root_module = cg_mod,
});
var cg_cmd = b.addRunArtifact(cg_exe);
cg_cmd.addArg("--models");
cg_cmd.addArg(try std.fs.path.join(
@ -144,24 +160,32 @@ pub fn build(b: *Builder) !void {
.target = target,
.optimize = optimize,
});
service_manifest_module.addImport("smithy", smithy_module);
service_manifest_module.addImport("smithy", mod_smithy);
service_manifest_module.addImport("date", mod_date);
service_manifest_module.addImport("json", mod_json);
service_manifest_module.addImport("zeit", mod_zeit);
exe.root_module.addImport("service_manifest", service_manifest_module);
mod_exe.addImport("service_manifest", service_manifest_module);
// Expose module to others
_ = b.addModule("aws", .{
const mod_aws = b.addModule("aws", .{
.root_source_file = b.path("src/aws.zig"),
.imports = &.{
.{ .name = "smithy", .module = smithy_module },
.{ .name = "service_manifest", .module = service_manifest_module },
},
.target = target,
.optimize = optimize,
});
mod_aws.addImport("smithy", mod_smithy);
mod_aws.addImport("service_manifest", service_manifest_module);
mod_aws.addImport("date", mod_date);
mod_aws.addImport("json", mod_json);
mod_aws.addImport("zeit", mod_zeit);
// Expose module to others
_ = b.addModule("aws-signing", .{
const mod_aws_signing = b.addModule("aws-signing", .{
.root_source_file = b.path("src/aws_signing.zig"),
.imports = &.{.{ .name = "smithy", .module = smithy_module }},
});
mod_aws_signing.addImport("date", mod_date);
mod_aws_signing.addImport("smithy", mod_smithy);
mod_aws_signing.addImport("json", mod_json);
// Similar to creating the run step earlier, this exposes a `test` step to
// the `zig build --help` menu, providing a way for the user to request
@ -184,16 +208,25 @@ pub fn build(b: *Builder) !void {
// test_step.dependOn(&run_unit_tests.step);
for (test_targets) |t| {
if (broken_windows and t.os_tag == .windows) continue;
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
const mod_unit_tests = b.createModule(.{
.root_source_file = b.path("src/aws.zig"),
.target = b.resolveTargetQuery(t),
.optimize = optimize,
});
mod_unit_tests.addImport("smithy", mod_smithy);
mod_unit_tests.addImport("service_manifest", service_manifest_module);
mod_unit_tests.addImport("date", mod_date);
mod_unit_tests.addImport("zeit", mod_zeit);
mod_unit_tests.addImport("json", mod_json);
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const unit_tests = b.addTest(.{
.root_module = mod_unit_tests,
.filters = test_filters,
});
unit_tests.root_module.addImport("smithy", smithy_module);
unit_tests.root_module.addImport("service_manifest", service_manifest_module);
unit_tests.step.dependOn(cg);
unit_tests.use_llvm = !no_llvm;
@ -213,14 +246,10 @@ pub fn build(b: *Builder) !void {
// Creates a step for unit testing. This only builds the test executable
// but does not run it.
const smoke_test = b.addTest(.{
.root_source_file = b.path("src/aws.zig"),
.target = target,
.optimize = optimize,
.root_module = mod_aws,
.filters = test_filters,
});
smoke_test.use_llvm = !no_llvm;
smoke_test.root_module.addImport("smithy", smithy_module);
smoke_test.root_module.addImport("service_manifest", service_manifest_module);
smoke_test.step.dependOn(cg);
const run_smoke_test = b.addRunArtifact(smoke_test);

View file

@ -7,6 +7,7 @@
"build.zig.zon",
"src",
"codegen",
"lib",
"README.md",
"LICENSE",
},
@ -20,5 +21,15 @@
.url = "https://github.com/aws/aws-sdk-go-v2/archive/58cf6509525a12d64fd826da883bfdbacbd2f00e.tar.gz",
.hash = "122017a2f3081ce83c23e0c832feb1b8b4176d507b6077f522855dc774bcf83ee315",
},
.zeit = .{
.url = "git+https://github.com/rockorager/zeit#fb6557ad4bd0cd0f0f728ae978061d7fe992c528",
.hash = "zeit-0.6.0-5I6bk29nAgDhK6AVMtXMWhkKTYgUncrWjnlI_8X9DPSd",
},
.date = .{
.path = "lib/date",
},
.json = .{
.path = "lib/json",
},
},
}

View file

@ -1,150 +0,0 @@
const std = @import("std");
// options is a json.Options, but since we're using our hacked json.zig we don't want to
// specifically call this out
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (@typeInfo(@TypeOf(map)) == .optional) {
if (map == null)
return false
else
return serializeMapInternal(map.?, key, options, out_stream);
}
return serializeMapInternal(map, key, options, out_stream);
}
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
if (map.len == 0) {
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
try out_stream.writeByte('}');
return true;
}
// TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
var child_options = options;
if (child_options.whitespace) |*child_ws|
child_ws.indent_level += 1;
try out_stream.writeByte('"');
try out_stream.writeAll(key);
_ = try out_stream.write("\":");
if (options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('{');
if (options.whitespace) |_|
try out_stream.writeByte('\n');
for (map, 0..) |tag, i| {
if (tag.key == null or tag.value == null) continue;
// TODO: Deal with escaping and general "json.stringify" the values...
if (child_options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('"');
try jsonEscape(tag.key.?, child_options, out_stream);
_ = try out_stream.write("\":");
if (child_options.whitespace) |ws| {
if (ws.separator) {
try out_stream.writeByte(' ');
}
}
try out_stream.writeByte('"');
try jsonEscape(tag.value.?, child_options, out_stream);
try out_stream.writeByte('"');
if (i < map.len - 1) {
try out_stream.writeByte(',');
}
if (child_options.whitespace) |_|
try out_stream.writeByte('\n');
}
if (options.whitespace) |ws|
try ws.outputIndent(out_stream);
try out_stream.writeByte('}');
return true;
}
// code within jsonEscape lifted from json.zig in stdlib
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
var i: usize = 0;
while (i < value.len) : (i += 1) {
switch (value[i]) {
// normal ascii character
0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
// only 2 characters that *must* be escaped
'\\' => try out_stream.writeAll("\\\\"),
'\"' => try out_stream.writeAll("\\\""),
// solidus is optional to escape
'/' => {
if (options.string.String.escape_solidus) {
try out_stream.writeAll("\\/");
} else {
try out_stream.writeByte('/');
}
},
// control characters with short escapes
// TODO: option to switch between unicode and 'short' forms?
0x8 => try out_stream.writeAll("\\b"),
0xC => try out_stream.writeAll("\\f"),
'\n' => try out_stream.writeAll("\\n"),
'\r' => try out_stream.writeAll("\\r"),
'\t' => try out_stream.writeAll("\\t"),
else => {
const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
// control characters (only things left with 1 byte length) should always be printed as unicode escapes
if (ulen == 1 or options.string.String.escape_unicode) {
const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
try outputUnicodeEscape(codepoint, out_stream);
} else {
try out_stream.writeAll(value[i .. i + ulen]);
}
i += ulen - 1;
},
}
}
}
// outputUnicodeEscape and assert lifted from json.zig in stdlib
fn outputUnicodeEscape(
codepoint: u21,
out_stream: anytype,
) !void {
if (codepoint <= 0xFFFF) {
// If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
// then it may be represented as a six-character sequence: a reverse solidus, followed
// by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
} else {
assert(codepoint <= 0x10FFFF);
// To escape an extended character that is not in the Basic Multilingual Plane,
// the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
try out_stream.writeAll("\\u");
try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
}
}
/// This function invokes undefined behavior when `ok` is `false`.
/// In Debug and ReleaseSafe modes, calls to this function are always
/// generated, and the `unreachable` statement triggers a panic.
/// In ReleaseFast and ReleaseSmall modes, calls to this function are
/// optimized away, and in fact the optimizer is able to use the assertion
/// in its heuristics.
/// Inside a test block, it is best to use the `std.testing` module rather
/// than this function, because this function may not detect a test failure
/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
/// function is the correct function to use.
pub fn assert(ok: bool) void {
if (!ok) unreachable; // assertion failure
}

View file

@ -2,7 +2,6 @@ const std = @import("std");
const smithy = @import("smithy");
const snake = @import("snake.zig");
const Hasher = @import("Hasher.zig");
const json_zig = @embedFile("json.zig");
var verbose = false;
@ -33,8 +32,6 @@ pub fn main() anyerror!void {
if (std.mem.eql(u8, "--models", arg))
models_dir = try std.fs.cwd().openDir(args[i + 1], .{ .iterate = true });
}
// TODO: Seems like we should remove this in favor of a package
try output_dir.writeFile(.{ .sub_path = "json.zig", .data = json_zig });
// TODO: We need a different way to handle this file...
const manifest_file_started = false;
@ -186,8 +183,13 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
defer arena.deinit();
const allocator = arena.allocator();
_ = try writer.write("const std = @import(\"std\");\n");
_ = try writer.write("const serializeMap = @import(\"json.zig\").serializeMap;\n");
_ = try writer.write("const smithy = @import(\"smithy\");\n\n");
_ = try writer.write("const smithy = @import(\"smithy\");\n");
_ = try writer.write("const json = @import(\"json\");\n");
_ = try writer.write("const date = @import(\"date\");\n");
_ = try writer.write("const zeit = @import(\"zeit\");\n");
_ = try writer.write("\n");
_ = try writer.write("const serializeMap = json.serializeMap;\n");
_ = try writer.write("\n");
if (verbose) std.log.info("Processing file: {s}", .{file_name});
const service_names = generateServicesForFilePath(allocator, ";", file_name, writer) catch |err| {
std.log.err("Error processing file: {s}", .{file_name});
@ -716,7 +718,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
// The serializer will have to deal with the idea we might be an array
return try generateTypeFor(shape.set.member_target, writer, state, true);
},
.timestamp => |s| try generateSimpleTypeFor(s, "f128", writer),
.timestamp => |s| try generateSimpleTypeFor(s, "date.Timestamp", writer),
.blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
.boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
.double => |s| try generateSimpleTypeFor(s, "f64", writer),

41
lib/date/build.zig Normal file
View file

@ -0,0 +1,41 @@
const std = @import("std");
pub fn build(b: *std.Build) void {
    // Resolve the standard -Dtarget / -Doptimize options from the command line.
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Module exported to dependents as "date".
    const date_module = b.addModule("date", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .optimize = optimize,
    });

    // Static library artifact built from the same module.
    const static_lib = b.addLibrary(.{
        .linkage = .static,
        .name = "date",
        .root_module = date_module,
    });
    b.installArtifact(static_lib);

    // `zig build test` runs the module's unit tests.
    const unit_tests = b.addTest(.{
        .root_module = date_module,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);

    // External dependencies wired into the module.
    const zeit_dep = b.dependency("zeit", .{
        .target = target,
        .optimize = optimize,
    });
    date_module.addImport("zeit", zeit_dep.module("zeit"));

    const json_dep = b.dependency("json", .{
        .target = target,
        .optimize = optimize,
    });
    date_module.addImport("json", json_dep.module("json"));
}

20
lib/date/build.zig.zon Normal file
View file

@ -0,0 +1,20 @@
.{
.name = .date,
.version = "0.0.0",
.fingerprint = 0xaa9e377a226d739e, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{
.zeit = .{
.url = "git+https://github.com/rockorager/zeit#fb6557ad4bd0cd0f0f728ae978061d7fe992c528",
.hash = "zeit-0.6.0-5I6bk29nAgDhK6AVMtXMWhkKTYgUncrWjnlI_8X9DPSd",
},
.json = .{
.path = "../json",
},
},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}

206
lib/date/src/parsing.zig Normal file
View file

@ -0,0 +1,206 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.
const std = @import("std");
const log = std.log.scoped(.date);
const zeit = @import("zeit");
/// Calendar date and wall-clock time with one-second granularity.
/// There is no time-zone field; the conversion helpers in this file treat
/// all values as UTC.
pub const DateTime = struct {
day: u8,
month: u8, // 1-based (January == 1); see the tests at the bottom of this file
year: u16,
hour: u8,
minute: u8,
second: u8,
/// Convert a zeit.Instant to a DateTime (drops sub-second precision).
pub fn fromInstant(val: zeit.Instant) DateTime {
return fromTime(val.time());
}
/// Convert a zeit.Time to a DateTime.
/// NOTE(review): @intCast(val.year) panics (safe modes) for years outside
/// u16 range, e.g. negative years — confirm inputs are bounded.
pub fn fromTime(val: zeit.Time) DateTime {
return DateTime{
.day = val.day,
.month = @intFromEnum(val.month),
.year = @intCast(val.year),
.hour = val.hour,
.minute = val.minute,
.second = val.second,
};
}
/// Convert back to a zeit.Time; fields not stored here (sub-second, zone)
/// take zeit's defaults.
pub fn time(self: DateTime) zeit.Time {
return zeit.Time{
.day = @intCast(self.day),
.month = @enumFromInt(self.month),
.year = self.year,
.hour = @intCast(self.hour),
.minute = @intCast(self.minute),
.second = @intCast(self.second),
};
}
/// Build a zeit.Instant from this DateTime; errors propagate from zeit.instant.
pub fn instant(self: DateTime) !zeit.Instant {
return try zeit.instant(.{ .source = .{ .time = self.time() } });
}
};
/// Convert a unix timestamp (seconds since epoch) into a DateTime.
/// Panics if zeit cannot represent the timestamp.
pub fn timestampToDateTime(timestamp: zeit.Seconds) DateTime {
    const created = zeit.instant(.{
        .source = .{ .unix_timestamp = timestamp },
    }) catch @panic("Failed to create instant from timestamp");
    return DateTime.fromInstant(created);
}
/// Parse an RFC 1123 date like "Fri, 03 Jun 2022 18:12:36 GMT" straight to a
/// unix timestamp (seconds).
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    const parsed = try parseEnglishToDateTime(data);
    return try dateTimeToTimestamp(parsed);
}
/// Parse an RFC 1123 ("english") date string, e.g.
/// "Fri, 03 Jun 2022 18:12:36 GMT". May not handle dates before the epoch.
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    return DateTime.fromInstant(try zeit.instant(.{ .source = .{ .rfc1123 = data } }));
}
/// Parse an ISO 8601 date string straight to a unix timestamp (seconds).
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    const parsed = try parseIso8601ToDateTime(data);
    return try dateTimeToTimestamp(parsed);
}
const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts an ISO 8601 string to a DateTime. Accepts the basic format
/// (YYYYMMDDThhmmss, optionally with a trailing 'Z') and the extended format
/// (e.g. "2020-08-28T09:32:27.000Z"). The extended form must be Zulu/UTC
/// (trailing 'Z'); otherwise error.LocalTimeNotSupported is returned.
/// May not handle dates before the epoch.
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
// Basic format YYYYMMDDThhmmss
if (data.len == "YYYYMMDDThhmmss".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
if (data.len == "YYYYMMDDThhmmssZ".len and data[8] == 'T')
return try parseIso8601BasicFormatToDateTime(data);
var start: usize = 0;
var state = IsoParsingState.Start;
// Anything not explicitly set by our string would be 0
var rc = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
var zulu_time = false;
for (data, 0..) |ch, i| {
switch (ch) {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
if (state == .Start) state = .Year;
},
'?', '~', '%' => {
// These characters all specify the type of time (approximate, etc)
// and we will ignore
},
'.', '-', ':', 'T' => {
// Field delimiter: flush the digits accumulated since `start` into the
// field for the current state, then advance to the next state.
// (endIsoState's parseUnsigned rejects out-of-range values for us.)
const next_state = try endIsoState(state, &rc, data[start..i]);
state = next_state;
start = i + 1;
},
'Z' => zulu_time = true,
else => {
log.err("Invalid character: {c}", .{ch});
return error.InvalidCharacter;
},
}
}
if (!zulu_time) return error.LocalTimeNotSupported;
// We know we have a Z at the end of this, so let's grab the last bit
// of the string, minus the 'Z', and fly, eagles, fly!
_ = try endIsoState(state, &rc, data[start .. data.len - 1]);
return rc;
}
// Parse the ISO 8601 basic format: fixed field positions, "YYYYMMDDThhmmss".
// Any non-numeric field content surfaces as a parseUnsigned error.
fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
    const year = try std.fmt.parseUnsigned(u16, data[0..4], 10);
    const month = try std.fmt.parseUnsigned(u8, data[4..6], 10);
    const day = try std.fmt.parseUnsigned(u8, data[6..8], 10);
    const hour = try std.fmt.parseUnsigned(u8, data[9..11], 10);
    const minute = try std.fmt.parseUnsigned(u8, data[11..13], 10);
    const second = try std.fmt.parseUnsigned(u8, data[13..15], 10);
    return .{
        .year = year,
        .month = month,
        .day = day,
        .hour = hour,
        .minute = minute,
        .second = second,
    };
}
// Close out the field accumulated in `prev_data` for `current_state`, store
// the parsed value into `date`, and return the state for the next field.
// A single merged switch replaces the original transition/assignment pair.
fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
    log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
    // TODO: This won't handle signed, which Iso supports. For now, let's fail
    // explicitly (parseUnsigned rejects a leading sign).
    return switch (current_state) {
        .Start, .End => error.IllegalStateTransition,
        .Year => blk: {
            date.year = try std.fmt.parseUnsigned(u16, prev_data, 10);
            break :blk .Month;
        },
        .Month => blk: {
            date.month = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Day;
        },
        .Day => blk: {
            date.day = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Hour;
        },
        .Hour => blk: {
            date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Minute;
        },
        .Minute => blk: {
            date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Second;
        },
        .Second => blk: {
            date.second = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Millisecond;
        },
        // We throw the fractional part away - our granularity is 1 second
        .Millisecond => .End,
    };
}
/// Unix timestamp (seconds) for the given DateTime; errors propagate from zeit.
pub fn dateTimeToTimestamp(datetime: DateTime) !zeit.Seconds {
    const ins = try datetime.instant();
    return ins.unixTimestamp();
}
// Debug-log a DateTime as an ISO-8601-style UTC string.
// Fix: the seconds field used "{:0<2}" (left-aligned, zero-filled on the
// right), which would render e.g. 5 seconds as "50"; it now zero-pads on the
// left ("{:0>2}") like every other field.
fn printDateTime(dt: DateTime) void {
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0>2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}
/// Debug-log the current UTC date/time.
pub fn printNowUtc() void {
    const now = std.time.timestamp();
    printDateTime(timestampToDateTime(now));
}
// Round-trip and parsing tests. Expected values were computed with an
// external converter (see inline link); all times are UTC.
test "Convert timestamp to datetime" {
printDateTime(timestampToDateTime(std.time.timestamp()));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, timestampToDateTime(1598607147));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, timestampToDateTime(1604207167));
// Get time for date: https://wtools.io/convert-date-time-to-unix-time
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, timestampToDateTime(1440938160));
}
test "Convert datetime to timestamp" {
try std.testing.expectEqual(@as(i64, 1598607147), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }));
try std.testing.expectEqual(@as(i64, 1604207167), try dateTimeToTimestamp(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }));
try std.testing.expectEqual(@as(i64, 1440938160), try dateTimeToTimestamp(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }));
}
// Covers basic format, extended format with/without zero-padding, and
// fractional seconds (discarded).
test "Convert ISO8601 string to timestamp" {
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("20200828T093227"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 }, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
try std.testing.expectEqual(DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 }, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
try std.testing.expectEqual(DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 }, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 }));
}
test "Convert whatever AWS is sending us to timestamp" {
const string_date = "Fri, 03 Jun 2022 18:12:36 GMT";
try std.testing.expectEqual(DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 }, try parseEnglishToDateTime(string_date));
}

20
lib/date/src/root.zig Normal file
View file

@ -0,0 +1,20 @@
//! Public surface of the "date" module: re-exports from parsing.zig and
//! timestamp.zig so consumers only need to import "date".
const std = @import("std");
const testing = std.testing;
// String/timestamp <-> DateTime conversions.
const parsing = @import("parsing.zig");
pub const DateTime = parsing.DateTime;
pub const timestampToDateTime = parsing.timestampToDateTime;
pub const parseEnglishToTimestamp = parsing.parseEnglishToTimestamp;
pub const parseEnglishToDateTime = parsing.parseEnglishToDateTime;
pub const parseIso8601ToTimestamp = parsing.parseIso8601ToTimestamp;
pub const parseIso8601ToDateTime = parsing.parseIso8601ToDateTime;
pub const dateTimeToTimestamp = parsing.dateTimeToTimestamp;
pub const printNowUtc = parsing.printNowUtc;
// JSON-aware nanosecond timestamp type.
const timestamp = @import("timestamp.zig");
pub const DateFormat = timestamp.DateFormat;
pub const Timestamp = timestamp.Timestamp;
// Pull in the tests of everything re-exported above.
test {
testing.refAllDeclsRecursive(@This());
}

View file

@ -0,0 +1,68 @@
const std = @import("std");
const zeit = @import("zeit");
const json = @import("json");
/// Input formats recognized by Timestamp.parse.
pub const DateFormat = enum {
rfc1123,
iso8601,
};
/// Nanoseconds since the unix epoch, stored as a non-exhaustive enum so the
/// JSON layer can detect the type and use the parse/stringify hooks below.
pub const Timestamp = enum(zeit.Nanoseconds) {
    _,

    /// Serialize as a quoted RFC 1123 date string,
    /// e.g. "Wed, 23 Apr 2025 11:23:45 GMT". Errors propagate from zeit and
    /// from the writer.
    pub fn jsonStringify(value: Timestamp, options: json.StringifyOptions, out_stream: anytype) !void {
        _ = options; // stringify options do not affect the fixed date format
        const instant = try zeit.instant(.{
            .source = .{
                .unix_nano = @intFromEnum(value),
            },
        });
        try out_stream.writeAll("\"");
        try instant.time().gofmt(out_stream, "Mon, 02 Jan 2006 15:04:05 GMT");
        try out_stream.writeAll("\"");
    }

    /// Parse either an ISO 8601 timestamp (leading digit) or an RFC 1123
    /// date (leading day name) into a Timestamp.
    pub fn parse(val: []const u8) !Timestamp {
        // Fix: previously `val[0]` on an empty slice would panic (safe modes)
        // or invoke UB (ReleaseFast); fail cleanly instead.
        if (val.len == 0) return error.InvalidFormat;
        const date_format: DateFormat = if (std.ascii.isDigit(val[0])) .iso8601 else .rfc1123;
        const ins = try zeit.instant(.{
            .source = switch (date_format) {
                .iso8601 => .{ .iso8601 = val },
                .rfc1123 => .{ .rfc1123 = val },
            },
        });
        return @enumFromInt(ins.timestamp);
    }
};
// Round-trip: RFC 1123 string -> Timestamp -> JSON string should reproduce
// the (quoted) input.
test Timestamp {
const in_date = "Wed, 23 Apr 2025 11:23:45 GMT";
const expected_ts: Timestamp = @enumFromInt(1745407425000000000);
const actual_ts = try Timestamp.parse(in_date);
try std.testing.expectEqual(expected_ts, actual_ts);
// Serialize into a fixed buffer; countingWriter tracks how many bytes the
// stringify hook produced.
var buf: [100]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buf);
var counting_writer = std.io.countingWriter(fbs.writer());
try Timestamp.jsonStringify(expected_ts, .{}, counting_writer.writer());
const expected_json = "\"" ++ in_date ++ "\"";
const actual_json = buf[0..counting_writer.bytes_written];
try std.testing.expectEqualStrings(expected_json, actual_json);
}

29
lib/json/build.zig Normal file
View file

@ -0,0 +1,29 @@
const std = @import("std");
pub fn build(b: *std.Build) void {
    // Standard -Dtarget / -Doptimize command-line options.
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Module exported to dependents as "json".
    const json_module = b.addModule("json", .{
        .root_source_file = b.path("src/json.zig"),
        .target = target,
        .optimize = optimize,
    });

    // Also ship a static library artifact built from the module.
    const static_lib = b.addLibrary(.{
        .linkage = .static,
        .name = "json",
        .root_module = json_module,
    });
    b.installArtifact(static_lib);

    // `zig build test` runs the module's unit tests.
    const unit_tests = b.addTest(.{
        .root_module = json_module,
    });
    const run_unit_tests = b.addRunArtifact(unit_tests);
    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_unit_tests.step);
}

12
lib/json/build.zig.zon Normal file
View file

@ -0,0 +1,12 @@
.{
.name = .json,
.version = "0.0.0",
.fingerprint = 0x6b0725452065211c, // Changing this has security and trust implications.
.minimum_zig_version = "0.14.0",
.dependencies = .{},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
},
}

View file

@ -14,8 +14,116 @@ const testing = std.testing;
const mem = std.mem;
const maxInt = std.math.maxInt;
// pub const WriteStream = @import("json/write_stream.zig").WriteStream;
// pub const writeStream = @import("json/write_stream.zig").writeStream;
/// Serialize `map` (a slice of key/value structs, possibly optional) as a
/// JSON object under `key`. Returns false — writing nothing — when the
/// optional is null; otherwise delegates to serializeMapInternal.
pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    if (@typeInfo(@TypeOf(map)) == .optional) {
        const unwrapped = map orelse return false;
        return serializeMapInternal(unwrapped, key, options, out_stream);
    }
    return serializeMapInternal(map, key, options, out_stream);
}
// Writes `"key":` (plus a separator space when whitespace is configured).
// Extracted: this preamble was duplicated between the empty and non-empty
// paths of serializeMapInternal.
fn writeMapObjectKey(key: []const u8, options: anytype, out_stream: anytype) !void {
    try out_stream.writeByte('"');
    try out_stream.writeAll(key);
    _ = try out_stream.write("\":");
    if (options.whitespace) |ws| {
        if (ws.separator) {
            try out_stream.writeByte(' ');
        }
    }
}

// Serialize `map` (a slice of structs with optional `key`/`value` fields) as
// a JSON object under `key`. Entries whose key or value is null are skipped.
// Always returns true (the bool mirrors serializeMap's signature).
//
// Fix: the original emitted a comma based on the raw index (`i < map.len - 1`),
// so a skipped (null key/value) entry at the end of the slice left a trailing
// comma — invalid JSON. The comma is now based on the last entry that will
// actually be emitted.
fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
    // Child entries render one indent level deeper than the enclosing object.
    var child_options = options;
    if (child_options.whitespace) |*child_ws|
        child_ws.indent_level += 1;
    try writeMapObjectKey(key, options, out_stream);
    try out_stream.writeByte('{');
    if (map.len == 0) {
        try out_stream.writeByte('}');
        return true;
    }
    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
    if (options.whitespace) |_|
        try out_stream.writeByte('\n');
    // Index of the last entry that will actually be emitted, so the comma
    // logic cannot be fooled by skipped trailing entries.
    var last_emitted: ?usize = null;
    for (map, 0..) |tag, i| {
        if (tag.key != null and tag.value != null) last_emitted = i;
    }
    for (map, 0..) |tag, i| {
        if (tag.key == null or tag.value == null) continue;
        // TODO: Deal with escaping and general "json.stringify" the values...
        if (child_options.whitespace) |ws|
            try ws.outputIndent(out_stream);
        try out_stream.writeByte('"');
        try jsonEscape(tag.key.?, child_options, out_stream);
        _ = try out_stream.write("\":");
        if (child_options.whitespace) |ws| {
            if (ws.separator) {
                try out_stream.writeByte(' ');
            }
        }
        try out_stream.writeByte('"');
        try jsonEscape(tag.value.?, child_options, out_stream);
        try out_stream.writeByte('"');
        if (last_emitted) |last| {
            if (i < last) try out_stream.writeByte(',');
        }
        if (child_options.whitespace) |_|
            try out_stream.writeByte('\n');
    }
    if (options.whitespace) |ws|
        try ws.outputIndent(out_stream);
    try out_stream.writeByte('}');
    return true;
}
// code within jsonEscape lifted from json.zig in stdlib
// Write `value` to out_stream with JSON string escaping applied;
// options.string.String controls solidus and unicode escaping.
//
// Fix: the UTF-8 decode calls used `catch unreachable`, which is undefined
// behavior in ReleaseFast when the input is not valid UTF-8 (and this data
// can originate outside the process); a truncated multibyte sequence at the
// end of the slice could also read out of bounds. Both now return
// error.InvalidUtf8.
fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
    var i: usize = 0;
    while (i < value.len) : (i += 1) {
        switch (value[i]) {
            // normal ascii character
            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
            // only 2 characters that *must* be escaped
            '\\' => try out_stream.writeAll("\\\\"),
            '\"' => try out_stream.writeAll("\\\""),
            // solidus is optional to escape
            '/' => {
                if (options.string.String.escape_solidus) {
                    try out_stream.writeAll("\\/");
                } else {
                    try out_stream.writeByte('/');
                }
            },
            // control characters with short escapes
            // TODO: option to switch between unicode and 'short' forms?
            0x8 => try out_stream.writeAll("\\b"),
            0xC => try out_stream.writeAll("\\f"),
            '\n' => try out_stream.writeAll("\\n"),
            '\r' => try out_stream.writeAll("\\r"),
            '\t' => try out_stream.writeAll("\\t"),
            else => {
                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch return error.InvalidUtf8;
                if (i + ulen > value.len) return error.InvalidUtf8; // truncated sequence
                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                if (ulen == 1 or options.string.String.escape_unicode) {
                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch return error.InvalidUtf8;
                    try outputUnicodeEscape(codepoint, out_stream);
                } else {
                    try out_stream.writeAll(value[i .. i + ulen]);
                }
                i += ulen - 1;
            },
        }
    }
}
const StringEscapes = union(enum) {
None,
@ -1316,8 +1424,8 @@ pub const Value = union(enum) {
}
pub fn dump(self: Value) void {
var held = std.debug.getStderrMutex().acquire();
defer held.release();
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr = std.io.getStdErr().writer();
stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
@ -1597,12 +1705,22 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.@"enum" => |enumInfo| {
switch (token) {
.Number => |numberToken| {
if (!numberToken.is_integer) return error.UnexpectedToken;
if (!numberToken.is_integer) {
// probably is in scientific notation
const n = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
return try std.meta.intToEnum(T, @as(i128, @intFromFloat(n)));
}
const n = try std.fmt.parseInt(enumInfo.tag_type, numberToken.slice(tokens.slice, tokens.i - 1), 10);
return try std.meta.intToEnum(T, n);
},
.String => |stringToken| {
const source_slice = stringToken.slice(tokens.slice, tokens.i - 1);
if (std.meta.hasFn(T, "parse")) {
return try T.parse(source_slice);
}
switch (stringToken.escapes) {
.None => return std.meta.stringToEnum(T, source_slice) orelse return error.InvalidEnumTag,
.Some => {

View file

@ -1,17 +1,21 @@
const builtin = @import("builtin");
const std = @import("std");
const zeit = @import("zeit");
const awshttp = @import("aws_http.zig");
const json = @import("json.zig");
const json = @import("json");
const url = @import("url.zig");
const case = @import("case.zig");
const date = @import("date.zig");
const date = @import("date");
const servicemodel = @import("servicemodel.zig");
const xml_shaper = @import("xml_shaper.zig");
const xml_serializer = @import("xml_serializer.zig");
const scoped_log = std.log.scoped(.aws);
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
/// control all logs directly/indirectly used by aws sdk. Not recommended for
/// use under normal circumstances, but helpful for times when the zig logging
/// controls are insufficient (e.g. use in build script)
@ -91,7 +95,7 @@ pub const Options = struct {
pub const Diagnostics = struct {
http_code: i64,
response_body: []const u8,
allocator: std.mem.Allocator,
allocator: Allocator,
pub fn deinit(self: *Diagnostics) void {
self.allocator.free(self.response_body);
@ -113,12 +117,12 @@ pub const ClientOptions = struct {
proxy: ?std.http.Client.Proxy = null,
};
pub const Client = struct {
allocator: std.mem.Allocator,
allocator: Allocator,
aws_http: awshttp.AwsHttp,
const Self = @This();
pub fn init(allocator: std.mem.Allocator, options: ClientOptions) Self {
pub fn init(allocator: Allocator, options: ClientOptions) Self {
return Self{
.allocator = allocator,
.aws_http = awshttp.AwsHttp.init(allocator, options.proxy),
@ -228,7 +232,7 @@ pub fn Request(comptime request_action: anytype) type {
// We don't know if we need a body...guessing here, this should cover most
var buffer = std.ArrayList(u8).init(options.client.allocator);
defer buffer.deinit();
var nameAllocator = std.heap.ArenaAllocator.init(options.client.allocator);
var nameAllocator = ArenaAllocator.init(options.client.allocator);
defer nameAllocator.deinit();
if (Self.service_meta.aws_protocol == .rest_json_1) {
if (std.mem.eql(u8, "PUT", aws_request.method) or std.mem.eql(u8, "POST", aws_request.method)) {
@ -325,7 +329,7 @@ pub fn Request(comptime request_action: anytype) type {
// for a boxed member with no observable difference." But we're
// seeing a lot of differences here between spec and reality
//
var nameAllocator = std.heap.ArenaAllocator.init(options.client.allocator);
var nameAllocator = ArenaAllocator.init(options.client.allocator);
defer nameAllocator.deinit();
try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
@ -358,13 +362,16 @@ pub fn Request(comptime request_action: anytype) type {
const continuation = if (buffer.items.len > 0) "&" else "";
const query = if (Self.service_meta.aws_protocol == .query)
try std.fmt.allocPrint(options.client.allocator, "", .{})
""
else // EC2
try std.fmt.allocPrint(options.client.allocator, "?Action={s}&Version={s}", .{
action.action_name,
Self.service_meta.version,
});
defer options.client.allocator.free(query);
defer if (Self.service_meta.aws_protocol != .query) {
options.client.allocator.free(query);
};
// Note: EC2 avoided the Action={s}&Version={s} in the body, but it's
// required, so I'm not sure why that code was put in
@ -377,6 +384,7 @@ pub fn Request(comptime request_action: anytype) type {
buffer.items,
});
defer options.client.allocator.free(body);
return try Self.callAws(.{
.query = query,
.body = body,
@ -396,7 +404,8 @@ pub fn Request(comptime request_action: anytype) type {
},
);
defer response.deinit();
if (response.response_code != options.success_http_code) {
if (response.response_code != options.success_http_code and response.response_code != 404) {
try reportTraffic(options.client.allocator, "Call Failed", aws_request, response, log.err);
if (options.diagnostics) |d| {
d.http_code = response.response_code;
@ -425,6 +434,7 @@ pub fn Request(comptime request_action: anytype) type {
.header_name = @field(action.Response.http_header, f.name),
};
}
inline for (fields) |f| {
for (response.headers) |header| {
if (std.mem.eql(u8, header.name, f.?.header_name)) {
@ -441,7 +451,7 @@ pub fn Request(comptime request_action: anytype) type {
//
// Note: issues found on zig 0.9.0
setHeaderValue(
options.client.allocator,
full_response.arena.allocator(),
&full_response.response,
f.?.name,
f.?.T,
@ -463,7 +473,7 @@ pub fn Request(comptime request_action: anytype) type {
}
fn setHeaderValue(
allocator: std.mem.Allocator,
allocator: Allocator,
response: anytype,
comptime field_name: []const u8,
comptime field_type: type,
@ -483,51 +493,76 @@ pub fn Request(comptime request_action: anytype) type {
// First, we need to determine if we care about a response at all
// If the expected result has no fields, there's no sense in
// doing any more work. Let's bail early
comptime var expected_body_field_len = std.meta.fields(action.Response).len;
if (@hasDecl(action.Response, "http_header"))
const fields = @typeInfo(action.Response).@"struct".fields;
var expected_body_field_len = fields.len;
if (@hasDecl(action.Response, "http_header")) {
expected_body_field_len -= std.meta.fields(@TypeOf(action.Response.http_header)).len;
}
var buf_request_id: [256]u8 = undefined;
const request_id = try requestIdFromHeaders(&buf_request_id, options.client.allocator, aws_request, response);
const arena = ArenaAllocator.init(options.client.allocator);
if (@hasDecl(action.Response, "http_payload")) {
var rc = FullResponseType{
var rc = try FullResponseType.init(.{
.arena = arena,
.response = .{},
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = .{} },
.request_id = request_id,
.raw_parsed = .{ .raw = .{} },
.allocator = options.client.allocator,
};
});
const body_field = @field(rc.response, action.Response.http_payload);
const BodyField = @TypeOf(body_field);
if (BodyField == []const u8 or BodyField == ?[]const u8) {
expected_body_field_len = 0;
// We can't use body_field for this set - only @field will work
@field(rc.response, action.Response.http_payload) = try options.client.allocator.dupe(u8, response.body);
@field(rc.response, action.Response.http_payload) = try rc.arena.allocator().dupe(u8, response.body);
return rc;
}
rc.deinit();
}
// We don't care about the body if there are no fields we expect there...
if (std.meta.fields(action.Response).len == 0 or expected_body_field_len == 0) {
if (fields.len == 0 or expected_body_field_len == 0 or response.body.len == 0) {
// Makes sure we can't get here with an `action.Response` that has required fields
// Without this block there is a compilation error when running tests
// Perhaps there is a better way to handle this
{
comptime var required_fields = 0;
inline for (fields) |field| {
const field_type_info = @typeInfo(field.type);
if (field_type_info != .optional and field.defaultValue() == null) {
required_fields += 1;
}
}
if (required_fields > 0) unreachable;
}
// Do we care if an unexpected body comes in?
return FullResponseType{
return try FullResponseType.init(.{
.arena = arena,
.request_id = request_id,
.response = .{},
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = .{} },
.raw_parsed = .{ .raw = .{} },
.allocator = options.client.allocator,
};
});
}
const isJson = try isJsonResponse(response.headers);
if (!isJson) return try xmlReturn(aws_request, options, response);
return try jsonReturn(aws_request, options, response);
const content_type = try getContentType(response.headers);
return switch (content_type) {
.json => try jsonReturn(aws_request, options, response),
.xml => try xmlReturn(aws_request, options, response),
};
}
fn jsonReturn(aws_request: awshttp.HttpRequest, options: Options, response: awshttp.HttpResult) !FullResponseType {
var arena = ArenaAllocator.init(options.client.allocator);
const parser_options = json.ParseOptions{
.allocator = options.client.allocator,
.allocator = arena.allocator(),
.allow_camel_case_conversion = true, // new option
.allow_snake_case_conversion = true, // new option
.allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
@ -563,26 +598,24 @@ pub fn Request(comptime request_action: anytype) type {
// We can grab index [0] as structs are guaranteed by zig to be returned in the order
// declared, and we're declaring in that order in ServerResponse().
const real_response = @field(parsed_response, @typeInfo(response_types.NormalResponse).@"struct".fields[0].name);
return FullResponseType{
return try FullResponseType.init(.{
.arena = arena,
.response = @field(real_response, @typeInfo(@TypeOf(real_response)).@"struct".fields[0].name),
.response_metadata = .{
.request_id = try options.client.allocator.dupe(u8, real_response.ResponseMetadata.RequestId),
},
.parser_options = .{ .json = parser_options },
.request_id = real_response.ResponseMetadata.RequestId,
.raw_parsed = .{ .server = parsed_response },
.allocator = options.client.allocator,
};
});
} else {
// Conditions 2 or 3 (no wrapping)
return FullResponseType{
var buf_request_id: [256]u8 = undefined;
const request_id = try requestIdFromHeaders(&buf_request_id, options.client.allocator, aws_request, response);
return try FullResponseType.init(.{
.arena = arena,
.response = parsed_response,
.response_metadata = .{
.request_id = try requestIdFromHeaders(aws_request, response, options),
},
.parser_options = .{ .json = parser_options },
.request_id = request_id,
.raw_parsed = .{ .raw = parsed_response },
.allocator = options.client.allocator,
};
});
}
}
@ -634,7 +667,13 @@ pub fn Request(comptime request_action: anytype) type {
// }
//
// Big thing is that requestid, which we'll need to fetch "manually"
const xml_options = xml_shaper.ParseOptions{ .allocator = options.client.allocator, .elementToParse = findResult };
var arena = ArenaAllocator.init(options.client.allocator);
const xml_options = xml_shaper.ParseOptions{
.allocator = arena.allocator(),
.elementToParse = findResult,
};
var body: []const u8 = result.body;
var free_body = false;
if (result.body.len < 20) {
@ -655,24 +694,23 @@ pub fn Request(comptime request_action: anytype) type {
defer if (free_body) options.client.allocator.free(body);
const parsed = try xml_shaper.parse(action.Response, body, xml_options);
errdefer parsed.deinit();
// This needs to get into FullResponseType somehow: defer parsed.deinit();
const request_id = blk: {
if (parsed.document.root.getCharData("requestId")) |elem|
break :blk try options.client.allocator.dupe(u8, elem);
break :blk try requestIdFromHeaders(request, result, options);
};
defer options.client.allocator.free(request_id);
return FullResponseType{
.response = parsed.parsed_value,
.response_metadata = .{
.request_id = try options.client.allocator.dupe(u8, request_id),
},
.parser_options = .{ .xml = xml_options },
.raw_parsed = .{ .xml = parsed },
.allocator = options.client.allocator,
var buf_request_id: [256]u8 = undefined;
const request_id = blk: {
if (parsed.document.root.getCharData("requestId")) |elem| {
break :blk elem;
}
break :blk try requestIdFromHeaders(&buf_request_id, options.client.allocator, request, result);
};
return try FullResponseType.init(.{
.arena = arena,
.response = parsed.parsed_value,
.request_id = request_id,
.raw_parsed = .{ .xml = parsed },
});
}
const ServerResponseTypes = struct {
NormalResponse: type,
RawResponse: type,
@ -734,13 +772,11 @@ pub fn Request(comptime request_action: anytype) type {
fn ParsedJsonData(comptime T: type) type {
return struct {
parsed_response_ptr: *T,
allocator: std.mem.Allocator,
allocator: Allocator,
const MySelf = @This();
pub fn deinit(self: MySelf) void {
// This feels like it should result in a use after free, but it
// seems to be working?
self.allocator.destroy(self.parsed_response_ptr);
}
};
@ -749,6 +785,7 @@ pub fn Request(comptime request_action: anytype) type {
fn parseJsonData(comptime response_types: ServerResponseTypes, data: []const u8, options: Options, parser_options: json.ParseOptions) !ParsedJsonData(response_types.NormalResponse) {
// Now it's time to start looking at the actual data. Job 1 will
// be to figure out if this is a raw response or wrapped
const allocator = options.client.allocator;
// Extract the first json key
const key = firstJsonKey(data);
@ -758,8 +795,8 @@ pub fn Request(comptime request_action: anytype) type {
isOtherNormalResponse(response_types.NormalResponse, key);
var stream = json.TokenStream.init(data);
const parsed_response_ptr = blk: {
const ptr = try options.client.allocator.create(response_types.NormalResponse);
errdefer options.client.allocator.destroy(ptr);
const ptr = try allocator.create(response_types.NormalResponse);
errdefer allocator.destroy(ptr);
if (!response_types.isRawPossible or found_normal_json_response) {
ptr.* = (json.parse(response_types.NormalResponse, &stream, parser_options) catch |e| {
@ -802,7 +839,7 @@ pub fn Request(comptime request_action: anytype) type {
};
return ParsedJsonData(response_types.NormalResponse){
.parsed_response_ptr = parsed_response_ptr,
.allocator = options.client.allocator,
.allocator = allocator,
};
}
};
@ -829,6 +866,10 @@ fn coerceFromString(comptime T: type, val: []const u8) anyerror!T {
log.err("Invalid string representing {s}: {s}", .{ @typeName(T), val });
return e;
},
date.Timestamp => return date.Timestamp.parse(val) catch |e| {
log.debug("Failed to parse timestamp from string '{s}': {}", .{ val, e });
return e;
},
else => return val,
}
}
@ -852,14 +893,14 @@ fn parseInt(comptime T: type, val: []const u8) !T {
return rc;
}
fn generalAllocPrint(allocator: std.mem.Allocator, val: anytype) !?[]const u8 {
fn generalAllocPrint(allocator: Allocator, val: anytype) !?[]const u8 {
switch (@typeInfo(@TypeOf(val))) {
.optional => if (val) |v| return generalAllocPrint(allocator, v) else return null,
.array, .pointer => return try std.fmt.allocPrint(allocator, "{s}", .{val}),
else => return try std.fmt.allocPrint(allocator, "{any}", .{val}),
}
}
fn headersFor(allocator: std.mem.Allocator, request: anytype) ![]awshttp.Header {
fn headersFor(allocator: Allocator, request: anytype) ![]awshttp.Header {
log.debug("Checking for headers to include for type {}", .{@TypeOf(request)});
if (!@hasDecl(@TypeOf(request), "http_header")) return &[_]awshttp.Header{};
const http_header = @TypeOf(request).http_header;
@ -883,7 +924,7 @@ fn headersFor(allocator: std.mem.Allocator, request: anytype) ![]awshttp.Header
return headers.toOwnedSlice();
}
fn freeHeadersFor(allocator: std.mem.Allocator, request: anytype, headers: []const awshttp.Header) void {
fn freeHeadersFor(allocator: Allocator, request: anytype, headers: []const awshttp.Header) void {
if (!@hasDecl(@TypeOf(request), "http_header")) return;
const http_header = @TypeOf(request).http_header;
const fields = std.meta.fields(@TypeOf(http_header));
@ -910,23 +951,28 @@ fn firstJsonKey(data: []const u8) []const u8 {
log.debug("First json key: {s}", .{key});
return key;
}
fn isJsonResponse(headers: []const awshttp.Header) !bool {
/// Response body encodings the client can parse; selected from the
/// Content-Type header by getContentType().
pub const ContentType = enum {
    json,
    xml,
};
fn getContentType(headers: []const awshttp.Header) !ContentType {
// EC2 ignores our accept type, but technically query protocol only
// returns XML as well. So, we'll ignore the protocol here and just
// look at the return type
var isJson: ?bool = null;
for (headers) |h| {
if (std.ascii.eqlIgnoreCase("Content-Type", h.name)) {
if (std.mem.startsWith(u8, h.value, "application/json")) {
isJson = true;
return .json;
} else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.0")) {
isJson = true;
return .json;
} else if (std.mem.startsWith(u8, h.value, "application/x-amz-json-1.1")) {
isJson = true;
return .json;
} else if (std.mem.startsWith(u8, h.value, "text/xml")) {
isJson = false;
return .xml;
} else if (std.mem.startsWith(u8, h.value, "application/xml")) {
isJson = false;
return .xml;
} else {
log.err("Unexpected content type: {s}", .{h.value});
return error.UnexpectedContentType;
@ -934,11 +980,12 @@ fn isJsonResponse(headers: []const awshttp.Header) !bool {
break;
}
}
if (isJson == null) return error.ContentTypeNotFound;
return isJson.?;
return error.ContentTypeNotFound;
}
/// Get request ID from headers. Caller responsible for freeing memory
fn requestIdFromHeaders(request: awshttp.HttpRequest, response: awshttp.HttpResult, options: Options) ![]u8 {
/// Get request ID from headers.
/// Allocation is only used in case of an error. Caller does not need to free the returned buffer.
fn requestIdFromHeaders(buf: []u8, allocator: Allocator, request: awshttp.HttpRequest, response: awshttp.HttpResult) ![]u8 {
var rid: ?[]const u8 = null;
// This "thing" is called:
// * Host ID
@ -958,11 +1005,14 @@ fn requestIdFromHeaders(request: awshttp.HttpRequest, response: awshttp.HttpResu
host_id = header.value;
}
if (rid) |r| {
if (host_id) |h|
return try std.fmt.allocPrint(options.client.allocator, "{s}, host_id: {s}", .{ r, h });
return try options.client.allocator.dupe(u8, r);
if (host_id) |h| {
return try std.fmt.bufPrint(buf, "{s}, host_id: {s}", .{ r, h });
}
@memcpy(buf[0..r.len], r);
return buf[0..r.len];
}
try reportTraffic(options.client.allocator, "Request ID not found", request, response, log.err);
try reportTraffic(allocator, "Request ID not found", request, response, log.err);
return error.RequestIdNotFound;
}
fn ServerResponse(comptime action: anytype) type {
@ -1015,65 +1065,62 @@ fn ServerResponse(comptime action: anytype) type {
}
fn FullResponse(comptime action: anytype) type {
return struct {
response: action.Response,
response_metadata: struct {
request_id: []u8,
},
parser_options: union(enum) {
json: json.ParseOptions,
xml: xml_shaper.ParseOptions,
},
raw_parsed: union(enum) {
pub const ResponseMetadata = struct {
request_id: []const u8,
};
pub const RawParsed = union(enum) {
server: ServerResponse(action),
raw: action.Response,
xml: xml_shaper.Parsed(action.Response),
},
allocator: std.mem.Allocator,
};
pub const FullResponseOptions = struct {
response: action.Response = undefined,
request_id: []const u8,
raw_parsed: RawParsed = .{ .raw = undefined },
arena: ArenaAllocator,
};
response: action.Response = undefined,
raw_parsed: RawParsed = .{ .raw = undefined },
response_metadata: ResponseMetadata,
arena: ArenaAllocator,
const Self = @This();
pub fn deinit(self: Self) void {
switch (self.raw_parsed) {
// Server is json only (so far)
.server => json.parseFree(ServerResponse(action), self.raw_parsed.server, self.parser_options.json),
// Raw is json only (so far)
.raw => json.parseFree(action.Response, self.raw_parsed.raw, self.parser_options.json),
.xml => |xml| xml.deinit(),
}
self.allocator.free(self.response_metadata.request_id);
const Response = @TypeOf(self.response);
if (@hasDecl(Response, "http_header")) {
inline for (std.meta.fields(@TypeOf(Response.http_header))) |f| {
safeFree(self.allocator, @field(self.response, f.name));
}
}
if (@hasDecl(Response, "http_payload")) {
const body_field = @field(self.response, Response.http_payload);
const BodyField = @TypeOf(body_field);
if (BodyField == []const u8) {
self.allocator.free(body_field);
}
if (BodyField == ?[]const u8) {
if (body_field) |f|
self.allocator.free(f);
}
}
pub fn init(options: FullResponseOptions) !Self {
var arena = options.arena;
const request_id = try arena.allocator().dupe(u8, options.request_id);
return Self{
.arena = arena,
.response = options.response,
.raw_parsed = options.raw_parsed,
.response_metadata = .{
.request_id = request_id,
},
};
}
pub fn deinit(self: Self) void {
self.arena.deinit();
}
};
}
fn safeFree(allocator: std.mem.Allocator, obj: anytype) void {
/// Frees `obj` with `allocator` when it is a pointer/slice, recursing through
/// optionals; anything else (scalars, structs, null) is a no-op.
fn safeFree(allocator: Allocator, obj: anytype) void {
    switch (@typeInfo(@TypeOf(obj))) {
        // Unwrap and recurse so `?[]const u8` and friends get handled too.
        .optional => if (obj) |inner| safeFree(allocator, inner),
        // Pointers and slices are the only things actually freed.
        .pointer => allocator.free(obj),
        // Value types own no heap memory here; nothing to do.
        else => {},
    }
}
fn queryFieldTransformer(allocator: std.mem.Allocator, field_name: []const u8) anyerror![]const u8 {
/// Converts a snake_case struct field name to the PascalCase form used on the
/// wire by the query protocol. Presumably the caller owns (and must free) the
/// returned slice — confirm against case.snakeToPascal.
fn queryFieldTransformer(allocator: Allocator, field_name: []const u8) anyerror![]const u8 {
    return try case.snakeToPascal(allocator, field_name);
}
fn buildPath(
allocator: std.mem.Allocator,
allocator: Allocator,
raw_uri: []const u8,
comptime ActionRequest: type,
request: anytype,
@ -1160,7 +1207,7 @@ fn uriEncodeByte(char: u8, writer: anytype, encode_slash: bool) !void {
}
}
fn buildQuery(allocator: std.mem.Allocator, request: anytype) ![]const u8 {
fn buildQuery(allocator: Allocator, request: anytype) ![]const u8 {
// query should look something like this:
// pub const http_query = .{
// .master_region = "MasterRegion",
@ -1282,7 +1329,7 @@ pub fn IgnoringWriter(comptime WriterType: type) type {
}
fn reportTraffic(
allocator: std.mem.Allocator,
allocator: Allocator,
info: []const u8,
request: awshttp.HttpRequest,
response: awshttp.HttpResult,
@ -1301,7 +1348,8 @@ fn reportTraffic(
}
try writer.print("\tContent-Type: {s}\n\n", .{request.content_type});
_ = try writer.write("Request Body:\n");
try writer.print("Request URL: {s}\n", .{request.path});
try writer.writeAll("Request Body:\n");
try writer.print("-------------\n{s}\n", .{request.body});
_ = try writer.write("-------------\n");
_ = try writer.write("Response Headers:\n");
@ -1484,7 +1532,7 @@ test "basic json request serialization" {
// for a boxed member with no observable difference." But we're
// seeing a lot of differences here between spec and reality
//
var nameAllocator = std.heap.ArenaAllocator.init(allocator);
var nameAllocator = ArenaAllocator.init(allocator);
defer nameAllocator.deinit();
try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
try std.testing.expectEqualStrings(
@ -1568,8 +1616,8 @@ test {
std.testing.refAllDecls(xml_shaper);
}
const TestOptions = struct {
allocator: std.mem.Allocator,
arena: ?*std.heap.ArenaAllocator = null,
allocator: Allocator,
arena: ?*ArenaAllocator = null,
server_port: ?u16 = null,
server_remaining_requests: usize = 1,
server_response: []const u8 = "unset",
@ -1658,8 +1706,8 @@ const TestOptions = struct {
fn threadMain(options: *TestOptions) !void {
// https://github.com/ziglang/zig/blob/d2be725e4b14c33dbd39054e33d926913eee3cd4/lib/compiler/std-docs.zig#L22-L54
options.arena = try options.allocator.create(std.heap.ArenaAllocator);
options.arena.?.* = std.heap.ArenaAllocator.init(options.allocator);
options.arena = try options.allocator.create(ArenaAllocator);
options.arena.?.* = ArenaAllocator.init(options.allocator);
const allocator = options.arena.?.allocator();
options.allocator = allocator;
@ -1670,7 +1718,7 @@ fn threadMain(options: *TestOptions) !void {
options.test_server_runtime_uri = try std.fmt.allocPrint(options.allocator, "http://127.0.0.1:{d}", .{options.server_port.?});
log.debug("server listening at {s}", .{options.test_server_runtime_uri.?});
log.info("starting server thread, tid {d}", .{std.Thread.getCurrentId()});
// var arena = std.heap.ArenaAllocator.init(options.allocator);
// var arena = ArenaAllocator.init(options.allocator);
// defer arena.deinit();
// var aa = arena.allocator();
// We're in control of all requests/responses, so this flag will tell us
@ -1750,7 +1798,7 @@ fn serveRequest(options: *TestOptions, request: *std.http.Server.Request) !void
////////////////////////////////////////////////////////////////////////
const TestSetup = struct {
allocator: std.mem.Allocator,
allocator: Allocator,
request_options: TestOptions,
server_thread: std.Thread = undefined,
creds: aws_auth.Credentials = undefined,
@ -2475,10 +2523,11 @@ test "json_1_1: ECR timestamps" {
// defer std.testing.log_level = old;
// std.testing.log_level = .debug;
const allocator = std.testing.allocator;
var test_harness = TestSetup.init(.{
.allocator = allocator,
.server_response =
\\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.7385984915E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
\\{"authorizationData":[{"authorizationToken":"***","expiresAt":"2022-05-17T06:56:13.652000+00:00","proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
// \\{"authorizationData":[{"authorizationToken":"***","expiresAt":1.738598491557E9,"proxyEndpoint":"https://146325435496.dkr.ecr.us-west-2.amazonaws.com"}]}
,
.server_response_headers = &.{
@ -2503,7 +2552,13 @@ test "json_1_1: ECR timestamps" {
try std.testing.expectEqualStrings("***", call.response.authorization_data.?[0].authorization_token.?);
try std.testing.expectEqualStrings("https://146325435496.dkr.ecr.us-west-2.amazonaws.com", call.response.authorization_data.?[0].proxy_endpoint.?);
// try std.testing.expectEqual(@as(i64, 1.73859841557E9), call.response.authorization_data.?[0].expires_at.?);
try std.testing.expectEqual(@as(f128, 1.7385984915E9), call.response.authorization_data.?[0].expires_at.?);
const expected_ins = try zeit.instant(.{
.source = .{ .iso8601 = "2022-05-17T06:56:13.652000+00:00" },
});
const expected_ts: date.Timestamp = @enumFromInt(expected_ins.timestamp);
try std.testing.expectEqual(expected_ts, call.response.authorization_data.?[0].expires_at.?);
}
var test_error_log_enabled = true;
test "test server timeout works" {

View file

@ -1,7 +1,7 @@
const std = @import("std");
const base = @import("aws_http_base.zig");
const auth = @import("aws_authentication.zig");
const date = @import("date.zig");
const date = @import("date");
const scoped_log = std.log.scoped(.aws_signing);

View file

@ -1,414 +0,0 @@
// From https://gist.github.com/WoodyAtHome/3ef50b17f0fa2860ac52b97af12f8d15
// Translated from German. We don't need any local time for this use case, and conversion
// really requires the TZ DB.
const std = @import("std");
const log = std.log.scoped(.date);
pub const DateTime = struct { day: u8, month: u8, year: u16, hour: u8, minute: u8, second: u8 };
const SECONDS_PER_DAY = 86400; //* 24* 60 * 60 */
const DAYS_PER_YEAR = 365; //* Normal year (no leap year) */

/// Converts a unix timestamp (seconds since 1970-01-01, UTC) into a
/// broken-down DateTime, applying Gregorian leap-year rules.
/// NOTE(review): the @intCast below asserts timestamp >= 0, so pre-epoch
/// dates are not supported.
pub fn timestampToDateTime(timestamp: i64) DateTime {
    // from https://de.wikipedia.org/wiki/Unixzeit
    const unixtime = @as(u64, @intCast(timestamp));
    const DAYS_IN_4_YEARS = 1461; //* 4*365 + 1 */
    const DAYS_IN_100_YEARS = 36524; //* 100*365 + 25 - 1 */
    const DAYS_IN_400_YEARS = 146097; //* 400*365 + 100 - 4 + 1 */
    const DAY_NUMBER_ADJUSTED_1970_01_01 = 719468; //* Day number relates to March 1st */

    var dayN: u64 = DAY_NUMBER_ADJUSTED_1970_01_01 + unixtime / SECONDS_PER_DAY;
    const seconds_since_midnight: u64 = unixtime % SECONDS_PER_DAY;
    var temp: u64 = 0;

    // Leap year rules for Gregorian Calendars
    // Any year divisible by 100 is not a leap year unless also divisible by 400
    temp = 4 * (dayN + DAYS_IN_100_YEARS + 1) / DAYS_IN_400_YEARS - 1;
    var year = @as(u16, @intCast(100 * temp));
    dayN -= DAYS_IN_100_YEARS * temp + temp / 4;

    // For Julian calendars, each year divisible by 4 is a leap year
    temp = 4 * (dayN + DAYS_PER_YEAR + 1) / DAYS_IN_4_YEARS - 1;
    year += @as(u16, @intCast(temp));
    dayN -= DAYS_PER_YEAR * temp + temp / 4;

    // dayN calculates the days of the year in relation to March 1
    var month = @as(u8, @intCast((5 * dayN + 2) / 153));
    const day = @as(u8, @intCast(dayN - (@as(u64, @intCast(month)) * 153 + 2) / 5 + 1));
    // 153 = 31+30+31+30+31 Days for the 5 months from March through July
    // 153 = 31+30+31+30+31 Days for the 5 months from August through December
    // 31+28 Days for January and February (see below)
    // +2: Rounding adjustment
    // +1: The first day in March is March 1st (not March 0)

    month += 3; // Convert from the day that starts on March 1st, to a human year */
    if (month > 12) { // months 13 and 14 become 1 (January) and 2 (February) of the next year
        month -= 12;
        year += 1;
    }

    const hours = @as(u8, @intCast(seconds_since_midnight / 3600));
    const minutes = @as(u8, @intCast(seconds_since_midnight % 3600 / 60));
    const seconds = @as(u8, @intCast(seconds_since_midnight % 60));

    return DateTime{ .day = day, .month = month, .year = year, .hour = hours, .minute = minutes, .second = seconds };
}
/// Parses an English-format date ("Fri, 03 Jun 2022 18:12:36 GMT") straight
/// to a unix timestamp. See parseEnglishToDateTime for format constraints.
pub fn parseEnglishToTimestamp(data: []const u8) !i64 {
    const parsed = try parseEnglishToDateTime(data);
    return try dateTimeToTimestamp(parsed);
}
const EnglishParsingState = enum { Start, Day, Month, Year, Hour, Minute, Second, End };
/// Converts an English-format date string to a DateTime. May not handle dates
/// before the epoch. Input must look like "Fri, 03 Jun 2022 18:12:36 GMT";
/// the trailing "GMT" is required.
pub fn parseEnglishToDateTime(data: []const u8) !DateTime {
    // Fri, 03 Jun 2022 18:12:36 GMT
    if (!std.mem.endsWith(u8, data, "GMT")) return error.InvalidFormat;

    // Anything the string does not explicitly set stays 0.
    var result = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    var state: EnglishParsingState = .Start;
    var token_start: usize = 0;
    var index: usize = 0;
    while (index < data.len) : (index += 1) {
        const ch = data[index];
        if (ch == ',') continue; // commas carry no information here
        if (ch == ' ' or ch == ':') {
            // Token boundary: commit the token we just walked past and move
            // the state machine forward.
            state = try endEnglishState(state, &result, data[token_start..index]);
            token_start = index + 1;
        }
        // Every other character is part of the current token; we have to be
        // fairly trusting about this format.
    }
    return result;
}
/// Commits one whitespace/colon-delimited token (`prev_data`) into `date`
/// according to `current_state`, and returns the state the next token
/// belongs to. Fails with IllegalStateTransition when tokens appear after the
/// final (Second) field, or with a parse error for malformed numbers/months.
fn endEnglishState(current_state: EnglishParsingState, date: *DateTime, prev_data: []const u8) !EnglishParsingState {
    log.debug("endEnglishState. Current state '{}', data: {s}", .{ current_state, prev_data });
    // One switch: each arm stores the token just consumed (if any) and yields
    // the successor state.
    return switch (current_state) {
        .Start => .Day, // nothing precedes the first token
        .Day => blk: {
            date.day = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Month;
        },
        .Month => blk: {
            date.month = try parseEnglishMonth(prev_data);
            break :blk .Year;
        },
        .Year => blk: {
            date.year = try std.fmt.parseUnsigned(u16, prev_data, 10);
            break :blk .Hour;
        },
        .Hour => blk: {
            date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Minute;
        },
        .Minute => blk: {
            date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Second;
        },
        .Second => blk: {
            date.second = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .End;
        },
        // Any further token after the seconds field is invalid input.
        .End => error.IllegalStateTransition,
    };
}
/// Maps an English month name (or any prefix starting with its first three
/// letters, case-insensitive) to its 1-based month number.
fn parseEnglishMonth(data: []const u8) !u8 {
    const month_prefixes = [_][]const u8{
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    };
    for (month_prefixes, 1..) |prefix, month_number| {
        if (std.ascii.startsWithIgnoreCase(data, prefix))
            return @intCast(month_number);
    }
    return error.InvalidMonth;
}
/// Converts an ISO 8601 string to a Unix timestamp (seconds since epoch).
pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
    const parsed = try parseIso8601ToDateTime(data);
    return dateTimeToTimestamp(parsed);
}
// Token-order states for the ISO 8601 extended-format parser below
const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
/// Converts an ISO 8601 string to a DateTime. Accepts the basic format
/// ("YYYYMMDDThhmmss", optional trailing 'Z') and the extended format
/// (e.g. "2020-08-28T9:32:27Z"). Extended-format input must be Zulu (UTC);
/// local times are rejected. May not handle dates before the epoch.
pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
    // Basic format has a fixed length and a 'T' separator at index 8
    // (length is checked first, so data[8] is always in bounds here)
    if ((data.len == "YYYYMMDDThhmmss".len or data.len == "YYYYMMDDThhmmssZ".len) and data[8] == 'T')
        return try parseIso8601BasicFormatToDateTime(data);
    // Anything not explicitly set by our string remains 0
    var result = DateTime{ .year = 0, .month = 0, .day = 0, .hour = 0, .minute = 0, .second = 0 };
    var state: IsoParsingState = .Start;
    var token_start: usize = 0;
    var saw_zulu = false;
    for (data, 0..) |char, idx| {
        switch (char) {
            '0'...'9' => {
                // First digit moves us out of Start; the year token begins
                if (state == .Start) state = .Year;
            },
            '.', '-', ':', 'T' => {
                // Delimiter: close out the current token and move to the next field
                state = try endIsoState(state, &result, data[token_start..idx]);
                token_start = idx + 1;
            },
            'Z' => saw_zulu = true,
            '?', '~', '%' => {
                // ISO qualifiers for approximate/uncertain times - ignored
            },
            else => {
                log.err("Invalid character: {c}", .{char});
                return error.InvalidCharacter;
            },
        }
    }
    if (!saw_zulu) return error.LocalTimeNotSupported;
    // We know the string ends in 'Z', so finish the last token without it
    _ = try endIsoState(state, &result, data[token_start .. data.len - 1]);
    return result;
}
/// Parses ISO 8601 basic format "YYYYMMDDThhmmss" using fixed field offsets.
/// A trailing 'Z', if present, is simply never read. Callers are expected to
/// have verified length and the 'T' at index 8.
fn parseIso8601BasicFormatToDateTime(data: []const u8) !DateTime {
    const year = try std.fmt.parseUnsigned(u16, data[0..4], 10);
    const month = try std.fmt.parseUnsigned(u8, data[4..6], 10);
    const day = try std.fmt.parseUnsigned(u8, data[6..8], 10);
    // index 8 is the 'T' separator and is skipped
    const hour = try std.fmt.parseUnsigned(u8, data[9..11], 10);
    const minute = try std.fmt.parseUnsigned(u8, data[11..13], 10);
    const second = try std.fmt.parseUnsigned(u8, data[13..15], 10);
    return DateTime{
        .year = year,
        .month = month,
        .day = day,
        .hour = hour,
        .minute = minute,
        .second = second,
    };
}
/// Parses the token that just ended for `current_state` into `date` and
/// returns the state expected for the next token. Errors if the machine is
/// driven past `.End` or before any digits were seen.
fn endIsoState(current_state: IsoParsingState, date: *DateTime, prev_data: []const u8) !IsoParsingState {
    log.debug("endIsoState. Current state '{}', data: {s}", .{ current_state, prev_data });
    // NOTE: parseUnsigned rejects a leading '-', so signed years (which ISO
    // 8601 allows) fail explicitly rather than misparse
    return switch (current_state) {
        .Start, .End => error.IllegalStateTransition,
        .Year => blk: {
            date.year = try std.fmt.parseUnsigned(u16, prev_data, 10);
            break :blk .Month;
        },
        .Month => blk: {
            date.month = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Day;
        },
        .Day => blk: {
            date.day = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Hour;
        },
        .Hour => blk: {
            date.hour = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Minute;
        },
        .Minute => blk: {
            date.minute = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Second;
        },
        .Second => blk: {
            date.second = try std.fmt.parseUnsigned(u8, prev_data, 10);
            break :blk .Millisecond;
        },
        // Sub-second precision is discarded - our granularity is 1 second
        .Millisecond => .End,
    };
}
/// Converts a DateTime to a Unix timestamp: seconds elapsed since
/// 1970-01-01T00:00:00Z (negative for earlier dates).
pub fn dateTimeToTimestamp(datetime: DateTime) !i64 {
    const unix_epoch = DateTime{
        .year = 1970,
        .month = 1,
        .day = 1,
        .hour = 0,
        .minute = 0,
        .second = 0,
    };
    return secondsBetween(unix_epoch, datetime);
}
// Error set for timestamp conversion; returned when a DateTime component
// is outside its valid range (see validateDatetime)
const DateTimeToTimestampError = error{
    DateTimeOutOfRange,
};
/// Signed number of seconds from `start` to `end` (negative when `end` is
/// earlier). Both arguments are validated; returns DateTimeOutOfRange for
/// malformed components. Recursion normalizes `start` to January 1st of its
/// year, so the main path only measures whole years plus an offset into the
/// end year.
fn secondsBetween(start: DateTime, end: DateTime) DateTimeToTimestampError!i64 {
    try validateDatetime(start);
    try validateDatetime(end);
    // Keep start <= end; flip the sign for the reversed interval
    if (end.year < start.year) return -1 * try secondsBetween(end, start);
    if (start.month != 1 or
        start.day != 1 or
        start.hour != 0 or
        start.minute != 0 or
        start.second != 0)
    {
        // Normalize start to the beginning of its year and subtract the
        // portion of that year we skipped over
        const seconds_into_start_year = secondsFromBeginningOfYear(
            start.year,
            start.month,
            start.day,
            start.hour,
            start.minute,
            start.second,
        );
        const new_start = DateTime{
            .year = start.year,
            .month = 1,
            .day = 1,
            .hour = 0,
            .minute = 0,
            .second = 0,
        };
        return (try secondsBetween(new_start, end)) - seconds_into_start_year;
    }
    const leap_years_between = leapYearsBetween(start.year, end.year);
    const years_diff = end.year - start.year;
    // log.debug("Years from epoch: {d}, Leap years: {d}", .{ years_diff, leap_years_between });
    // Widen to i64 before multiplying: the previous i32 day count overflowed
    // in `days_diff * SECONDS_PER_DAY` for dates past ~2038, and a u16
    // intermediate could overflow for spans over ~179 years
    const days_diff: i64 = (@as(i64, years_diff) * DAYS_PER_YEAR) + leap_years_between;
    // log.debug("Days with leap year, without month: {d}", .{days_diff});
    const seconds_into_year = secondsFromBeginningOfYear(
        end.year,
        end.month,
        end.day,
        end.hour,
        end.minute,
        end.second,
    );
    return (days_diff * SECONDS_PER_DAY) + @as(i64, seconds_into_year);
}
/// Rejects DateTime values with out-of-range components. Month and day are
/// 1-based, so 0 is invalid for both; previously 0 slipped through and later
/// caused an out-of-bounds panic in secondsFromBeginningOfYear instead of a
/// clean error. Day 31 is accepted for every month (no per-month check).
fn validateDatetime(dt: DateTime) !void {
    if (dt.month < 1 or dt.month > 12 or
        dt.day < 1 or dt.day > 31 or
        dt.hour >= 24 or
        dt.minute >= 60 or
        dt.second >= 60) return error.DateTimeOutOfRange;
}
/// Seconds elapsed from the start of `year` to the given moment within it.
/// Month and day are 1-based; hour/minute/second are 0-based.
fn secondsFromBeginningOfYear(year: u16, month: u8, day: u8, hour: u8, minute: u8, second: u8) u32 {
    const days_in_month: [12]u5 = if (isLeapYear(year))
        .{ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }
    else
        .{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
    // Sum the whole months preceding `month` (months are 1-based, the
    // array is 0-based)
    var days: u32 = 0;
    var m: usize = 1;
    while (m != month) : (m += 1)
        days += days_in_month[m - 1];
    // The current day is excluded (-1) because days are 1-based while
    // hours/minutes are 0-based; the partial day is covered below
    days += day - 1;
    // From here on, convert everything to seconds
    var seconds: u32 = days * SECONDS_PER_DAY;
    seconds += @as(u32, hour) * 60 * 60;
    seconds += @as(u32, minute) * 60;
    seconds += @as(u32, second);
    return seconds;
}
/// Gregorian leap-year rule: divisible by 4, except centuries, which must
/// be divisible by 400.
fn isLeapYear(year: u16) bool {
    if (year % 400 == 0) return true;
    if (year % 100 == 0) return false;
    return year % 4 == 0;
}
/// Counts Gregorian leap years in the half-open range [inclusive, exclusive).
/// Argument order does not matter; the smaller year is treated as the start.
fn leapYearsBetween(start_year_inclusive: u16, end_year_exclusive: u16) u16 {
    const low = @min(start_year_inclusive, end_year_exclusive);
    const high = @max(start_year_inclusive, end_year_exclusive);
    // Advance to the first multiple of 4 at or after `low`
    var year = low;
    while (year % 4 != 0 and year < high) year += 1;
    if (year == high) return 0; // no candidates at all, e.g. 1971-1973
    // From here we can step by 4; only centuries need the extra /400 check
    var count: u16 = 0;
    while (year < high) : (year += 4) {
        if (year % 100 != 0 or year % 400 == 0) count += 1;
    }
    return count;
}
/// Logs the datetime in ISO 8601 extended format at debug level.
fn printDateTime(dt: DateTime) void {
    // The seconds specifier previously used "{:0<2}" (left-aligned,
    // zero-filled), which renders 5 seconds as "50"; ">" right-aligns so
    // it zero-pads to "05" like every other component.
    log.debug("{:0>4}-{:0>2}-{:0>2}T{:0>2}:{:0>2}:{:0>2}Z", .{
        dt.year,
        dt.month,
        dt.day,
        dt.hour,
        dt.minute,
        dt.second,
    });
}
/// Logs the current UTC wall-clock time at debug level.
pub fn printNowUtc() void {
    const now = std.time.timestamp();
    printDateTime(timestampToDateTime(now));
}
test "Convert timestamp to datetime" {
    printDateTime(timestampToDateTime(std.time.timestamp()));
    // Reference values from https://wtools.io/convert-date-time-to-unix-time
    const cases = [_]struct { ts: i64, expected: DateTime }{
        .{ .ts = 1598607147, .expected = .{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 } },
        .{ .ts = 1604207167, .expected = .{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 } },
        .{ .ts = 1440938160, .expected = .{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 } },
    };
    for (cases) |case|
        try std.testing.expectEqual(case.expected, timestampToDateTime(case.ts));
}
test "Convert datetime to timestamp" {
    // Round-trip partners of the timestamp -> datetime cases above
    const cases = [_]struct { expected: i64, dt: DateTime }{
        .{ .expected = 1598607147, .dt = .{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 } },
        .{ .expected = 1604207167, .dt = .{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 } },
        .{ .expected = 1440938160, .dt = .{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 } },
    };
    for (cases) |case|
        try std.testing.expectEqual(case.expected, try dateTimeToTimestamp(case.dt));
}
test "Convert ISO8601 string to timestamp" {
    // Basic and extended formats must agree on the same instant
    const aug28 = DateTime{ .year = 2020, .month = 8, .day = 28, .hour = 9, .minute = 32, .second = 27 };
    try std.testing.expectEqual(aug28, try parseIso8601ToDateTime("20200828T093227"));
    try std.testing.expectEqual(aug28, try parseIso8601ToDateTime("2020-08-28T9:32:27Z"));
    // Single-digit fields are accepted
    const nov1 = DateTime{ .year = 2020, .month = 11, .day = 1, .hour = 5, .minute = 6, .second = 7 };
    try std.testing.expectEqual(nov1, try parseIso8601ToDateTime("2020-11-01T5:06:7Z"));
    // Fractional seconds are parsed but discarded
    const aug30 = DateTime{ .year = 2015, .month = 8, .day = 30, .hour = 12, .minute = 36, .second = 0 };
    try std.testing.expectEqual(aug30, try parseIso8601ToDateTime("2015-08-30T12:36:00.000Z"));
}
test "Convert datetime to timestamp before 1970" {
    // Dates preceding the epoch produce a negative timestamp
    const pre_epoch = DateTime{ .year = 1955, .month = 10, .day = 5, .hour = 16, .minute = 39, .second = 45 };
    try std.testing.expectEqual(@as(i64, -449392815), try dateTimeToTimestamp(pre_epoch));
}
test "Convert whatever AWS is sending us to timestamp" {
    // IMF-fixdate style header value, as produced by AWS services
    const expected = DateTime{ .year = 2022, .month = 6, .day = 3, .hour = 18, .minute = 12, .second = 36 };
    try std.testing.expectEqual(expected, try parseEnglishToDateTime("Fri, 03 Jun 2022 18:12:36 GMT"));
}

View file

@ -1,6 +1,6 @@
const std = @import("std");
const aws = @import("aws.zig");
const json = @import("json.zig");
const json = @import("json");
var verbose: u8 = 0;

View file

@ -1,6 +1,6 @@
const std = @import("std");
const xml = @import("xml.zig");
const date = @import("date.zig");
const date = @import("date");
const log = std.log.scoped(.xml_shaper);
@ -162,8 +162,10 @@ fn parseInternal(comptime T: type, element: *xml.Element, options: ParseOptions)
return try parseInternal(optional_info.child, element, options);
}
},
.@"enum" => |enum_info| {
_ = enum_info;
.@"enum" => {
if (T == date.Timestamp) {
return try date.Timestamp.parse(element.children.items[0].CharData);
}
// const numeric: ?enum_info.tag_type = std.fmt.parseInt(enum_info.tag_type, element.children.items[0].CharData, 10) catch null;
// if (numeric) |num| {
// return std.meta.intToEnum(T, num);