Compare commits: 535205e947 ... d02272d12c

10 commits, newest first:

d02272d12c
c80a65ed50
c6b3f8d6e1
80a76b0998
f612b3798a
0368c27c2c
2d10c6e356
8ec16dbb1d
98b0c4127f
87116cb69e
codegen/src/json.zig (new file, +124 lines)
@@ -0,0 +1,124 @@
+const std = @import("std");
+
+// options is a json.Options, but since we're using our hacked json.zig we don't want to
+// specifically call this out
+pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
+    if (map.len == 0) return true;
+    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
+    var child_options = options;
+    if (child_options.whitespace) |*child_ws|
+        child_ws.indent_level += 1;
+
+    try out_stream.writeByte('"');
+    try out_stream.writeAll(key);
+    _ = try out_stream.write("\":");
+    if (options.whitespace) |ws| {
+        if (ws.separator) {
+            try out_stream.writeByte(' ');
+        }
+    }
+    try out_stream.writeByte('{');
+    if (options.whitespace) |_|
+        try out_stream.writeByte('\n');
+    for (map) |tag, i| {
+        if (tag.key == null or tag.value == null) continue;
+        // TODO: Deal with escaping and general "json.stringify" the values...
+        if (child_options.whitespace) |ws|
+            try ws.outputIndent(out_stream);
+        try out_stream.writeByte('"');
+        try jsonEscape(tag.key.?, child_options, out_stream);
+        _ = try out_stream.write("\":");
+        if (child_options.whitespace) |ws| {
+            if (ws.separator) {
+                try out_stream.writeByte(' ');
+            }
+        }
+        try out_stream.writeByte('"');
+        try jsonEscape(tag.value.?, child_options, out_stream);
+        try out_stream.writeByte('"');
+        if (i < map.len - 1) {
+            try out_stream.writeByte(',');
+        }
+        if (child_options.whitespace) |_|
+            try out_stream.writeByte('\n');
+    }
+    if (options.whitespace) |ws|
+        try ws.outputIndent(out_stream);
+    try out_stream.writeByte('}');
+    return true;
+}
+
+// code within jsonEscape lifted from json.zig in stdlib
+fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
+    var i: usize = 0;
+    while (i < value.len) : (i += 1) {
+        switch (value[i]) {
+            // normal ascii character
+            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
+            // only 2 characters that *must* be escaped
+            '\\' => try out_stream.writeAll("\\\\"),
+            '\"' => try out_stream.writeAll("\\\""),
+            // solidus is optional to escape
+            '/' => {
+                if (options.string.String.escape_solidus) {
+                    try out_stream.writeAll("\\/");
+                } else {
+                    try out_stream.writeByte('/');
+                }
+            },
+            // control characters with short escapes
+            // TODO: option to switch between unicode and 'short' forms?
+            0x8 => try out_stream.writeAll("\\b"),
+            0xC => try out_stream.writeAll("\\f"),
+            '\n' => try out_stream.writeAll("\\n"),
+            '\r' => try out_stream.writeAll("\\r"),
+            '\t' => try out_stream.writeAll("\\t"),
+            else => {
+                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
+                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
+                if (ulen == 1 or options.string.String.escape_unicode) {
+                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
+                    try outputUnicodeEscape(codepoint, out_stream);
+                } else {
+                    try out_stream.writeAll(value[i .. i + ulen]);
+                }
+                i += ulen - 1;
+            },
+        }
+    }
+}
+
+// outputUnicodeEscape and assert lifted from json.zig in stdlib
+fn outputUnicodeEscape(
+    codepoint: u21,
+    out_stream: anytype,
+) !void {
+    if (codepoint <= 0xFFFF) {
+        // If the character is in the Basic Multilingual Plane (U+0000 through U+FFFF),
+        // then it may be represented as a six-character sequence: a reverse solidus, followed
+        // by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+    } else {
+        assert(codepoint <= 0x10FFFF);
+        // To escape an extended character that is not in the Basic Multilingual Plane,
+        // the character is represented as a 12-character sequence, encoding the UTF-16 surrogate pair.
+        const high = @intCast(u16, (codepoint - 0x10000) >> 10) + 0xD800;
+        const low = @intCast(u16, codepoint & 0x3FF) + 0xDC00;
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+        try out_stream.writeAll("\\u");
+        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+    }
+}
+
+/// This function invokes undefined behavior when `ok` is `false`.
+/// In Debug and ReleaseSafe modes, calls to this function are always
+/// generated, and the `unreachable` statement triggers a panic.
+/// In ReleaseFast and ReleaseSmall modes, calls to this function are
+/// optimized away, and in fact the optimizer is able to use the assertion
+/// in its heuristics.
+/// Inside a test block, it is best to use the `std.testing` module rather
+/// than this function, because this function may not detect a test failure
+/// in ReleaseFast and ReleaseSmall mode. Outside of a test block, this assert
+/// function is the correct function to use.
+pub fn assert(ok: bool) void {
+    if (!ok) unreachable; // assertion failure
+}
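To make the new serializer's contract concrete, a usage sketch (not part of the change set): serializeMap takes a slice of key/value structs plus an options value shaped like the hacked json.zig options. The Whitespace, Options, and Tag types below are stand-ins invented for the example, mimicking only the fields serializeMap and jsonEscape actually touch:

// Hypothetical stand-ins for the hacked json.zig option types
const Whitespace = struct {
    indent_level: u64 = 0,
    separator: bool = true,
    pub fn outputIndent(self: @This(), out_stream: anytype) !void {
        try out_stream.writeByteNTimes(' ', 4 * self.indent_level);
    }
};
const Options = struct {
    whitespace: ?Whitespace = null, // null keeps output compact
    string: struct {
        String: struct { escape_solidus: bool = false, escape_unicode: bool = false } = .{},
    } = .{},
};
const Tag = struct { key: ?[]const u8, value: ?[]const u8 };

test "serializeMap writes a map member as a JSON object" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    const tags = [_]Tag{.{ .key = "Environment", .value = "production" }};
    // returns true to signal the field was handled by the custom serializer
    _ = try serializeMap(tags[0..], "tags", Options{}, buf.writer());
    try std.testing.expectEqualStrings("\"tags\":{\"Environment\":\"production\"}", buf.items);
}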
codegen/src/main.zig

@@ -1,6 +1,7 @@
 const std = @import("std");
 const smithy = @import("smithy");
 const snake = @import("snake.zig");
+const json_zig = @embedFile("json.zig");

 pub fn main() anyerror!void {
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
@@ -10,6 +11,9 @@ pub fn main() anyerror!void {
     const args = try std.process.argsAlloc(allocator);
     defer std.process.argsFree(allocator, args);
     const stdout = std.io.getStdOut().writer();
+    const json_file = try std.fs.cwd().createFile("json.zig", .{});
+    defer json_file.close();
+    try json_file.writer().writeAll(json_zig);
     const manifest_file = try std.fs.cwd().createFile("service_manifest.zig", .{});
     defer manifest_file.close();
     const manifest = manifest_file.writer();
@@ -42,6 +46,8 @@ fn processFile(arg: []const u8, stdout: anytype, manifest: anytype) !void {
     file = try std.fs.cwd().createFile(filename, .{ .truncate = true });
     errdefer file.close();
     writer = &file.writer();
+    _ = try writer.write("const std = @import(\"std\");\n");
+    _ = try writer.write("const serializeMap = @import(\"json.zig\").serializeMap;\n");
     _ = try writer.write("const smithy = @import(\"smithy\");\n\n");
     std.log.info("Processing file: {s}", .{arg});
     const service_names = generateServicesForFilePath(allocator, ";", arg, writer) catch |err| {
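Taken together, the two hunks above mean codegen now ships its hacked serializer next to the generated code: main() writes the embedded json.zig into the output directory, and each generated model file begins with a preamble along these lines (reconstructed from the write calls above; generated output, not diff content):

const std = @import("std");
const serializeMap = @import("json.zig").serializeMap;
const smithy = @import("smithy");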
@@ -68,10 +74,9 @@ fn generateServices(allocator: *std.mem.Allocator, comptime _: []const u8, file:
     defer allocator.free(json);
     const model = try smithy.parse(allocator, json);
     defer model.deinit();
-    const ShapeInfo = @TypeOf(model.shapes[0]); // assume we have at least one shape
-    var shapes = std.StringHashMap(ShapeInfo).init(allocator);
+    var shapes = std.StringHashMap(smithy.ShapeInfo).init(allocator);
     defer shapes.deinit();
-    var services = std.ArrayList(ShapeInfo).init(allocator);
+    var services = std.ArrayList(smithy.ShapeInfo).init(allocator);
     defer services.deinit();
     for (model.shapes) |shape| {
         try shapes.put(shape.id, shape);
@@ -118,7 +123,7 @@ fn generateServices(allocator: *std.mem.Allocator, comptime _: []const u8, file:
         try writer.print("pub const name: []const u8 = \"{s}\";\n", .{name});
         // TODO: This really should just be ".whatevs". We're fully qualifying here, which isn't typical
         try writer.print("pub const aws_protocol: smithy.AwsProtocol = smithy.{s};\n\n", .{aws_protocol});
-        _ = try writer.write("pub const service_metadata : struct {\n");
+        _ = try writer.write("pub const service_metadata: struct {\n");
         try writer.print("    version: []const u8 = \"{s}\",\n", .{version});
         try writer.print("    sdk_id: []const u8 = \"{s}\",\n", .{sdk_id});
         try writer.print("    arn_namespace: []const u8 = \"{s}\",\n", .{arn_namespace});
@@ -131,7 +136,7 @@ fn generateServices(allocator: *std.mem.Allocator, comptime _: []const u8, file:

         // Operations
         for (service.shape.service.operations) |op|
-            try generateOperation(allocator, shapes.get(op).?, shapes, writer, constant_name);
+            try generateOperation(allocator, shapes.get(op).?, shapes, writer);
     }
     return constant_names.toOwnedSlice();
 }
@@ -155,67 +160,108 @@ fn constantName(allocator: *std.mem.Allocator, id: []const u8) ![]const u8 {
     // Not a special case - just snake it
     return try snake.fromPascalCase(allocator, id);
 }
-fn generateOperation(allocator: *std.mem.Allocator, operation: smithy.ShapeInfo, shapes: anytype, writer: anytype, service: []const u8) !void {
+const GenerationState = struct {
+    type_stack: *std.ArrayList(*const smithy.ShapeInfo),
+    // we will need some sort of "type decls needed" for recursive structures
+    allocator: *std.mem.Allocator,
+    indent_level: u64,
+    all_required: bool,
+};
+
+fn outputIndent(state: GenerationState, writer: anytype) !void {
+    const n_chars = 4 * state.indent_level;
+    try writer.writeByteNTimes(' ', n_chars);
+}
+fn generateOperation(allocator: *std.mem.Allocator, operation: smithy.ShapeInfo, shapes: std.StringHashMap(smithy.ShapeInfo), writer: anytype) !void {
     const snake_case_name = try snake.fromPascalCase(allocator, operation.name);
     defer allocator.free(snake_case_name);

-    const prefix = "    ";
     var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
     defer type_stack.deinit();
+    const state = GenerationState{
+        .type_stack = &type_stack,
+        .allocator = allocator,
+        .indent_level = 1,
+        .all_required = false,
+    };
+    var child_state = state;
+    child_state.indent_level += 1;
     // indent should start at 4 spaces here
     const operation_name = avoidReserved(snake_case_name);
     try writer.print("pub const {s}: struct ", .{operation_name});
     _ = try writer.write("{\n");
     for (operation.shape.operation.traits) |trait| {
         if (trait == .http) {
-            _ = try writer.write("    pub const http_config = .{\n");
-            try writer.print("        .method = \"{s}\",\n", .{trait.http.method});
-            try writer.print("        .uri = \"{s}\",\n", .{trait.http.uri});
-            try writer.print("        .success_code = {d},\n", .{trait.http.code});
-            _ = try writer.write("    };\n\n");
+            try outputIndent(state, writer);
+            _ = try writer.write("pub const http_config = .{\n");
+            try outputIndent(child_state, writer);
+            try writer.print(".method = \"{s}\",\n", .{trait.http.method});
+            try outputIndent(child_state, writer);
+            try writer.print(".uri = \"{s}\",\n", .{trait.http.uri});
+            try outputIndent(child_state, writer);
+            try writer.print(".success_code = {d},\n", .{trait.http.code});
+            try outputIndent(state, writer);
+            _ = try writer.write("};\n\n");
         }
     }

-    try writer.print("    action_name: []const u8 = \"{s}\",\n", .{operation.name});
-    _ = try writer.write("    Request: type = ");
+    try outputIndent(state, writer);
+    try writer.print("action_name: []const u8 = \"{s}\",\n", .{operation.name});
+    try outputIndent(state, writer);
+    _ = try writer.write("Request: type = ");
     if (operation.shape.operation.input) |member| {
-        try generateTypeFor(allocator, member, shapes, writer, prefix, false, &type_stack, false);
+        if (try generateTypeFor(member, shapes, writer, state, false)) unreachable; // we expect only structs here
         _ = try writer.write("\n");
-        try generateMetadataFunction(service, operation_name, prefix, writer);
+        try generateMetadataFunction(operation_name, state, writer);
     } else {
         _ = try writer.write("struct {\n");
-        try generateMetadataFunction(service, operation_name, prefix, writer);
+        try generateMetadataFunction(operation_name, state, writer);
     }
     _ = try writer.write(",\n");
-    _ = try writer.write("    Response: type = ");
+    try outputIndent(state, writer);
+    _ = try writer.write("Response: type = ");
     if (operation.shape.operation.output) |member| {
-        try generateTypeFor(allocator, member, shapes, writer, "    ", false, &type_stack, true);
+        if (try generateTypeFor(member, shapes, writer, state, true)) unreachable; // we expect only structs here
     } else _ = try writer.write("struct {}"); // we want to maintain consistency with other ops
     _ = try writer.write(",\n");

     if (operation.shape.operation.errors) |errors| {
-        _ = try writer.write("    ServiceError: type = error{\n");
+        try outputIndent(state, writer);
+        _ = try writer.write("ServiceError: type = error{\n");
         for (errors) |err| {
             const err_name = getErrorName(shapes.get(err).?.name); // need to remove "exception"
-            try writer.print("        {s},\n", .{err_name});
+            try outputIndent(child_state, writer);
+            try writer.print("{s},\n", .{err_name});
         }
-        _ = try writer.write("    },\n");
+        try outputIndent(state, writer);
+        _ = try writer.write("},\n");
     }
     _ = try writer.write("} = .{};\n");
 }

-fn generateMetadataFunction(_: []const u8, operation_name: []const u8, comptime prefix: []const u8, writer: anytype) !void {
+fn generateMetadataFunction(operation_name: []const u8, state: GenerationState, writer: anytype) !void {
     // TODO: Shove these lines in here, and also the else portion
     // pub fn metaInfo(self: @This()) struct { service: @TypeOf(sts), action: @TypeOf(sts.get_caller_identity) } {
     //     return .{ .service = sts, .action = sts.get_caller_identity };
     // }
     // We want to add a short "get my parents" function into the response
-    try writer.print("{s}    ", .{prefix});
+    var child_state = state;
+    child_state.indent_level += 1;
+    try outputIndent(child_state, writer);
     _ = try writer.write("pub fn metaInfo() struct { ");
     try writer.print("service_metadata: @TypeOf(service_metadata), action: @TypeOf({s})", .{operation_name});
-    _ = try writer.write(" } {\n" ++ prefix ++ "        return .{ .service_metadata = service_metadata, ");
+    _ = try writer.write(" } {\n");
+    child_state.indent_level += 1;
+    try outputIndent(child_state, writer);
+    _ = try writer.write("return .{ .service_metadata = service_metadata, ");
     try writer.print(".action = {s}", .{operation_name});
-    _ = try writer.write(" };\n" ++ prefix ++ "    }\n" ++ prefix ++ "}");
+    _ = try writer.write(" };\n");
+    child_state.indent_level -= 1;
+    try outputIndent(child_state, writer);
+    _ = try writer.write("}\n");
+    try outputIndent(state, writer);
+    try writer.writeByte('}');
 }
 fn getErrorName(err_name: []const u8) []const u8 {
     if (endsWith("Exception", err_name))
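For orientation, the reworked generateOperation emits operation declarations shaped roughly like this. The method, uri, and field names here are invented, and service_metadata is assumed to be the declaration generated earlier in the same file; this is an illustrative sketch of generated output, not diff content:

pub const get_thing: struct {
    pub const http_config = .{
        .method = "GET",
        .uri = "/things/{Id}",
        .success_code = 200,
    };

    action_name: []const u8 = "GetThing",
    Request: type = struct {
        id: ?[]const u8 = null,
        pub fn metaInfo() struct { service_metadata: @TypeOf(service_metadata), action: @TypeOf(get_thing) } {
            return .{ .service_metadata = service_metadata, .action = get_thing };
        }
    },
    Response: type = struct {
        name: ?[]const u8 = null,
    },
} = .{};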
@@ -231,7 +277,8 @@ fn endsWith(item: []const u8, str: []const u8) bool {
     return std.mem.eql(u8, item, str[str.len - item.len ..]);
 }
 /// return type is anyerror!void as this is a recursive function, so the compiler cannot properly infer error types
-fn generateTypeFor(allocator: *std.mem.Allocator, shape_id: []const u8, shapes: anytype, writer: anytype, prefix: []const u8, all_required: bool, type_stack: anytype, end_structure: bool) anyerror!void {
+fn generateTypeFor(shape_id: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo), writer: anytype, state: GenerationState, end_structure: bool) anyerror!bool {
+    var rc = false;
     if (shapes.get(shape_id) == null) {
         std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{shape_id});
         return error.InvalidType;
@@ -242,7 +289,7 @@ fn generateTypeFor(allocator: *std.mem.Allocator, shape_id: []const u8, shapes:
     const shape = shape_info.shape;
     // Check for ourselves up the stack
     var self_occurences: u8 = 0;
-    for (type_stack.items) |i| {
+    for (state.type_stack.items) |i| {
         // NOTE: shapes.get isn't providing a consistent pointer - is it allocating each time?
         // we will therefore need to compare ids
         if (std.mem.eql(u8, i.*.id, shape_info.id))
@@ -264,61 +311,78 @@ fn generateTypeFor(allocator: *std.mem.Allocator, shape_id: []const u8, shapes:
         // type to properly reference. Realistically, AWS or the service
         // must be blocking deep recursion somewhere or this would be a great
         // DOS attack
-        try generateSimpleTypeFor("nothing", "[]const u8", writer, all_required);
+        try generateSimpleTypeFor("nothing", "[]const u8", writer);
         std.log.warn("Type cycle detected, limiting depth. Type: {s}", .{shape_id});
-        // std.log.info(" Type stack:\n", .{});
-        // for (type_stack.items) |i|
-        //     std.log.info(" {s}", .{i.*.id});
-        return;
+        // if (std.mem.eql(u8, "com.amazonaws.workmail#Timestamp", shape_id)) {
+        //     std.log.info(" Type stack:\n", .{});
+        //     for (state.type_stack.items) |i|
+        //         std.log.info(" {s}", .{i.*.id});
+        // }
+        return false; // not a map
     }
-    try type_stack.append(&shape_info);
+    try state.type_stack.append(&shape_info);
+    defer _ = state.type_stack.pop();
     switch (shape) {
         .structure => {
-            try generateComplexTypeFor(allocator, shape.structure.members, "struct", shapes, writer, prefix, all_required, type_stack);
+            try generateComplexTypeFor(shape_id, shape.structure.members, "struct", shapes, writer, state);
             if (end_structure) {
                 // epilog
-                try writer.print("{s}", .{prefix});
+                try outputIndent(state, writer);
                 _ = try writer.write("}");
             }
         },
         .uniontype => {
-            try generateComplexTypeFor(allocator, shape.uniontype.members, "union", shapes, writer, prefix, all_required, type_stack);
+            try generateComplexTypeFor(shape_id, shape.uniontype.members, "union", shapes, writer, state);
             // epilog
-            try writer.print("{s}", .{prefix});
+            try outputIndent(state, writer);
             _ = try writer.write("}");
         },
-        .string => |s| try generateSimpleTypeFor(s, "[]const u8", writer, all_required),
-        .integer => |s| try generateSimpleTypeFor(s, "i64", writer, all_required),
+        .string => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
+        .integer => |s| try generateSimpleTypeFor(s, "i64", writer),
         .list => {
             _ = try writer.write("[]");
-            try generateTypeFor(allocator, shape.list.member_target, shapes, writer, prefix, all_required, type_stack, true);
+            // The serializer will have to deal with the idea we might be an array
+            return try generateTypeFor(shape.list.member_target, shapes, writer, state, true);
         },
         .set => {
             _ = try writer.write("[]");
-            try generateTypeFor(allocator, shape.set.member_target, shapes, writer, prefix, all_required, type_stack, true);
+            // The serializer will have to deal with the idea we might be an array
+            return try generateTypeFor(shape.set.member_target, shapes, writer, state, true);
         },
-        .timestamp => |s| try generateSimpleTypeFor(s, "i64", writer, all_required),
-        .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer, all_required),
-        .boolean => |s| try generateSimpleTypeFor(s, "bool", writer, all_required),
-        .double => |s| try generateSimpleTypeFor(s, "f64", writer, all_required),
-        .float => |s| try generateSimpleTypeFor(s, "f32", writer, all_required),
-        .long => |s| try generateSimpleTypeFor(s, "i64", writer, all_required),
+        .timestamp => |s| try generateSimpleTypeFor(s, "i64", writer),
+        .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
+        .boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
+        .double => |s| try generateSimpleTypeFor(s, "f64", writer),
+        .float => |s| try generateSimpleTypeFor(s, "f32", writer),
+        .long => |s| try generateSimpleTypeFor(s, "i64", writer),
         .map => {
             _ = try writer.write("[]struct {\n");
-            const new_prefix = try std.fmt.allocPrint(allocator, "    {s}", .{prefix});
-            defer allocator.free(new_prefix);
-            try writer.print("{s}    key: ", .{prefix});
-            if (!all_required) try writeOptional(shape.map.traits, writer, null);
-            try generateTypeFor(allocator, shape.map.key, shapes, writer, prefix, all_required, type_stack, true);
-            if (!all_required) try writeOptional(shape.map.traits, writer, " = null");
+            var child_state = state;
+            child_state.indent_level += 1;
+            try outputIndent(child_state, writer);
+            _ = try writer.write("key: ");
+            try writeOptional(shape.map.traits, writer, null);
+            var sub_maps = std.ArrayList([]const u8).init(state.allocator);
+            defer sub_maps.deinit();
+            if (try generateTypeFor(shape.map.key, shapes, writer, child_state, true))
+                try sub_maps.append("key");
+            try writeOptional(shape.map.traits, writer, " = null");
             _ = try writer.write(",\n");
-            try writer.print("{s}    value: ", .{prefix});
-            if (!all_required) try writeOptional(shape.map.traits, writer, null);
-            try generateTypeFor(allocator, shape.map.key, shapes, writer, prefix, all_required, type_stack, true);
-            if (!all_required) try writeOptional(shape.map.traits, writer, " = null");
+            try outputIndent(child_state, writer);
+            _ = try writer.write("value: ");
+            try writeOptional(shape.map.traits, writer, null);
+            if (try generateTypeFor(shape.map.value, shapes, writer, child_state, true))
+                try sub_maps.append("value");
+            try writeOptional(shape.map.traits, writer, " = null");
             _ = try writer.write(",\n");
-            _ = try writer.write(prefix);
+            if (sub_maps.items.len > 0) {
+                _ = try writer.write("\n");
+                try writeStringify(state, sub_maps.items, writer);
+            }
+            try outputIndent(state, writer);
             _ = try writer.write("}");
+
+            rc = true;
         },
         else => {
             std.log.err("encountered unimplemented shape type {s} for shape_id {s}. Generated code will not compile", .{ @tagName(shape), shape_id });
@@ -326,25 +390,19 @@ fn generateTypeFor(allocator: *std.mem.Allocator, shape_id: []const u8, shapes:
             // return error{UnimplementedShapeType}.UnimplementedShapeType;
         },
     }
-    _ = type_stack.pop();
+    return rc;
 }

-// fn generateSimpleTypeFor(shape: anytype, type_name: []const u8, writer: anytype, _: bool) !void {
-fn generateSimpleTypeFor(_: anytype, type_name: []const u8, writer: anytype, all_required: bool) !void {
-    // current compiler checks unused variables, but can't handle multiple unused
-    // function parameters. We don't want to change the signature in case we need to work with
-    // these in the future, so this stupid code is only here to trick the compiler
-    if (all_required or !all_required)
-        _ = try writer.write(type_name); // This had required stuff but the problem was elsewhere. Better to leave as function just in case
+fn generateSimpleTypeFor(_: anytype, type_name: []const u8, writer: anytype) !void {
+    _ = try writer.write(type_name); // This had required stuff but the problem was elsewhere. Better to leave as function just in case
 }

-fn generateComplexTypeFor(allocator: *std.mem.Allocator, members: []smithy.TypeMember, type_type_name: []const u8, shapes: anytype, writer: anytype, prefix: []const u8, all_required: bool, type_stack: anytype) anyerror!void {
+fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo), writer: anytype, state: GenerationState) anyerror!void {
+    _ = shape_id;
     const Mapping = struct { snake: []const u8, json: []const u8 };
-    var json_field_name_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    var json_field_name_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
     defer {
-        for (json_field_name_mappings.items) |mapping| {
-            allocator.free(mapping.snake);
-        }
+        for (json_field_name_mappings.items) |mapping|
+            state.allocator.free(mapping.snake);
         json_field_name_mappings.deinit();
     }
     // There is an httpQueryParams trait as well, but nobody is using it. API GW
@@ -353,28 +411,31 @@ fn generateComplexTypeFor(allocator: *std.mem.Allocator, members: []smithy.TypeM
     // Same with httpPayload
     //
     // httpLabel is interesting - right now we just assume anything can be used - do we need to track this?
-    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
     defer {
-        for (http_query_mappings.items) |mapping| {
-            allocator.free(mapping.snake);
-        }
+        for (http_query_mappings.items) |mapping|
+            state.allocator.free(mapping.snake);
         http_query_mappings.deinit();
     }
-    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
     defer {
-        for (http_header_mappings.items) |mapping| {
-            allocator.free(mapping.snake);
-        }
+        for (http_header_mappings.items) |mapping|
+            state.allocator.free(mapping.snake);
         http_header_mappings.deinit();
     }
+    var map_fields = std.ArrayList([]const u8).init(state.allocator);
+    defer {
+        for (map_fields.items) |f| state.allocator.free(f);
+        map_fields.deinit();
+    }
     // prolog. We'll rely on caller to get the spacing correct here
     _ = try writer.write(type_type_name);
     _ = try writer.write(" {\n");
+    var child_state = state;
+    child_state.indent_level += 1;
     for (members) |member| {
-        const new_prefix = try std.fmt.allocPrint(allocator, "    {s}", .{prefix});
-        defer allocator.free(new_prefix);
         // This is our mapping
-        const snake_case_member = try snake.fromPascalCase(allocator, member.name);
+        const snake_case_member = try snake.fromPascalCase(state.allocator, member.name);
         // So it looks like some services have duplicate names?! Check out "httpMethod"
         // in API Gateway. Not sure what we're supposed to do there. Checking the go
         // sdk, they move this particular duplicate to 'http_method' - not sure yet
@@ -384,20 +445,24 @@ fn generateComplexTypeFor(allocator: *std.mem.Allocator, members: []smithy.TypeM
            switch (trait) {
                .json_name => {
                    found_name_trait = true;
-                    json_field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .json = trait.json_name });
+                    json_field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .json = trait.json_name });
                },
-                .http_query => http_query_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .json = trait.http_query }),
-                .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .json = trait.http_header }),
+                .http_query => http_query_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .json = trait.http_query }),
+                .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .json = trait.http_header }),
                else => {},
            }
        }
        if (!found_name_trait)
-            json_field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .json = member.name });
-        defer allocator.free(snake_case_member);
-        try writer.print("{s}    {s}: ", .{ prefix, avoidReserved(snake_case_member) });
-        if (!all_required) try writeOptional(member.traits, writer, null);
-        try generateTypeFor(allocator, member.target, shapes, writer, new_prefix, all_required, type_stack, true);
-        if (!all_required and !std.mem.eql(u8, "union", type_type_name))
+            json_field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .json = member.name });
+        defer state.allocator.free(snake_case_member);
+        try outputIndent(child_state, writer);
+        const member_name = avoidReserved(snake_case_member);
+        try writer.print("{s}: ", .{member_name});
+        try writeOptional(member.traits, writer, null);
+        if (try generateTypeFor(member.target, shapes, writer, child_state, true))
+            try map_fields.append(try std.fmt.allocPrint(state.allocator, "{s}", .{member_name}));
+
+        if (!std.mem.eql(u8, "union", type_type_name))
            try writeOptional(member.traits, writer, " = null");
        _ = try writer.write(",\n");
    }
@@ -409,12 +474,10 @@ fn generateComplexTypeFor(allocator: *std.mem.Allocator, members: []smithy.TypeM
    // .marker = "Marker",
    // .max_items = "MaxItems",
    // };
-    var constprefix = try std.fmt.allocPrint(allocator, "{s}    ", .{prefix});
-    defer allocator.free(constprefix);
    if (http_query_mappings.items.len > 0) _ = try writer.write("\n");
-    try writeMappings(constprefix, "pub ", "http_query", http_query_mappings, writer);
+    try writeMappings(child_state, "pub ", "http_query", http_query_mappings, false, writer);
    if (http_query_mappings.items.len > 0 and http_header_mappings.items.len > 0) _ = try writer.write("\n");
-    try writeMappings(constprefix, "pub ", "http_header", http_header_mappings, writer);
+    try writeMappings(child_state, "pub ", "http_header", http_header_mappings, false, writer);

    // Add in json mappings. The function looks like this:
    //
|
||||||
// return @field(mappings, field_name);
|
// return @field(mappings, field_name);
|
||||||
// }
|
// }
|
||||||
//
|
//
|
||||||
var fieldnameprefix = try std.fmt.allocPrint(allocator, "{s} ", .{prefix});
|
try writer.writeByte('\n');
|
||||||
defer allocator.free(fieldnameprefix);
|
try outputIndent(child_state, writer);
|
||||||
try writer.print("\n{s} pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 ", .{prefix});
|
_ = try writer.write("pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {\n");
|
||||||
_ = try writer.write("{\n");
|
var grandchild_state = child_state;
|
||||||
try writeMappings(fieldnameprefix, "", "mappings", json_field_name_mappings, writer);
|
grandchild_state.indent_level += 1;
|
||||||
try writer.print("{s} return @field(mappings, field_name);\n{s}", .{ prefix, prefix });
|
// We need to force output here becaseu we're referencing the field in the return statement below
|
||||||
_ = try writer.write(" }\n");
|
try writeMappings(grandchild_state, "", "mappings", json_field_name_mappings, true, writer);
|
||||||
|
try outputIndent(grandchild_state, writer);
|
||||||
|
_ = try writer.write("return @field(mappings, field_name);\n");
|
||||||
|
try outputIndent(child_state, writer);
|
||||||
|
_ = try writer.write("}\n");
|
||||||
|
try writeStringify(child_state, map_fields.items, writer);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn writeMappings(prefix: []const u8, @"pub": []const u8, mapping_name: []const u8, mappings: anytype, writer: anytype) !void {
|
fn writeStringify(state: GenerationState, fields: [][]const u8, writer: anytype) !void {
|
||||||
if (mappings.items.len == 0) return;
|
if (fields.len > 0) {
|
||||||
try writer.print("{s}{s}const {s} = .", .{ prefix, @"pub", mapping_name });
|
// pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {
|
||||||
_ = try writer.write("{\n");
|
// if (std.mem.eql(u8, "tags", field_name))
|
||||||
for (mappings.items) |mapping| {
|
// return try serializeMap(self.tags, self.jsonFieldNameFor("tags"), options, out_stream);
|
||||||
try writer.print("{s} .{s} = \"{s}\",\n", .{ prefix, avoidReserved(mapping.snake), mapping.json });
|
// return false;
|
||||||
|
// }
|
||||||
|
var child_state = state;
|
||||||
|
child_state.indent_level += 1;
|
||||||
|
try writer.writeByte('\n');
|
||||||
|
try outputIndent(state, writer);
|
||||||
|
_ = try writer.write("pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {\n");
|
||||||
|
var return_state = child_state;
|
||||||
|
return_state.indent_level += 1;
|
||||||
|
for (fields) |field| {
|
||||||
|
try outputIndent(child_state, writer);
|
||||||
|
try writer.print("if (std.mem.eql(u8, \"{s}\", field_name))\n", .{field});
|
||||||
|
try outputIndent(return_state, writer);
|
||||||
|
try writer.print("return try serializeMap(self.{s}, self.jsonFieldNameFor(\"{s}\"), options, out_stream);\n", .{ field, field });
|
||||||
|
}
|
||||||
|
try outputIndent(child_state, writer);
|
||||||
|
_ = try writer.write("return false;\n");
|
||||||
|
try outputIndent(state, writer);
|
||||||
|
_ = try writer.write("}\n");
|
||||||
}
|
}
|
||||||
_ = try writer.write(prefix);
|
}
|
||||||
|
|
||||||
|
fn writeMappings(state: GenerationState, @"pub": []const u8, mapping_name: []const u8, mappings: anytype, force_output: bool, writer: anytype) !void {
|
||||||
|
if (mappings.items.len == 0 and !force_output) return;
|
||||||
|
try outputIndent(state, writer);
|
||||||
|
if (mappings.items.len == 0) {
|
||||||
|
try writer.print("{s}const {s} = ", .{ @"pub", mapping_name });
|
||||||
|
_ = try writer.write(".{};\n");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try writer.print("{s}const {s} = .", .{ @"pub", mapping_name });
|
||||||
|
_ = try writer.write("{\n");
|
||||||
|
var child_state = state;
|
||||||
|
child_state.indent_level += 1;
|
||||||
|
for (mappings.items) |mapping| {
|
||||||
|
try outputIndent(child_state, writer);
|
||||||
|
try writer.print(".{s} = \"{s}\",\n", .{ avoidReserved(mapping.snake), mapping.json });
|
||||||
|
}
|
||||||
|
try outputIndent(state, writer);
|
||||||
_ = try writer.write("};\n");
|
_ = try writer.write("};\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
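When a generated struct carries map-typed fields, writeStringify appends a dispatch function mirroring its commented template, along these lines ("tags" is an example field name; illustrative generated output, not diff content):

pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {
    if (std.mem.eql(u8, "tags", field_name))
        return try serializeMap(self.tags, self.jsonFieldNameFor("tags"), options, out_stream);
    return false;
}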
smithy/src/smithy.zig

@@ -620,6 +620,7 @@ fn getTrait(trait_type: []const u8, value: std.json.Value) SmithyParseError!?Tra
        \\smithy.api#xmlName
        \\smithy.waiters#waitable
    ;
+    // var iterator = std.mem.split(u8, list, "\n"); // Uncomment for 0.8.1
    var iterator = std.mem.split(list, "\n");
    while (iterator.next()) |known_but_unimplemented| {
        if (std.mem.eql(u8, trait_type, known_but_unimplemented))
src/aws.zig (338 lines changed)
@@ -100,12 +100,14 @@ pub fn Request(comptime action: anytype) type {
                .path = Action.http_config.uri,
            };

-            log.debug("Rest JSON v1 method: {s}", .{aws_request.method});
-            log.debug("Rest JSON v1 success code: {d}", .{Action.http_config.success_code});
-            log.debug("Rest JSON v1 raw uri: {s}", .{Action.http_config.uri});
+            log.debug("Rest JSON v1 method: '{s}'", .{aws_request.method});
+            log.debug("Rest JSON v1 success code: '{d}'", .{Action.http_config.success_code});
+            log.debug("Rest JSON v1 raw uri: '{s}'", .{Action.http_config.uri});
+            aws_request.path = try buildPath(options.client.allocator, Action.http_config.uri, ActionRequest, request);
+            defer options.client.allocator.free(aws_request.path);
+            log.debug("Rest JSON v1 processed uri: '{s}'", .{aws_request.path});
            aws_request.query = try buildQuery(options.client.allocator, request);
-            log.debug("Rest JSON v1 query: {s}", .{aws_request.query});
+            log.debug("Rest JSON v1 query: '{s}'", .{aws_request.query});
            defer options.client.allocator.free(aws_request.query);
            // We don't know if we need a body...guessing here, this should cover most
            var buffer = std.ArrayList(u8).init(options.client.allocator);
@@ -115,6 +117,7 @@ pub fn Request(comptime action: anytype) type {
            if (std.mem.eql(u8, "PUT", aws_request.method) or std.mem.eql(u8, "POST", aws_request.method)) {
                try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
            }
+            aws_request.body = buffer.items;

            return try Self.callAws(aws_request, .{
                .success_http_code = Action.http_config.success_code,
@@ -244,7 +247,10 @@ pub fn Request(comptime action: anytype) type {
            // TODO: Handle XML
            if (!isJson) return error.XmlUnimplemented;

-            var stream = json.TokenStream.init(response.body);
+            const SResponse = if (Self.service_meta.aws_protocol != .query and Self.service_meta.aws_protocol != .ec2_query)
+                action.Response
+            else
+                ServerResponse(action);
+
            const parser_options = json.ParseOptions{
                .allocator = options.client.allocator,
|
||||||
.allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
|
.allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
|
||||||
.allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
|
.allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
|
||||||
};
|
};
|
||||||
|
if (std.meta.fields(SResponse).len == 0) // We don't care about the body if there are no fields
|
||||||
|
// Do we care if an unexpected body comes in?
|
||||||
|
return FullResponseType{
|
||||||
|
.response = .{},
|
||||||
|
.response_metadata = .{
|
||||||
|
.request_id = try requestIdFromHeaders(aws_request, response, options),
|
||||||
|
},
|
||||||
|
.parser_options = parser_options,
|
||||||
|
.raw_parsed = .{ .raw = .{} },
|
||||||
|
};
|
||||||
|
|
||||||
// const SResponse = ServerResponse(request);
|
var stream = json.TokenStream.init(response.body);
|
||||||
const SResponse = if (Self.service_meta.aws_protocol != .query and Self.service_meta.aws_protocol != .ec2_query)
|
|
||||||
action.Response
|
|
||||||
else
|
|
||||||
ServerResponse(action);
|
|
||||||
|
|
||||||
const parsed_response = json.parse(SResponse, &stream, parser_options) catch |e| {
|
const parsed_response = json.parse(SResponse, &stream, parser_options) catch |e| {
|
||||||
log.err(
|
log.err(
|
||||||
|
@@ -277,23 +289,10 @@ pub fn Request(comptime action: anytype) type {
            };

            if (Self.service_meta.aws_protocol != .query and Self.service_meta.aws_protocol != .ec2_query) {
-                var request_id: []u8 = undefined;
-                var found = false;
-                for (response.headers) |h| {
-                    if (std.ascii.eqlIgnoreCase(h.name, "X-Amzn-RequestId")) {
-                        found = true;
-                        request_id = try std.fmt.allocPrint(options.client.allocator, "{s}", .{h.value}); // will be freed in FullR.deinit()
-                    }
-                }
-                if (!found) {
-                    try reportTraffic(options.client.allocator, "Request ID not found", aws_request, response, log.err);
-                    return error.RequestIdNotFound;
-                }
-
                return FullResponseType{
                    .response = parsed_response,
                    .response_metadata = .{
-                        .request_id = request_id,
+                        .request_id = try requestIdFromHeaders(aws_request, response, options),
                    },
                    .parser_options = parser_options,
                    .raw_parsed = .{ .raw = parsed_response },
@@ -322,6 +321,21 @@ pub fn Request(comptime action: anytype) type {
    };
 }

+fn requestIdFromHeaders(request: awshttp.HttpRequest, response: awshttp.HttpResult, options: Options) ![]u8 {
+    var request_id: []u8 = undefined;
+    var found = false;
+    for (response.headers) |h| {
+        if (std.ascii.eqlIgnoreCase(h.name, "X-Amzn-RequestId")) {
+            found = true;
+            request_id = try std.fmt.allocPrint(options.client.allocator, "{s}", .{h.value}); // will be freed in FullR.deinit()
+        }
+    }
+    if (!found) {
+        try reportTraffic(options.client.allocator, "Request ID not found", request, response, log.err);
+        return error.RequestIdNotFound;
+    }
+    return request_id;
+}
 fn ServerResponse(comptime action: anytype) type {
    const T = action.Response;
    // NOTE: The non-standard capitalization here is used as a performance
@@ -398,6 +412,80 @@ fn queryFieldTransformer(field_name: []const u8, encoding_options: url.EncodingO
    return try case.snakeToPascal(encoding_options.allocator.?, field_name);
 }

+fn buildPath(allocator: *std.mem.Allocator, raw_uri: []const u8, comptime ActionRequest: type, request: anytype) ![]const u8 {
+    var buffer = try std.ArrayList(u8).initCapacity(allocator, raw_uri.len);
+    // const writer = buffer.writer();
+    defer buffer.deinit();
+    var in_var = false;
+    var start: u64 = 0;
+    for (raw_uri) |c, inx| {
+        switch (c) {
+            '{' => {
+                in_var = true;
+                start = inx + 1;
+            },
+            '}' => {
+                in_var = false;
+                const replacement_var = raw_uri[start..inx];
+                inline for (std.meta.fields(ActionRequest)) |field| {
+                    if (std.mem.eql(u8, request.jsonFieldNameFor(field.name), replacement_var)) {
+                        var replacement_buffer = try std.ArrayList(u8).initCapacity(allocator, raw_uri.len);
+                        defer replacement_buffer.deinit();
+                        var encoded_buffer = try std.ArrayList(u8).initCapacity(allocator, raw_uri.len);
+                        defer encoded_buffer.deinit();
+                        const replacement_writer = replacement_buffer.writer();
+                        // std.mem.replacementSize
+                        try json.stringify(
+                            @field(request, field.name),
+                            .{},
+                            replacement_writer,
+                        );
+                        const trimmed_replacement_val = std.mem.trim(u8, replacement_buffer.items, "\"");
+                        try uriEncode(trimmed_replacement_val, encoded_buffer.writer());
+                        try buffer.appendSlice(encoded_buffer.items);
+                    }
+                }
+            },
+            else => if (!in_var) {
+                try buffer.append(c);
+            } else {},
+        }
+    }
+    return buffer.toOwnedSlice();
+}
+
+fn uriEncode(input: []const u8, writer: anytype) !void {
+    for (input) |c|
+        try uriEncodeByte(c, writer);
+}
+
+fn uriEncodeByte(char: u8, writer: anytype) !void {
+    switch (char) {
+        '!' => _ = try writer.write("%21"),
+        '#' => _ = try writer.write("%23"),
+        '$' => _ = try writer.write("%24"),
+        '&' => _ = try writer.write("%26"),
+        '\'' => _ = try writer.write("%27"),
+        '(' => _ = try writer.write("%28"),
+        ')' => _ = try writer.write("%29"),
+        '*' => _ = try writer.write("%2A"),
+        '+' => _ = try writer.write("%2B"),
+        ',' => _ = try writer.write("%2C"),
+        '/' => _ = try writer.write("%2F"),
+        ':' => _ = try writer.write("%3A"),
+        ';' => _ = try writer.write("%3B"),
+        '=' => _ = try writer.write("%3D"),
+        '?' => _ = try writer.write("%3F"),
+        '@' => _ = try writer.write("%40"),
+        '[' => _ = try writer.write("%5B"),
+        ']' => _ = try writer.write("%5D"),
+        '%' => _ = try writer.write("%25"),
+        else => {
+            _ = try writer.writeByte(char);
+        },
+    }
+}
+
 fn buildQuery(allocator: *std.mem.Allocator, request: anytype) ![]const u8 {
    // query should look something like this:
    // pub const http_query = .{
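A sketch of buildPath's label substitution, assuming it sits alongside these functions in src/aws.zig; the request type and values are invented for the example:

// Hypothetical request type standing in for a generated Request struct
const FakeRequest = struct {
    id: []const u8,
    pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
        const mappings = .{ .id = "Id" };
        return @field(mappings, field_name);
    }
};

test "buildPath substitutes and uri-encodes http labels" {
    // "{Id}" is replaced by the stringified field value, quotes trimmed, then encoded
    const path = try buildPath(std.testing.allocator, "/things/{Id}", FakeRequest, FakeRequest{ .id = "E123/456" });
    defer std.testing.allocator.free(path);
    try std.testing.expectEqualStrings("/things/E123%2F456", path);
}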
@@ -408,22 +496,15 @@ fn buildQuery(allocator: *std.mem.Allocator, request: anytype) ![]const u8 {
    var buffer = std.ArrayList(u8).init(allocator);
    const writer = buffer.writer();
    defer buffer.deinit();
-    var has_begun = false;
+    var prefix = "?";
    const Req = @TypeOf(request);
    if (declaration(Req, "http_query") == null)
        return buffer.toOwnedSlice();
    const query_arguments = Req.http_query;
    inline for (@typeInfo(@TypeOf(query_arguments)).Struct.fields) |arg| {
        const val = @field(request, arg.name);
-        if (@typeInfo(@TypeOf(val)) == .Optional) {
-            if (val) |v| {
-                try addQueryArg(@field(query_arguments, arg.name), v, writer, !has_begun);
-                has_begun = true;
-            }
-        } else {
-            try addQueryArg(@field(query_arguments, arg.name), val, writer, !has_begun);
-            has_begun = true;
-        }
+        if (try addQueryArg(arg.field_type, prefix, @field(query_arguments, arg.name), val, writer))
+            prefix = "&";
    }
    return buffer.toOwnedSlice();
 }
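A sketch of the new separator handling, assuming an invented request type carrying an http_query declaration like the generated ones:

test "buildQuery emits ? for the first argument and & afterwards" {
    const FakeQueryRequest = struct {
        marker: ?[]const u8 = null,
        max_items: ?i64 = null,
        pub const http_query = .{ .marker = "Marker", .max_items = "MaxItems" };
    };
    const query = try buildQuery(std.testing.allocator, FakeQueryRequest{ .marker = "foo", .max_items = 3 });
    defer std.testing.allocator.free(query);
    try std.testing.expectEqualStrings("?Marker=foo&MaxItems=3", query);
}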
@ -436,15 +517,103 @@ fn declaration(comptime T: type, name: []const u8) ?std.builtin.TypeInfo.Declara
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|

-fn addQueryArg(key: []const u8, value: anytype, writer: anytype, start: bool) !void {
-    if (start)
-        _ = try writer.write("?")
-    else
-        _ = try writer.write("&");
-    // TODO: url escaping
-    try writer.print("{s}=", .{key});
-    try json.stringify(value, .{}, writer);
+fn addQueryArg(comptime ValueType: type, prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
+    switch (@typeInfo(@TypeOf(value))) {
+        .Optional => {
+            if (value) |v|
+                return try addQueryArg(ValueType, prefix, key, v, writer);
+            return false;
+        },
+        // if this is a pointer, we want to make sure it is more than just a string
+        .Pointer => |ptr| {
+            if (ptr.child == u8 or ptr.size != .Slice) {
+                // This is just a string
+                return try addBasicQueryArg(prefix, key, value, writer);
+            }
+            var p = prefix;
+            for (value) |li| {
+                if (try addQueryArg(ValueType, p, key, li, writer))
+                    p = "&";
+            }
+            return std.mem.eql(u8, "&", p);
+        },
+        .Array => |arr| {
+            if (arr.child == u8)
+                return try addBasicQueryArg(prefix, key, value, writer);
+            var p = prefix;
+            for (value) |li| {
+                if (try addQueryArg(ValueType, p, key, li, writer))
+                    p = "&";
+            }
+            return std.mem.eql(u8, "&", p);
+        },
+        else => {
+            return try addBasicQueryArg(prefix, key, value, writer);
+        },
+    }
}
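Because the new contract returns `!bool` ("did I write anything"), slices expand to repeated `key=value` pairs and still report correctly via the `p` sentinel, which only ends up as `&` if at least one element was written. A hedged sketch calling `addQueryArg` directly — the `ValueType` argument is only threaded through for recursion, so any placeholder type works here:

test "addQueryArg reports whether anything was written" {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();
    const keys = [_][]const u8{ "Foo", "Bar" };
    const slice: []const []const u8 = keys[0..];
    const wrote = try addQueryArg([]const []const u8, "?", "tagKeys", slice, buffer.writer());
    try std.testing.expect(wrote);
    try std.testing.expectEqualStrings("?tagKeys=Foo&tagKeys=Bar", buffer.items);
}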

fn addBasicQueryArg(prefix: []const u8, key: []const u8, value: anytype, writer: anytype) !bool {
    _ = try writer.write(prefix);
    try uriEncode(key, writer);
    _ = try writer.write("=");
    try json.stringify(value, .{}, ignoringWriter(uriEncodingWriter(writer).writer(), '"').writer());
    return true;
}
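Note the order of the writer chain above: `json.stringify` emits a quoted JSON string into the ignoring writer, which drops the `"` bytes before they ever reach the uri-encoding writer — otherwise every string value would pick up a stray `%22` at each end. A small sketch of the chain in isolation (illustrative, not part of the diff):

test "json string values are unquoted and uri-encoded on the way out" {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();
    var encoding = uriEncodingWriter(buffer.writer());
    var ignoring = ignoringWriter(encoding.writer(), '"');
    // json.stringify emits "Foo?"; the quotes are dropped, then '?' becomes %3F
    try json.stringify("Foo?", .{}, ignoring.writer());
    try std.testing.expectEqualStrings("Foo%3F", buffer.items);
}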

pub fn uriEncodingWriter(child_stream: anytype) UriEncodingWriter(@TypeOf(child_stream)) {
    return .{ .child_stream = child_stream };
}

/// A Writer that uri-encodes the bytes passed to it
pub fn UriEncodingWriter(comptime WriterType: type) type {
    return struct {
        child_stream: WriterType,

        pub const Error = WriterType.Error;
        pub const Writer = std.io.Writer(*Self, Error, write);

        const Self = @This();

        pub fn write(self: *Self, bytes: []const u8) Error!usize {
            try uriEncode(bytes, self.child_stream);
            return bytes.len; // We say that all bytes are "written", even if they're not, as caller may be retrying
        }

        pub fn writer(self: *Self) Writer {
            return .{ .context = self };
        }
    };
}

pub fn ignoringWriter(child_stream: anytype, ignore: u8) IgnoringWriter(@TypeOf(child_stream)) {
    return .{ .child_stream = child_stream, .ignore = ignore };
}

/// A Writer that ignores a character
pub fn IgnoringWriter(comptime WriterType: type) type {
    return struct {
        child_stream: WriterType,
        ignore: u8,

        pub const Error = WriterType.Error;
        pub const Writer = std.io.Writer(*Self, Error, write);

        const Self = @This();

        pub fn write(self: *Self, bytes: []const u8) Error!usize {
            for (bytes) |b| {
                if (b != self.ignore)
                    try self.child_stream.writeByte(b);
            }
            return bytes.len; // We say that all bytes are "written", even if they're not, as caller may be retrying
        }

        pub fn writer(self: *Self) Writer {
            return .{ .context = self };
        }
    };
}

fn reportTraffic(allocator: *std.mem.Allocator, info: []const u8, request: awshttp.HttpRequest, response: awshttp.HttpResult, comptime reporter: fn (comptime []const u8, anytype) void) !void {
    var msg = std.ArrayList(u8).init(allocator);
    defer msg.deinit();

@@ -472,6 +641,42 @@ fn reportTraffic(allocator: *std.mem.Allocator, info: []const u8, request: awshttp.HttpRequest, response: awshttp.HttpResult, comptime reporter: fn (comptime []const u8, anytype) void) !void {
    reporter("{s}\n", .{msg.items});
}

// TODO: Where does this belong really?
fn typeForField(comptime T: type, field_name: []const u8) !type {
    const ti = @typeInfo(T);
    switch (ti) {
        .Struct => {
            inline for (ti.Struct.fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.field_type;
            }
        },
        else => return error.TypeIsNotAStruct, // should not hit this
    }
    return error.FieldNotFound;
}
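`typeForField` is a comptime lookup: given a struct type and a field name, it hands back that field's declared type, which is what lets the test below derive the tag element type straight from the generated `Request` struct. A tiny hedged sketch with a local struct (hypothetical, not from the diff):

test "typeForField resolves a field's declared type" {
    const Example = struct { name: []const u8, count: u32 };
    // evaluated entirely at comptime; a failure here breaks the build, not the test run
    comptime std.debug.assert((typeForField(Example, "count") catch unreachable) == u32);
}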

test "custom serialization for map objects" {
    const allocator = std.testing.allocator;
    var buffer = std.ArrayList(u8).init(allocator);
    defer buffer.deinit();
    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 2);
    defer tags.deinit();
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
    const req = services.lambda.tag_resource.Request{ .resource = "hello", .tags = tags.items };
    try json.stringify(req, .{ .whitespace = .{} }, buffer.writer());
    try std.testing.expectEqualStrings(
        \\{
        \\    "Resource": "hello",
        \\    "Tags": {
        \\        "Foo": "Bar",
        \\        "Baz": "Qux"
        \\    }
        \\}
    , buffer.items);
}

test "REST Json v1 builds proper queries" {
    const allocator = std.testing.allocator;
    const svs = Services(.{.lambda}){};

@@ -482,6 +687,52 @@ test "REST Json v1 builds proper queries" {
    defer allocator.free(query);
    try std.testing.expectEqualStrings("?MaxItems=1", query);
}

test "REST Json v1 handles reserved chars in queries" {
    const allocator = std.testing.allocator;
    const svs = Services(.{.lambda}){};
    var keys = [_][]const u8{"Foo?I'm a crazy%dude"}; // Would love to have a way to express this without burning a var here
    const request = svs.lambda.untag_resource.Request{
        .tag_keys = keys[0..],
        .resource = "hello",
    };
    const query = try buildQuery(allocator, request);
    defer allocator.free(query);
    try std.testing.expectEqualStrings("?tagKeys=Foo%3FI%27m a crazy%25dude", query);
}

test "REST Json v1 serializes lists in queries" {
    const allocator = std.testing.allocator;
    const svs = Services(.{.lambda}){};
    var keys = [_][]const u8{ "Foo", "Bar" }; // Would love to have a way to express this without burning a var here
    const request = svs.lambda.untag_resource.Request{
        .tag_keys = keys[0..],
        .resource = "hello",
    };
    const query = try buildQuery(allocator, request);
    defer allocator.free(query);
    try std.testing.expectEqualStrings("?tagKeys=Foo&tagKeys=Bar", query);
}

test "REST Json v1 buildpath substitutes" {
    const allocator = std.testing.allocator;
    const svs = Services(.{.lambda}){};
    const request = svs.lambda.list_functions.Request{
        .max_items = 1,
    };
    const input_path = "https://myhost/{MaxItems}/";
    const output_path = try buildPath(allocator, input_path, @TypeOf(request), request);
    defer allocator.free(output_path);
    try std.testing.expectEqualStrings("https://myhost/1/", output_path);
}

test "REST Json v1 buildpath handles restricted characters" {
    const allocator = std.testing.allocator;
    const svs = Services(.{.lambda}){};
    const request = svs.lambda.list_functions.Request{
        .marker = ":",
    };
    const input_path = "https://myhost/{Marker}/";
    const output_path = try buildPath(allocator, input_path, @TypeOf(request), request);
    defer allocator.free(output_path);
    try std.testing.expectEqualStrings("https://myhost/%3A/", output_path);
}

test "basic json request serialization" {
|
test "basic json request serialization" {
|
||||||
const allocator = std.testing.allocator;
|
const allocator = std.testing.allocator;
|
||||||
const svs = Services(.{.dynamo_db}){};
|
const svs = Services(.{.dynamo_db}){};
|
||||||
|
@ -546,6 +797,7 @@ test "layer object only" {
|
||||||
const r = try json.parse(TestResponse, &stream, parser_options);
|
const r = try json.parse(TestResponse, &stream, parser_options);
|
||||||
json.parseFree(TestResponse, r, parser_options);
|
json.parseFree(TestResponse, r, parser_options);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Use for debugging json responses of specific requests
|
// Use for debugging json responses of specific requests
|
||||||
// test "dummy request" {
|
// test "dummy request" {
|
||||||
// const allocator = std.testing.allocator;
|
// const allocator = std.testing.allocator;
|
||||||
|
|
|

@@ -580,9 +580,11 @@ pub const AwsHttp = struct {
    .service = c.aws_byte_cursor_from_c_str(@ptrCast([*c]const u8, signing_service)),
    .should_sign_header = null,
    .should_sign_header_ud = null,
+   // TODO: S3 does not double uri encode. Also not sure why normalizing
+   // the path here is a flag - seems like it should always do this?
    .flags = c.bitfield_workaround_aws_signing_config_aws_flags{
-       .use_double_uri_encode = 0,
-       .should_normalize_uri_path = 0,
+       .use_double_uri_encode = 1,
+       .should_normalize_uri_path = 1,
        .omit_session_token = 1,
    },
    .signed_body_value = c.aws_byte_cursor_from_c_str(""),

38
src/json.zig

@@ -1324,6 +1324,14 @@ pub const Value = union(enum) {
    }
};

pub fn dump(value: anytype) void {
    var held = std.debug.getStderrMutex().acquire();
    defer held.release();

    const stderr = std.io.getStdErr().writer();
    std.json.stringify(value, std.json.StringifyOptions{ .whitespace = null }, stderr) catch return;
}
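A usage note on the new debug helper: `dump` serializes anything `std.json.stringify` accepts and writes it to stderr under the stderr mutex, swallowing errors, so it can be dropped into any code path without changing that path's error set. Hypothetical call, not from the diff:

// one-liner while debugging a response shape
dump(.{ .resource = "hello", .count = @as(u32, 2) });
// -> {"resource":"hello","count":2} on stderr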

test "Value.jsonStringify" {
    {
        var buffer: [10]u8 = undefined;

@@ -1402,7 +1410,7 @@ fn parsedEqual(a: anytype, b: @TypeOf(a)) bool {
            if (a == null or b == null) return false;
            return parsedEqual(a.?, b.?);
        },
-       .Union => {
+       .Union => |info| {
            if (info.tag_type) |UnionTag| {
                const tag_a = std.meta.activeTag(a);
                const tag_b = std.meta.activeTag(b);

@@ -2858,20 +2866,26 @@ pub fn stringify(
            try out_stream.writeByte('\n');
            try child_whitespace.outputIndent(out_stream);
        }
-       if (comptime std.meta.trait.hasFn("jsonFieldNameFor")(T)) {
-           const name = value.jsonFieldNameFor(Field.name);
-           try stringify(name, options, out_stream);
-       } else {
-           try stringify(Field.name, options, out_stream);
-       }
-
-       try out_stream.writeByte(':');
-       if (child_options.whitespace) |child_whitespace| {
-           if (child_whitespace.separator) {
-               try out_stream.writeByte(' ');
-           }
-       }
-       try stringify(@field(value, Field.name), child_options, out_stream);
+       var field_written = false;
+       if (comptime std.meta.trait.hasFn("jsonStringifyField")(T))
+           field_written = try value.jsonStringifyField(Field.name, child_options, out_stream);
+
+       if (!field_written) {
+           if (comptime std.meta.trait.hasFn("jsonFieldNameFor")(T)) {
+               const name = value.jsonFieldNameFor(Field.name);
+               try stringify(name, options, out_stream);
+           } else {
+               try stringify(Field.name, options, out_stream);
+           }
+
+           try out_stream.writeByte(':');
+           if (child_options.whitespace) |child_whitespace| {
+               if (child_whitespace.separator) {
+                   try out_stream.writeByte(' ');
+               }
+           }
+           try stringify(@field(value, Field.name), child_options, out_stream);
+       }
    }
    if (field_output) {
        if (options.whitespace) |whitespace| {
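The new `jsonStringifyField` hook lets a type take over serialization of a single field; returning false falls back to the stock name/value path kept below it. A hedged sketch of a type opting in — `Example` and its `Tag` struct are illustrative, not from the diff:

const Example = struct {
    name: []const u8,
    tags: []const Tag,

    pub const Tag = struct { key: ?[]const u8, value: ?[]const u8 };

    // Return true when the field was written in full (name, ':' and value);
    // returning false defers to the default stringify path.
    pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {
        _ = options;
        if (comptime std.mem.eql(u8, field_name, "tags")) {
            _ = try out_stream.write("\"Tags\":{");
            for (self.tags) |tag, i| {
                if (i != 0) try out_stream.writeByte(',');
                try out_stream.print("\"{s}\":\"{s}\"", .{ tag.key.?, tag.value.? });
            }
            try out_stream.writeByte('}');
            return true;
        }
        return false;
    }
};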