Compare commits

26 commits: bc18fcca75 ... 3979d32a1d

3979d32a1d
6e775e20bb
d60ce20305
90d1091b66
4a6c84e0ae
9d1d8f26c9
045150964b
7aeb43a8c6
895918c1a9
af0d4d92b3
c893a7cb31
2cd581f27f
7233b87efc
8a9d571c8e
36a894d776
e43b827576
60f1863557
ee16fcf6cb
7908c13bb4
489581ead2
9bc13d932a
9b673b0ff3
db0dde50e5
a4d4da348f
d2ba0af842
13a926af70
18 changed files with 1041 additions and 821 deletions
.pre-commit-config.yaml (new file, +33)
```yaml
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.2.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-added-large-files
  - repo: https://github.com/batmac/pre-commit-zig
    rev: v0.3.0
    hooks:
      - id: zig-fmt
      - id: zig-build
  - repo: local
    hooks:
      - id: zlint
        name: Run zig build smoke-test
        entry: zig
        args: ["build", "--verbose", "smoke-test"]
        language: system
        types: [file]
        pass_filenames: false

# - repo: local
#   hooks:
#     - id: zlint
#       name: Run zlint
#       entry: zlint
#       args: ["--deny-warnings", "--fix"]
#       language: system
#       types: [zig]
```
build.zig

```diff
@@ -1,5 +1,4 @@
 const std = @import("std");
 const builtin = @import("builtin");
 const Builder = @import("std").Build;

 const models_subdir = "codegen/sdk-codegen/aws-models/"; // note will probably not work on windows
```
codegen/src/FileGenerationState.zig (new file, +10)

```zig
const std = @import("std");
const smithy = @import("smithy");

const FileGenerationState = @This();

protocol: smithy.AwsProtocol,
shapes: std.StringHashMap(smithy.ShapeInfo),
shape_references: std.StringHashMap(u64),
additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
additional_types_generated: *std.StringHashMap(void),
```
codegen/src/GenerateTypeOptions.zig (new file, +21)

```zig
const std = @import("std");
const case = @import("case");

const GenerateTypeOptions = @This();

end_structure: bool,
key_case: case.Case,

pub fn endStructure(self: @This(), value: bool) GenerateTypeOptions {
    return .{
        .end_structure = value,
        .key_case = self.key_case,
    };
}

pub fn keyCase(self: @This(), value: case.Case) GenerateTypeOptions {
    return .{
        .end_structure = self.end_structure,
        .key_case = value,
    };
}
```
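The two helpers implement a copy-on-modify (with-er) pattern: each returns a tweaked copy rather than mutating the receiver, so option values can be derived inline at call sites (as `options.endStructure(true)` is later in this diff). A minimal self-contained sketch of the pattern; `Case` here is a stand-in for the real `case.Case` enum:

```zig
const std = @import("std");

const Case = enum { snake, pascal }; // stand-in for case.Case

const Options = struct {
    end_structure: bool,
    key_case: Case,

    fn endStructure(self: Options, value: bool) Options {
        return .{ .end_structure = value, .key_case = self.key_case };
    }
};

pub fn main() void {
    const base = Options{ .end_structure = false, .key_case = .snake };
    const nested = base.endStructure(true); // derived copy; base is untouched
    std.debug.assert(!base.end_structure and nested.end_structure);
}
```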
codegen/src/GenerationState.zig (new file, +53)

```zig
const std = @import("std");
const smithy = @import("smithy");

const FileGenerationState = @import("FileGenerationState.zig");

const GenerationState = @This();

type_stack: *std.ArrayList(*const smithy.ShapeInfo),
file_state: FileGenerationState,
// we will need some sort of "type decls needed" for recursive structures
allocator: std.mem.Allocator,
indent_level: u64,

pub fn appendToTypeStack(self: @This(), shape_info: *const smithy.ShapeInfo) !void {
    try self.type_stack.append(shape_info);
}

pub fn popFromTypeStack(self: @This()) void {
    _ = self.type_stack.pop();
}

pub fn getTypeRecurrenceCount(self: @This(), id: []const u8) u8 {
    var self_occurences: u8 = 0;

    for (self.type_stack.items) |i| {
        if (std.mem.eql(u8, i.id, id)) {
            self_occurences += 1;
        }
    }

    return self_occurences;
}

pub fn indent(self: @This()) GenerationState {
    var new_state = self.clone();
    new_state.indent_level += 1;
    return new_state;
}

pub fn deindent(self: @This()) GenerationState {
    var new_state = self.clone();
    new_state.indent_level = @max(0, new_state.indent_level - 1);
    return new_state;
}

pub fn clone(self: @This()) GenerationState {
    return GenerationState{
        .type_stack = self.type_stack,
        .file_state = self.file_state,
        .allocator = self.allocator,
        .indent_level = self.indent_level,
    };
}
```
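Note that `GenerationState` is passed by value, so `indent`/`deindent` hand back modified copies while the shared `type_stack` pointer still mutates through `appendToTypeStack`. One hazard: `indent_level` is a `u64` and `deindent` evaluates `new_state.indent_level - 1` before `@max` can clamp it, so calling it at level 0 would trip Zig's unsigned-underflow safety check rather than clamp to zero. A saturating subtraction (`-|`) sidesteps that; a minimal sketch:

```zig
const std = @import("std");

pub fn main() void {
    var indent_level: u64 = 0;
    // `indent_level - 1` would panic in safe builds when already 0;
    // `-|` saturates at the type's minimum instead.
    indent_level = indent_level -| 1;
    std.debug.assert(indent_level == 0);
}
```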
codegen/src/Hasher.zig

```diff
@@ -20,7 +20,6 @@ const multihash_len = 1 + 1 + Hash.digest_length;
 pub const hex_multihash_len = 2 * multihash_len;
 pub const digest_len = Hash.digest_length;

 const MultiHashHexDigest = [hex_multihash_len]u8;
 const MultihashFunction = enum(u16) {
     identity = 0x00,
     sha1 = 0x11,
@@ -70,7 +69,7 @@ pub fn hex64(x: u64) [16]u8 {
     var result: [16]u8 = undefined;
     var i: usize = 0;
     while (i < 8) : (i += 1) {
-        const byte = @as(u8, @truncate(x >> @as(u6, @intCast(8 * i))));
+        const byte: u8 = @truncate(x >> @as(u6, @intCast(8 * i)));
         result[i * 2 + 0] = hex_charset[byte >> 4];
         result[i * 2 + 1] = hex_charset[byte & 15];
    }
```
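The `hex64` change is purely stylistic: the destination type moves to the declaration and `@truncate` infers its result type from it, instead of being wrapped in `@as`. A self-contained comparison:

```zig
const std = @import("std");

pub fn main() void {
    const x: u64 = 0x1122334455667788;
    const old_style = @as(u8, @truncate(x)); // type supplied at the cast site
    const new_style: u8 = @truncate(x); // type inferred from the declaration
    std.debug.assert(old_style == new_style and new_style == 0x88);
}
```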
codegen/src/main.zig

```diff
@@ -2,9 +2,21 @@ const std = @import("std");
 const smithy = @import("smithy");
 const Hasher = @import("Hasher.zig");
+const case = @import("case");
+const smt = @import("smithy_tools.zig");
+const serialization = @import("serialization.zig");
+const support = @import("support.zig");

 var verbose = false;

+const GenerationState = @import("GenerationState.zig");
+const FileGenerationState = @import("FileGenerationState.zig");
+const GenerateTypeOptions = @import("GenerateTypeOptions.zig");
+
+const Shape = smt.Shape;
+const ServiceShape = smt.ServiceShape;
+const ListShape = smt.ListShape;
+const MapShape = smt.MapShape;
+
 pub fn main() anyerror!void {
     const root_progress_node = std.Progress.start(.{});
     defer root_progress_node.end();
@@ -83,6 +95,11 @@ pub fn main() anyerror!void {

     if (args.len == 0)
         _ = try generateServices(allocator, ";", std.io.getStdIn(), stdout);
+
+    if (verbose) {
+        const output_path = try output_dir.realpathAlloc(allocator, ".");
+        std.debug.print("Output path: {s}\n", .{output_path});
+    }
 }

 const OutputManifest = struct {
@@ -184,16 +201,6 @@ fn calculateDigests(models_dir: std.fs.Dir, output_dir: std.fs.Dir, thread_pool:
     };
 }
 fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype) !void {
-    // The fixed buffer for output will be 2MB, which is twice as large as the size of the EC2
-    // (the largest) model. We'll then flush all this at one go at the end.
-    var buffer = std.mem.zeroes([1024 * 1024 * 2]u8);
-    var output_stream = std.io.FixedBufferStream([]u8){
-        .buffer = &buffer,
-        .pos = 0,
-    };
-    var counting_writer = std.io.countingWriter(output_stream.writer());
-    var writer = counting_writer.writer();
-
     // It's probably best to create our own allocator here so we can deint at the end and
     // toss all allocations related to the services in this file
     // I can't guarantee we're not leaking something, and at the end of the
@@ -201,6 +208,13 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
     var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     defer arena.deinit();
     const allocator = arena.allocator();
+
+    var output = try std.ArrayListUnmanaged(u8).initCapacity(allocator, 1024 * 1024 * 2);
+    defer output.deinit(allocator);
+
+    var counting_writer = std.io.countingWriter(output.writer(allocator));
+    var writer = counting_writer.writer();

     _ = try writer.write("const std = @import(\"std\");\n");
     _ = try writer.write("const smithy = @import(\"smithy\");\n");
     _ = try writer.write("const json = @import(\"json\");\n");
```
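`processFile` thus swaps the fixed 2 MB stack buffer for a growable `ArrayListUnmanaged(u8)` seeded with the same capacity, while keeping the byte count via `countingWriter`. A minimal self-contained sketch of that writer stack:

```zig
const std = @import("std");

pub fn main() !void {
    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena.deinit();
    const allocator = arena.allocator();

    var output = try std.ArrayListUnmanaged(u8).initCapacity(allocator, 64);
    defer output.deinit(allocator);

    // countingWriter wraps any writer and tracks bytes_written as they flow through
    var counting_writer = std.io.countingWriter(output.writer(allocator));
    const writer = counting_writer.writer();

    _ = try writer.write("hello");
    std.debug.assert(counting_writer.bytes_written == 5);
    std.debug.assert(std.mem.eql(u8, output.items, "hello"));
}
```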
```diff
@@ -235,7 +249,8 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
         output_file_name = new_output_file_name;
     }

-    const formatted = try zigFmt(allocator, @ptrCast(buffer[0..counting_writer.bytes_written]));
+    const unformatted: [:0]const u8 = try output.toOwnedSliceSentinel(allocator, 0);
+    const formatted = try zigFmt(allocator, unformatted);

     // Dump our buffer out to disk
     var file = try output_dir.createFile(output_file_name, .{ .truncate = true });
@@ -389,7 +404,8 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
     var generated = std.StringHashMap(void).init(allocator);
     defer generated.deinit();

-    const state = FileGenerationState{
+    var state = FileGenerationState{
+        .protocol = undefined,
         .shape_references = shape_references,
         .additional_types_to_generate = &unresolved,
         .additional_types_generated = &generated,
@@ -412,7 +428,10 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
                 endpoint_prefix = trait.aws_api_service.endpoint_prefix;
             },
             .aws_auth_sigv4 => sigv4_name = trait.aws_auth_sigv4.name,
-            .aws_protocol => aws_protocol = trait.aws_protocol,
+            .aws_protocol => {
+                aws_protocol = trait.aws_protocol;
+                state.protocol = aws_protocol;
+            },
             else => {},
         }
     }
@@ -425,7 +444,7 @@ fn generateServices(allocator: std.mem.Allocator, comptime _: []const u8, file:
         // Service struct
         // name of the field will be snake_case of whatever comes in from
         // sdk_id. Not sure this will simple...
-        const constant_name = try constantName(allocator, sdk_id);
+        const constant_name = try support.constantName(allocator, sdk_id, .snake);
         try constant_names.append(constant_name);
         try writer.print("const Self = @This();\n", .{});
         if (version) |v|
@@ -483,54 +502,42 @@ fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerat

         try writer.print("\npub const {s} = ", .{type_name});
         try file_state.additional_types_generated.putNoClobber(t.name, {});
-        _ = try generateTypeFor(t.id, writer, state, true);
+        _ = try generateTypeFor(t.id, writer, state, .{
+            .key_case = .snake,
+            .end_structure = true,
+        });
         _ = try writer.write(";\n");
     }
 }

-fn constantName(allocator: std.mem.Allocator, id: []const u8) ![]const u8 {
-    // There are some ids that don't follow consistent rules, so we'll
-    // look for the exceptions and, if not found, revert to the snake case
-    // algorithm
-
-    // This one might be a bug in snake, but it's the only example so HPDL
-    if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
-    if (std.mem.eql(u8, id, "CloudFront")) return try std.fmt.allocPrint(allocator, "cloudfront", .{});
-    // IoT is an acryonym, but snake wouldn't know that. Interestingly not all
-    // iot services are capitalizing that way.
-    if (std.mem.eql(u8, id, "IoTSiteWise")) return try std.fmt.allocPrint(allocator, "iot_sitewise", .{});
-    if (std.mem.eql(u8, id, "IoTFleetHub")) return try std.fmt.allocPrint(allocator, "iot_fleet_hub", .{});
-    if (std.mem.eql(u8, id, "IoTSecureTunneling")) return try std.fmt.allocPrint(allocator, "iot_secure_tunneling", .{});
-    if (std.mem.eql(u8, id, "IoTThingsGraph")) return try std.fmt.allocPrint(allocator, "iot_things_graph", .{});
-    // snake turns this into dev_ops, which is a little weird
-    if (std.mem.eql(u8, id, "DevOps Guru")) return try std.fmt.allocPrint(allocator, "devops_guru", .{});
-    if (std.mem.eql(u8, id, "FSx")) return try std.fmt.allocPrint(allocator, "fsx", .{});
-    if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});
-
-    // Not a special case - just snake it
-    return try case.allocTo(allocator, .snake, id);
-}
-
-const FileGenerationState = struct {
-    shapes: std.StringHashMap(smithy.ShapeInfo),
-    shape_references: std.StringHashMap(u64),
-    additional_types_to_generate: *std.ArrayList(smithy.ShapeInfo),
-    additional_types_generated: *std.StringHashMap(void),
-};
-const GenerationState = struct {
-    type_stack: *std.ArrayList(*const smithy.ShapeInfo),
-    file_state: FileGenerationState,
-    // we will need some sort of "type decls needed" for recursive structures
-    allocator: std.mem.Allocator,
-    indent_level: u64,
-};
-
 fn outputIndent(state: GenerationState, writer: anytype) !void {
     const n_chars = 4 * state.indent_level;
     try writer.writeByteNTimes(' ', n_chars);
 }

+const StructType = enum {
+    request,
+    response,
+};
+
+const OperationSubTypeInfo = struct {
+    type: StructType,
+    key_case: case.Case,
+};
+
+const operation_sub_types = [_]OperationSubTypeInfo{
+    OperationSubTypeInfo{
+        .key_case = .snake,
+        .type = .request,
+    },
+    OperationSubTypeInfo{
+        .key_case = .snake,
+        .type = .response,
+    },
+};
+
 fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo, file_state: FileGenerationState, writer: anytype) !void {
-    const snake_case_name = try constantName(allocator, operation.name);
+    const snake_case_name = try support.constantName(allocator, operation.name, .snake);
     defer allocator.free(snake_case_name);

     var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);
@@ -546,33 +553,51 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     // indent should start at 4 spaces here
     const operation_name = avoidReserved(snake_case_name);

-    // Request type
-    _ = try writer.print("pub const {s}Request = ", .{operation.name});
-    if (operation.shape.operation.input == null or
-        (try shapeInfoForId(operation.shape.operation.input.?, state)).shape == .unit)
-    {
-        _ = try writer.write("struct {\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    } else if (operation.shape.operation.input) |member| {
-        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
-        _ = try writer.write("\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    }
-    _ = try writer.write(";\n\n");
-
-    // Response type
-    _ = try writer.print("pub const {s}Response = ", .{operation.name});
-    if (operation.shape.operation.output == null or
-        (try shapeInfoForId(operation.shape.operation.output.?, state)).shape == .unit)
-    {
-        _ = try writer.write("struct {\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    } else if (operation.shape.operation.output) |member| {
-        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
-        _ = try writer.write("\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    }
-    _ = try writer.write(";\n\n");
+    inline for (operation_sub_types) |type_info| {
+        _ = try writer.print("pub const {s}", .{operation.name});
+        switch (type_info.type) {
+            .request => try writer.writeAll("Request"),
+            .response => try writer.writeAll("Response"),
+        }
+        try writer.writeAll(" = ");
+
+        const operation_field_name = switch (type_info.type) {
+            .request => "input",
+            .response => "output",
+        };
+        const maybe_shape_id = @field(operation.shape.operation, operation_field_name);
+
+        const generate_type_options = GenerateTypeOptions{
+            .key_case = type_info.key_case,
+            .end_structure = false,
+        };
+
+        if (maybe_shape_id == null or
+            (try smt.getShapeInfo(maybe_shape_id.?, state.file_state.shapes)).shape == .unit)
+        {
+            _ = try writer.write("struct {\n");
+        } else if (maybe_shape_id) |shape_id| {
+            if (try generateTypeFor(shape_id, writer, state, generate_type_options)) unreachable; // we expect only structs here
+            _ = try writer.write("\n");
+
+            switch (type_info.type) {
+                .request => {
+                    var new_state = state.clone();
+                    new_state.indent_level = 0;
+                    std.debug.assert(new_state.type_stack.items.len == 0);
+
+                    try serialization.json.generateToJsonFunction(shape_id, writer.any(), new_state, generate_type_options.keyCase(.pascal));
+
+                    try writer.writeAll("\n");
+                },
+                else => {},
+            }
+        }
+
+        try generateMetadataFunction(operation_name, state, writer, generate_type_options);
+
+        _ = try writer.write("};\n\n");
+    }

     try writer.print("pub const {s}: struct ", .{operation_name});
     _ = try writer.write("{\n");
```
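The request/response duplication collapses into one `inline for` over `operation_sub_types`. `inline for` unrolls at compile time, which is what lets `type_info.key_case` feed the `comptime options` parameter of `generateTypeFor` further down. A self-contained sketch of why the unrolling matters:

```zig
const std = @import("std");

const Kind = enum { request, response };

// comptime parameter: callers must supply a compile-time-known value
fn suffix(comptime kind: Kind) []const u8 {
    return switch (kind) {
        .request => "Request",
        .response => "Response",
    };
}

pub fn main() void {
    // inline for unrolls the loop, so `k` is comptime-known in each
    // iteration and can be passed where a comptime parameter is required
    inline for ([_]Kind{ .request, .response }) |k| {
        std.debug.print("Operation{s}\n", .{suffix(k)});
    }
}
```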
```diff
@@ -613,7 +638,7 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     _ = try writer.write("} = .{};\n");
 }

-fn generateMetadataFunction(operation_name: []const u8, state: GenerationState, writer: anytype) !void {
+fn generateMetadataFunction(operation_name: []const u8, state: GenerationState, writer: anytype, options: GenerateTypeOptions) !void {
     // TODO: Shove these lines in here, and also the else portion
     // pub fn metaInfo(self: @This()) struct { service: @TypeOf(sts), action: @TypeOf(sts.get_caller_identity) } {
     //     return .{ .service = sts, .action = sts.get_caller_identity };
@@ -634,7 +659,10 @@ fn generateMetadataFunction(operation_name: []const u8, state: GenerationState,
     try outputIndent(child_state, writer);
     _ = try writer.write("}\n");
     try outputIndent(state, writer);
-    try writer.writeByte('}');
+
+    if (options.end_structure) {
+        try writer.writeByte('}');
+    }
 }

 fn getErrorName(err_name: []const u8) []const u8 {
@@ -652,16 +680,22 @@ fn endsWith(item: []const u8, str: []const u8) bool {
 }

 fn getTypeName(allocator: std.mem.Allocator, shape: smithy.ShapeInfo) ![]const u8 {
-    const type_name = avoidReserved(shape.name);
+    const pascal_shape_name = try case.allocTo(allocator, .pascal, shape.name);
+    const type_name = avoidReserved(pascal_shape_name);

     switch (shape.shape) {
         // maps are named like "Tags"
         // this removes the trailing s and adds "KeyValue" suffix
         .map => {
-            const map_type_name = avoidReserved(shape.name);
-            return try std.fmt.allocPrint(allocator, "{s}KeyValue", .{map_type_name[0 .. map_type_name.len - 1]});
+            var name_slice = pascal_shape_name;
+
+            if (pascal_shape_name[pascal_shape_name.len - 1] == 's') {
+                name_slice = pascal_shape_name[0 .. pascal_shape_name.len - 1];
+            }
+
+            return try std.fmt.allocPrint(allocator, "{s}KeyValue", .{name_slice});
         },
-        else => return allocator.dupe(u8, type_name),
+        else => return type_name,
     }
 }
@@ -700,40 +734,19 @@ fn reuseCommonType(shape: smithy.ShapeInfo, writer: anytype, state: GenerationSt
     }
     return rc;
 }
-fn shapeInfoForId(id: []const u8, state: GenerationState) !smithy.ShapeInfo {
-    return state.file_state.shapes.get(id) orelse {
-        std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
-        return error.InvalidType;
-    };
-}

 /// return type is anyerror!void as this is a recursive function, so the compiler cannot properly infer error types
-fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState, end_structure: bool) anyerror!bool {
+fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!bool {
+    const end_structure = options.end_structure;
+
     var rc = false;

     // We assume it must exist
-    const shape_info = try shapeInfoForId(shape_id, state);
+    const shape_info = try smt.getShapeInfo(shape_id, state.file_state.shapes);
     const shape = shape_info.shape;

     // Check for ourselves up the stack
-    var self_occurences: u8 = 0;
-    for (state.type_stack.items) |i| {
-        // NOTE: shapes.get isn't providing a consistent pointer - is it allocating each time?
-        // we will therefore need to compare ids
-        if (std.mem.eql(u8, i.*.id, shape_info.id))
-            self_occurences = self_occurences + 1;
-    }
-    // Debugging
-    // if (std.mem.eql(u8, shape_info.name, "Expression")) {
-    //     std.log.info(" Type stack len: {d}, occurences: {d}\n", .{ type_stack.items.len, self_occurences });
-    //     if (type_stack.items.len > 15) {
-    //         std.log.info(" Type stack:\n", .{});
-    //         for (type_stack.items) |i|
-    //             std.log.info(" {s}: {*}", .{ i.*.id, i });
-    //         return error.BugDetected;
-    //     }
-    // }
-    // End Debugging
+    const self_occurences: u8 = state.getTypeRecurrenceCount(shape_id);
     if (self_occurences > 2) { // TODO: What's the appropriate number here?
         // TODO: Determine if this warrants the creation of another public
         // type to properly reference. Realistically, AWS or the service
@@ -751,12 +764,14 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
         // }
         return false; // not a map
     }
-    try state.type_stack.append(&shape_info);
-    defer _ = state.type_stack.pop();
+
+    try state.appendToTypeStack(&shape_info);
+    defer state.popFromTypeStack();

     switch (shape) {
         .structure => {
             if (!try reuseCommonType(shape_info, writer, state)) {
-                try generateComplexTypeFor(shape_id, shape.structure.members, "struct", writer, state);
+                try generateComplexTypeFor(shape_id, shape.structure.members, "struct", writer, state, options);
                 if (end_structure) {
                     // epilog
                     try outputIndent(state, writer);
@@ -766,7 +781,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
         },
         .uniontype => {
             if (!try reuseCommonType(shape_info, writer, state)) {
-                try generateComplexTypeFor(shape_id, shape.uniontype.members, "union", writer, state);
+                try generateComplexTypeFor(shape_id, shape.uniontype.members, "union", writer, state, options);
                 // epilog
                 try outputIndent(state, writer);
                 _ = try writer.write("}");
@@ -774,30 +789,27 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
         },
         // Document is unstructured data, so bag of bytes it is
         // https://smithy.io/2.0/spec/simple-types.html#document
-        .document => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
-        .string => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
+        .string, .@"enum", .document, .blob => try generateSimpleTypeFor({}, "[]const u8", writer),
         .unit => |s| try generateSimpleTypeFor(s, "struct {}", writer), // Would be better as void, but doing so creates inconsistency we don't want clients to have to deal with
-        .@"enum" => |s| try generateSimpleTypeFor(s, "[]const u8", writer), // This should be closer to uniontype, but the generated code will look ugly, and Smithy 2.0 requires that enums are open (clients accept unspecified values). So string is the best analog
         .integer => |s| try generateSimpleTypeFor(s, "i64", writer),
-        .list => {
+        .list => |s| {
             _ = try writer.write("[]");
             // The serializer will have to deal with the idea we might be an array
-            return try generateTypeFor(shape.list.member_target, writer, state, true);
+            return try generateTypeFor(s.member_target, writer, state, options.endStructure(true));
         },
-        .set => {
+        .set => |s| {
             _ = try writer.write("[]");
             // The serializer will have to deal with the idea we might be an array
-            return try generateTypeFor(shape.set.member_target, writer, state, true);
+            return try generateTypeFor(s.member_target, writer, state, options.endStructure(true));
         },
         .timestamp => |s| try generateSimpleTypeFor(s, "date.Timestamp", writer),
-        .blob => |s| try generateSimpleTypeFor(s, "[]const u8", writer),
         .boolean => |s| try generateSimpleTypeFor(s, "bool", writer),
         .double => |s| try generateSimpleTypeFor(s, "f64", writer),
         .float => |s| try generateSimpleTypeFor(s, "f32", writer),
         .long => |s| try generateSimpleTypeFor(s, "i64", writer),
         .map => |m| {
             if (!try reuseCommonType(shape_info, std.io.null_writer, state)) {
-                try generateMapTypeFor(m, writer, state);
+                try generateMapTypeFor(m, writer, state, options);
                 rc = true;
             } else {
                 try writer.writeAll("[]");
@@ -813,7 +825,7 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
     return rc;
 }

-fn generateMapTypeFor(map: anytype, writer: anytype, state: GenerationState) anyerror!void {
+fn generateMapTypeFor(map: anytype, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!void {
     _ = try writer.write("struct {\n");

     try writer.writeAll("pub const is_map_type = true;\n\n");
@@ -822,19 +834,16 @@ fn generateMapTypeFor(map: anytype, writer: anytype, state: GenerationState) any
     child_state.indent_level += 1;

     _ = try writer.write("key: ");
     try writeOptional(map.traits, writer, null);
-
-    _ = try generateTypeFor(map.key, writer, child_state, true);
-
-    try writeOptional(map.traits, writer, " = null");
+    _ = try generateTypeFor(map.key, writer, child_state, options.endStructure(true));
     _ = try writer.write(",\n");
+
+    const value_shape_info = try smt.getShapeInfo(map.value, state.file_state.shapes);
+    const value_traits = smt.getShapeTraits(value_shape_info.shape);
+
     _ = try writer.write("value: ");
-    try writeOptional(map.traits, writer, null);
-
-    _ = try generateTypeFor(map.value, writer, child_state, true);
-
-    try writeOptional(map.traits, writer, " = null");
+    try writeOptional(value_traits, writer, null);
+    _ = try generateTypeFor(map.value, writer, child_state, options.endStructure(true));
     _ = try writer.write(",\n");
     _ = try writer.write("}");
 }
@@ -844,7 +853,7 @@ fn generateSimpleTypeFor(_: anytype, type_name: []const u8, writer: anytype) !vo
 }

 const Mapping = struct { snake: []const u8, original: []const u8 };
-fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, writer: anytype, state: GenerationState) anyerror!void {
+fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, writer: anytype, state: GenerationState, comptime options: GenerateTypeOptions) anyerror!void {
     _ = shape_id;

     var arena = std.heap.ArenaAllocator.init(state.allocator);
@@ -876,7 +885,7 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
     var payload: ?[]const u8 = null;
     for (members) |member| {
         // This is our mapping
-        const snake_case_member = try constantName(allocator, member.name);
+        const snake_case_member = try support.constantName(allocator, member.name, .snake);
         // So it looks like some services have duplicate names?! Check out "httpMethod"
         // in API Gateway. Not sure what we're supposed to do there. Checking the go
         // sdk, they move this particular duplicate to 'http_method' - not sure yet
@@ -909,10 +918,18 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
         field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = member.name });

         try outputIndent(child_state, writer);
-        const member_name = avoidReserved(snake_case_member);
+
+        const member_name = blk: {
+            if (options.key_case == .snake) {
+                break :blk avoidReserved(snake_case_member);
+            }
+
+            break :blk avoidReserved(try case.allocTo(allocator, options.key_case, snake_case_member));
+        };
+
         try writer.print("{s}: ", .{member_name});
         try writeOptional(member.traits, writer, null);
-        if (try generateTypeFor(member.target, writer, child_state, true))
+        if (try generateTypeFor(member.target, writer, child_state, options.endStructure(true)))
             try map_fields.append(try std.fmt.allocPrint(allocator, "{s}", .{member_name}));

         if (!std.mem.eql(u8, "union", type_type_name))
@@ -945,7 +962,7 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
     if (payload) |load| {
         try writer.writeByte('\n');
         try outputIndent(child_state, writer);
-        try writer.print("pub const http_payload: []const u8 = \"{s}\";", .{load});
+        try writer.print("pub const http_payload: []const u8 = \"{s}\";\n", .{load});
     }

     try writer.writeByte('\n');
@@ -959,34 +976,6 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
     _ = try writer.write("return @field(mappings, field_name);\n");
     try outputIndent(child_state, writer);
     _ = try writer.write("}\n");
-    try writeStringify(child_state, map_fields.items, writer);
 }

-fn writeStringify(state: GenerationState, fields: [][]const u8, writer: anytype) !void {
-    if (fields.len > 0) {
-        // pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {
-        //     if (std.mem.eql(u8, "tags", field_name))
-        //         return try serializeMap(self.tags, self.jsonFieldNameFor("tags"), options, out_stream);
-        //     return false;
-        // }
-        var child_state = state;
-        child_state.indent_level += 1;
-        try writer.writeByte('\n');
-        try outputIndent(state, writer);
-        _ = try writer.write("pub fn jsonStringifyField(self: @This(), comptime field_name: []const u8, options: anytype, out_stream: anytype) !bool {\n");
-        var return_state = child_state;
-        return_state.indent_level += 1;
-        for (fields) |field| {
-            try outputIndent(child_state, writer);
-            try writer.print("if (std.mem.eql(u8, \"{s}\", field_name))\n", .{field});
-            try outputIndent(return_state, writer);
-            try writer.print("return try serializeMap(self.{s}, self.fieldNameFor(\"{s}\"), options, out_stream);\n", .{ field, field });
-        }
-        try outputIndent(child_state, writer);
-        _ = try writer.write("return false;\n");
-        try outputIndent(state, writer);
-        _ = try writer.write("}\n");
-    }
-}
-
 fn writeMappings(state: GenerationState, @"pub": []const u8, mapping_name: []const u8, mappings: anytype, force_output: bool, writer: anytype) !void {
@@ -1010,31 +999,20 @@ fn writeMappings(state: GenerationState, @"pub": []const u8, mapping_name: []con
 }

 fn writeOptional(traits: ?[]smithy.Trait, writer: anytype, value: ?[]const u8) !void {
-    if (traits) |ts| {
-        for (ts) |t|
-            if (t == .required) return;
-    }
-
-    // not required
-    if (value) |v| {
-        _ = try writer.write(v);
-    } else _ = try writer.write("?");
+    if (traits) |ts| if (smt.hasTrait(.required, ts)) return;
+    try writer.writeAll(value orelse "?");
 }
-fn camelCase(allocator: std.mem.Allocator, name: []const u8) ![]const u8 {
-    const first_letter = name[0] + ('a' - 'A');
-    return try std.fmt.allocPrint(allocator, "{c}{s}", .{ first_letter, name[1..] });
-}
-fn avoidReserved(snake_name: []const u8) []const u8 {
-    if (std.mem.eql(u8, snake_name, "error")) return "@\"error\"";
-    if (std.mem.eql(u8, snake_name, "return")) return "@\"return\"";
-    if (std.mem.eql(u8, snake_name, "not")) return "@\"not\"";
-    if (std.mem.eql(u8, snake_name, "and")) return "@\"and\"";
-    if (std.mem.eql(u8, snake_name, "or")) return "@\"or\"";
-    if (std.mem.eql(u8, snake_name, "test")) return "@\"test\"";
-    if (std.mem.eql(u8, snake_name, "null")) return "@\"null\"";
-    if (std.mem.eql(u8, snake_name, "export")) return "@\"export\"";
-    if (std.mem.eql(u8, snake_name, "union")) return "@\"union\"";
-    if (std.mem.eql(u8, snake_name, "enum")) return "@\"enum\"";
-    if (std.mem.eql(u8, snake_name, "inline")) return "@\"inline\"";
-    return snake_name;
+fn avoidReserved(name: []const u8) []const u8 {
+    if (std.mem.eql(u8, name, "error")) return "@\"error\"";
+    if (std.mem.eql(u8, name, "return")) return "@\"return\"";
+    if (std.mem.eql(u8, name, "not")) return "@\"not\"";
+    if (std.mem.eql(u8, name, "and")) return "@\"and\"";
+    if (std.mem.eql(u8, name, "or")) return "@\"or\"";
+    if (std.mem.eql(u8, name, "test")) return "@\"test\"";
+    if (std.mem.eql(u8, name, "null")) return "@\"null\"";
+    if (std.mem.eql(u8, name, "export")) return "@\"export\"";
+    if (std.mem.eql(u8, name, "union")) return "@\"union\"";
+    if (std.mem.eql(u8, name, "enum")) return "@\"enum\"";
+    if (std.mem.eql(u8, name, "inline")) return "@\"inline\"";
+    return name;
 }
```
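The condensed `writeOptional` keeps the old behavior: required members are emitted bare, everything else gets either the supplied suffix (e.g. ` = null`) or a `?` marker, with `orelse` choosing between them. A self-contained sketch of the same control flow, with a boolean standing in for the trait lookup:

```zig
const std = @import("std");

fn writeOptional(required: bool, writer: anytype, value: ?[]const u8) !void {
    if (required) return; // required members get neither `?` nor a default
    try writer.writeAll(value orelse "?");
}

pub fn main() !void {
    var buf: [32]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    try writeOptional(false, fbs.writer(), null); // optional marker
    try writeOptional(false, fbs.writer(), " = null"); // default value
    std.debug.assert(std.mem.eql(u8, fbs.getWritten(), "? = null"));
}
```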
codegen/src/serialization.zig (new file, +1)

```zig
pub const json = @import("serialization/json.zig");
```
codegen/src/serialization/json.zig (new file, +392)

```zig
const std = @import("std");
const smithy = @import("smithy");
const smithy_tools = @import("../smithy_tools.zig");
const support = @import("../support.zig");

const GenerationState = @import("../GenerationState.zig");
const GenerateTypeOptions = @import("../GenerateTypeOptions.zig");
const Allocator = std.mem.Allocator;

const Shape = smithy_tools.Shape;

const JsonMember = struct {
    field_name: []const u8,
    json_key: []const u8,
    target: []const u8,
    type_member: smithy.TypeMember,
    shape_info: smithy.ShapeInfo,
};

pub fn generateToJsonFunction(shape_id: []const u8, writer: std.io.AnyWriter, state: GenerationState, comptime options: GenerateTypeOptions) !void {
    _ = options;
    const allocator = state.allocator;

    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            try writer.writeAll("pub fn jsonStringify(self: @This(), jw: anytype) !void {\n");
            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, "self", member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");
            try writer.writeAll("}\n\n");
        }
    }
}

fn getJsonMembers(allocator: Allocator, shape: Shape, state: GenerationState) !?std.ArrayListUnmanaged(JsonMember) {
    const is_json_shape = switch (state.file_state.protocol) {
        .json_1_0, .json_1_1, .rest_json_1 => true,
        else => false,
    };

    if (!is_json_shape) {
        return null;
    }

    var hash_map = std.StringHashMapUnmanaged(smithy.TypeMember){};

    const shape_members = smithy_tools.getShapeMembers(shape);
    for (shape_members) |member| {
        try hash_map.putNoClobber(state.allocator, member.name, member);
    }

    for (shape_members) |member| {
        for (member.traits) |trait| {
            switch (trait) {
                .http_header, .http_query => {
                    std.debug.assert(hash_map.remove(member.name));
                    break;
                },
                else => continue,
            }
        }
    }

    if (hash_map.count() == 0) {
        return null;
    }

    var json_members = std.ArrayListUnmanaged(JsonMember){};

    var iter = hash_map.iterator();
    while (iter.next()) |kvp| {
        const member = kvp.value_ptr.*;

        const key = blk: {
            if (smithy_tools.findTrait(.json_name, member.traits)) |trait| {
                break :blk trait.json_name;
            }

            break :blk member.name;
        };

        try json_members.append(allocator, .{
            .field_name = try support.constantName(allocator, member.name, .snake),
            .json_key = key,
            .target = member.target,
            .type_member = member,
            .shape_info = try smithy_tools.getShapeInfo(member.target, state.file_state.shapes),
        });
    }

    return json_members;
}

fn getMemberValueJson(allocator: std.mem.Allocator, source: []const u8, member: JsonMember) ![]const u8 {
    const member_value = try std.fmt.allocPrint(allocator, "@field({s}, \"{s}\")", .{ source, member.field_name });
    defer allocator.free(member_value);

    var output_block = std.ArrayListUnmanaged(u8){};
    const writer = output_block.writer(allocator);

    try writeMemberValue(
        writer,
        member_value,
    );

    return output_block.toOwnedSlice(allocator);
}

fn getShapeJsonValueType(shape: Shape) []const u8 {
    return switch (shape) {
        .string, .@"enum", .blob, .document, .timestamp => ".string",
        .boolean => ".bool",
        .integer, .bigInteger, .short, .long => ".integer",
        .float, .double, .bigDecimal => ".float",
        else => std.debug.panic("Unexpected shape: {}", .{shape}),
    };
}

fn writeMemberValue(
    writer: anytype,
    member_value: []const u8,
) !void {
    try writer.writeAll(member_value);
}

const WriteMemberJsonParams = struct {
    shape_id: []const u8,
    field_name: []const u8,
    field_value: []const u8,
    state: GenerationState,
    member: smithy.TypeMember,
};

fn writeStructureJson(params: WriteMemberJsonParams, writer: std.io.AnyWriter) !void {
    const shape_type = "structure";
    const allocator = params.state.allocator;
    const state = params.state;

    const shape_info = try smithy_tools.getShapeInfo(params.shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    const structure_name = try std.fmt.allocPrint(params.state.allocator, "{s}_{s}_{d}", .{ params.field_name, shape_type, state.indent_level });
    defer params.state.allocator.free(structure_name);

    const object_value_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{structure_name});
    defer allocator.free(object_value_capture);

    try writer.print("\n// start {s}: {s}\n", .{ shape_type, structure_name });
    defer writer.print("// end {s}: {s}\n", .{ shape_type, structure_name }) catch std.debug.panic("Unreachable", .{});

    if (try getJsonMembers(allocator, shape, state)) |json_members| {
        if (json_members.items.len > 0) {
            const is_optional = smithy_tools.shapeIsOptional(params.member.traits);

            var object_value = params.field_value;

            if (is_optional) {
                object_value = object_value_capture;

                try writer.print("if ({s}) |{s}|", .{ params.field_value, object_value_capture });
                try writer.writeAll("{\n");
            }

            try writer.writeAll("try jw.beginObject();\n");
            try writer.writeAll("{\n");

            // this is a workaround in case a child structure doesn't have any fields
            // and therefore doesn't use the structure variable so we capture it here.
            // the compiler should optimize this away
            try writer.print("const unused_capture_{s} = {s};\n", .{ structure_name, object_value });
            try writer.print("_ = unused_capture_{s};\n", .{structure_name});

            for (json_members.items) |member| {
                const member_value = try getMemberValueJson(allocator, object_value, member);
                defer allocator.free(member_value);

                try writer.print("try jw.objectField(\"{s}\");\n", .{member.json_key});
                try writeMemberJson(
                    .{
                        .shape_id = member.target,
                        .field_name = member.field_name,
                        .field_value = member_value,
                        .state = state.indent(),
                        .member = member.type_member,
                    },
                    writer,
                );
            }

            try writer.writeAll("}\n");
            try writer.writeAll("try jw.endObject();\n");

            if (is_optional) {
                try writer.writeAll("} else {\n");
                try writer.writeAll("try jw.write(null);\n");
                try writer.writeAll("}\n");
            }
        }
    }
}

fn writeListJson(list: smithy_tools.ListShape, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const state = params.state;
    const allocator = state.allocator;

    const list_name = try std.fmt.allocPrint(allocator, "{s}_list_{d}", .{ params.field_name, state.indent_level });
    defer state.allocator.free(list_name);

    try writer.print("\n// start list: {s}\n", .{list_name});
    defer writer.print("// end list: {s}\n", .{list_name}) catch std.debug.panic("Unreachable", .{});

    const list_each_value = try std.fmt.allocPrint(allocator, "{s}_value", .{list_name});
    defer allocator.free(list_each_value);

    const list_capture = try std.fmt.allocPrint(allocator, "{s}_capture", .{list_name});
    defer allocator.free(list_capture);

    {
        const list_is_optional = smithy_tools.shapeIsOptional(list.traits);

        var list_value = params.field_value;

        if (list_is_optional) {
            list_value = list_capture;

            try writer.print("if ({s}) |{s}| ", .{
                params.field_value,
                list_capture,
            });
            try writer.writeAll("{\n");
        }

        // start loop
        try writer.writeAll("try jw.beginArray();\n");
        try writer.print("for ({s}) |{s}|", .{ list_value, list_each_value });
        try writer.writeAll("{\n");
        try writer.writeAll("try jw.write(");
        try writeMemberValue(
            writer,
            list_each_value,
        );
        try writer.writeAll(");\n");
        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endArray();\n");
        // end loop

        if (list_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeMapJson(map: smithy_tools.MapShape, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const state = params.state;
    const name = params.field_name;
    const value = params.field_value;
    const allocator = state.allocator;

    const map_name = try std.fmt.allocPrint(allocator, "{s}_object_map_{d}", .{ name, state.indent_level });
    defer allocator.free(map_name);

    try writer.print("\n// start map: {s}\n", .{map_name});
    defer writer.print("// end map: {s}\n", .{map_name}) catch std.debug.panic("Unreachable", .{});

    const map_value_capture = try std.fmt.allocPrint(allocator, "{s}_kvp", .{map_name});
    defer allocator.free(map_value_capture);

    const map_capture_key = try std.fmt.allocPrint(allocator, "{s}.key", .{map_value_capture});
    defer allocator.free(map_capture_key);

    const map_capture_value = try std.fmt.allocPrint(allocator, "{s}.value", .{map_value_capture});
    defer allocator.free(map_capture_value);

    const value_shape_info = try smithy_tools.getShapeInfo(map.value, state.file_state.shapes);

    const value_member = smithy.TypeMember{
        .name = "value",
        .target = map.value,
        .traits = smithy_tools.getShapeTraits(value_shape_info.shape),
    };

    const map_capture = try std.fmt.allocPrint(state.allocator, "{s}_capture", .{map_name});

    {
        const map_member = params.member;
        const map_is_optional = !smithy_tools.hasTrait(.required, map_member.traits);

        var map_value = value;

        if (map_is_optional) {
            map_value = map_capture;

            try writer.print("if ({s}) |{s}| ", .{
                value,
                map_capture,
            });
            try writer.writeAll("{\n");
        }

        try writer.writeAll("try jw.beginObject();\n");
        try writer.writeAll("{\n");

        // start loop
        try writer.print("for ({s}) |{s}|", .{ map_value, map_value_capture });
        try writer.writeAll("{\n");
        try writer.print("try jw.objectField({s});\n", .{map_capture_key});

        try writeMemberJson(.{
            .shape_id = map.value,
            .field_name = "value",
            .field_value = map_capture_value,
            .state = state.indent(),
            .member = value_member,
        }, writer);

        try writer.writeAll("}\n");
        // end loop

        try writer.writeAll("}\n");
        try writer.writeAll("try jw.endObject();\n");

        if (map_is_optional) {
            try writer.writeAll("} else {\n");
            try writer.writeAll("try jw.write(null);\n");
            try writer.writeAll("}\n");
        }
    }
}

fn writeScalarJson(comment: []const u8, params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    try writer.print("try jw.write({s}); // {s}\n\n", .{ params.field_value, comment });
}

fn writeMemberJson(params: WriteMemberJsonParams, writer: std.io.AnyWriter) anyerror!void {
    const shape_id = params.shape_id;
    const state = params.state;
    const shape_info = try smithy_tools.getShapeInfo(shape_id, state.file_state.shapes);
    const shape = shape_info.shape;

    if (state.getTypeRecurrenceCount(shape_id) > 2) {
        return;
    }

    try state.appendToTypeStack(&shape_info);
    defer state.popFromTypeStack();

    switch (shape) {
        .structure, .uniontype => try writeStructureJson(params, writer),
        .list => |l| try writeListJson(l, params, writer),
        .map => |m| try writeMapJson(m, params, writer),
        .timestamp => try writeScalarJson("timestamp", params, writer),
        .string => try writeScalarJson("string", params, writer),
        .@"enum" => try writeScalarJson("enum", params, writer),
        .document => try writeScalarJson("document", params, writer),
        .blob => try writeScalarJson("blob", params, writer),
        .boolean => try writeScalarJson("bool", params, writer),
        .float => try writeScalarJson("float", params, writer),
        .integer => try writeScalarJson("integer", params, writer),
        .long => try writeScalarJson("long", params, writer),
        .double => try writeScalarJson("double", params, writer),
        .bigDecimal => try writeScalarJson("bigDecimal", params, writer),
        .bigInteger => try writeScalarJson("bigInteger", params, writer),
        .unit => try writeScalarJson("unit", params, writer),
        .byte => try writeScalarJson("byte", params, writer),
        .short => try writeScalarJson("short", params, writer),
        .service, .resource, .operation, .member, .set => std.debug.panic("Shape type not supported: {}", .{shape}),
    }
}
```
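`generateToJsonFunction` emits a `jsonStringify(self, jw)` method on each request struct, matching the newer `std.json` write-stream interface, and `writeMemberJson` reuses the same type-stack recursion guard as `generateTypeFor`. A hand-written sketch of the kind of method the generator produces for a struct with one optional member (names are illustrative, not actual generator output):

```zig
const std = @import("std");

const Example = struct {
    name: ?[]const u8 = null,

    pub fn jsonStringify(self: @This(), jw: anytype) !void {
        try jw.beginObject();
        try jw.objectField("Name");
        try jw.write(self.name); // std.json writes `null` for unset optionals
        try jw.endObject();
    }
};

pub fn main() !void {
    var out = std.ArrayList(u8).init(std.heap.page_allocator);
    defer out.deinit();
    try std.json.stringify(Example{ .name = "demo" }, .{}, out.writer());
    std.debug.print("{s}\n", .{out.items}); // {"Name":"demo"}
}
```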
codegen/src/smithy_tools.zig (new file, +67)

```zig
const std = @import("std");
const smithy = @import("smithy");

pub const Shape = @FieldType(smithy.ShapeInfo, "shape");
pub const ServiceShape = @TypeOf((Shape{ .service = undefined }).service);
pub const ListShape = @TypeOf((Shape{ .list = undefined }).list);
pub const MapShape = @TypeOf((Shape{ .map = undefined }).map);

pub fn getShapeInfo(id: []const u8, shapes: std.StringHashMap(smithy.ShapeInfo)) !smithy.ShapeInfo {
    return shapes.get(id) orelse {
        std.debug.print("Shape ID not found. This is most likely a bug. Shape ID: {s}\n", .{id});
        return error.InvalidType;
    };
}

pub fn getShapeTraits(shape: Shape) []smithy.Trait {
    return switch (shape) {
        .service, .operation, .resource => std.debug.panic("Unexpected shape type: {}", .{shape}),
        inline else => |s| s.traits,
    };
}

pub fn getShapeMembers(shape: Shape) []smithy.TypeMember {
    return switch (shape) {
        inline .structure, .uniontype => |s| s.members,
        else => std.debug.panic("Unexpected shape type: {}", .{shape}),
    };
}

pub fn shapeIsLeaf(shape: Shape) bool {
    return switch (shape) {
        .@"enum",
        .bigDecimal,
        .bigInteger,
        .blob,
        .boolean,
        .byte,
        .document,
        .double,
        .float,
        .integer,
        .long,
        .short,
        .string,
        .timestamp,
        => true,
        else => false,
    };
}

pub fn shapeIsOptional(traits: []smithy.Trait) bool {
    return !hasTrait(.required, traits);
}

pub fn findTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) ?smithy.Trait {
    for (traits) |trait| {
        if (trait == trait_type) {
            return trait;
        }
    }

    return null;
}

pub fn hasTrait(trait_type: smithy.TraitType, traits: []smithy.Trait) bool {
    return findTrait(trait_type, traits) != null;
}
```
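`getShapeTraits` and `getShapeMembers` lean on `inline else`/`inline` switch prongs: the compiler stamps out one branch per union variant, so the shared `.traits`/`.members` field access type-checks against each payload individually. A self-contained sketch of the trick:

```zig
const std = @import("std");

const U = union(enum) {
    a: struct { x: u32, extra: bool },
    b: struct { x: u32 },
};

fn getX(u: U) u32 {
    return switch (u) {
        // one comptime-generated branch per variant; `.x` resolves per payload type
        inline else => |payload| payload.x,
    };
}

pub fn main() void {
    std.debug.assert(getX(.{ .a = .{ .x = 1, .extra = false } }) == 1);
    std.debug.assert(getX(.{ .b = .{ .x = 2 } }) == 2);
}
```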
codegen/src/support.zig (new file, +33)

```zig
const std = @import("std");
const case = @import("case");

const Allocator = std.mem.Allocator;

pub fn constantName(allocator: Allocator, id: []const u8, comptime to_case: case.Case) ![]const u8 {
    // There are some ids that don't follow consistent rules, so we'll
    // look for the exceptions and, if not found, revert to the snake case
    // algorithm

    var buf = std.mem.zeroes([256]u8);
    @memcpy(buf[0..id.len], id);

    var name = try allocator.dupe(u8, id);

    const simple_replacements = &.{
        &.{ "DevOps", "Devops" },
        &.{ "IoT", "Iot" },
        &.{ "FSx", "Fsx" },
        &.{ "CloudFront", "Cloudfront" },
    };

    inline for (simple_replacements) |rep| {
        if (std.mem.indexOf(u8, name, rep[0])) |idx| @memcpy(name[idx .. idx + rep[0].len], rep[1]);
    }

    if (to_case == .snake) {
        if (std.mem.eql(u8, id, "SESv2")) return try std.fmt.allocPrint(allocator, "ses_v2", .{});
        if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});
    }

    return try case.allocTo(allocator, to_case, name);
}
```
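Two things stand out in `constantName`. First, the 256-byte `buf` is filled but never read afterwards, so it looks like leftover scaffolding (and `@memcpy` into it would go out of bounds for ids longer than 256 bytes). Second, the in-place `@memcpy` replacement only works because every pair ("DevOps"/"Devops", "IoT"/"Iot", and so on) has identical length. A self-contained sketch of that equal-length replacement:

```zig
const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // dupe gives a mutable copy, so the replacement can happen in place;
    // this only works because "IoT" and "Iot" are the same length.
    const name = try allocator.dupe(u8, "IoTSiteWise");
    defer allocator.free(name);
    if (std.mem.indexOf(u8, name, "IoT")) |idx| @memcpy(name[idx .. idx + 3], "Iot");
    std.debug.assert(std.mem.eql(u8, name, "IotSiteWise"));
}
```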
```diff
@@ -61,7 +61,6 @@ pub fn parseIso8601ToTimestamp(data: []const u8) !i64 {
     return try dateTimeToTimestamp(try parseIso8601ToDateTime(data));
 }

 const IsoParsingState = enum { Start, Year, Month, Day, Hour, Minute, Second, Millisecond, End };
 /// Converts a string to a timestamp value. May not handle dates before the
 /// epoch
 pub fn parseIso8601ToDateTime(data: []const u8) !DateTime {
```
```diff
@@ -1,6 +1,5 @@
 const std = @import("std");
 const zeit = @import("zeit");
-const json = @import("json");

 pub const DateFormat = enum {
     rfc1123,
@@ -10,18 +9,20 @@ pub const DateFormat = enum {
 pub const Timestamp = enum(zeit.Nanoseconds) {
     _,

-    pub fn jsonStringify(value: Timestamp, options: json.StringifyOptions, out_stream: anytype) !void {
-        _ = options;
-
-        const instant = try zeit.instant(.{
+    pub fn jsonStringify(value: Timestamp, jw: anytype) !void {
+        const instant = zeit.instant(.{
             .source = .{
                 .unix_nano = @intFromEnum(value),
             },
-        });
+        }) catch std.debug.panic("Failed to parse timestamp to instant: {d}", .{value});

-        try out_stream.writeAll("\"");
-        try instant.time().gofmt(out_stream, "Mon, 02 Jan 2006 15:04:05 GMT");
-        try out_stream.writeAll("\"");
+        const fmt = "Mon, 02 Jan 2006 15:04:05 GMT";
+        var buf = std.mem.zeroes([fmt.len]u8);
+
+        var fbs = std.io.fixedBufferStream(&buf);
+        instant.time().gofmt(fbs.writer(), fmt) catch std.debug.panic("Failed to format instant: {d}", .{instant.timestamp});
+
+        try jw.write(&buf);
     }

     pub fn parse(val: []const u8) !Timestamp {
```
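The rewritten `jsonStringify` formats into a stack buffer sized exactly `[fmt.len]u8` and then writes the whole buffer. That only round-trips because this go-style RFC 1123 pattern renders to exactly its own length for every date (weekday and month are fixed three-letter tokens, the day is zero-padded); a shorter rendering would leave stale zero bytes in the tail. A trivial check of that invariant:

```zig
const std = @import("std");

pub fn main() void {
    // every field in this pattern is fixed-width, so rendered dates
    // are always exactly fmt.len (29) bytes
    const fmt = "Mon, 02 Jan 2006 15:04:05 GMT";
    std.debug.assert(fmt.len == 29);
}
```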
```diff
@@ -37,7 +37,7 @@ fn serializeMapKey(key: []const u8, options: anytype, out_stream: anytype) !void
     }
 }

-pub fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
+fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
     if (map.len == 0) {
         try out_stream.writeByte('{');
         try out_stream.writeByte('}');
@@ -55,12 +55,11 @@ pub fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype)
     try out_stream.writeByte('\n');

     for (map, 0..) |tag, i| {
-        if (tag.key == null or tag.value == null) continue;
         // TODO: Deal with escaping and general "json.stringify" the values...
         if (child_options.whitespace) |ws|
             try ws.outputIndent(out_stream);
         try out_stream.writeByte('"');
-        try jsonEscape(tag.key.?, child_options, out_stream);
+        try jsonEscape(tag.key, child_options, out_stream);
         _ = try out_stream.write("\":");
         if (child_options.whitespace) |ws| {
             if (ws.separator) {
@@ -68,7 +67,7 @@ pub fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype)
             }
         }
         try out_stream.writeByte('"');
-        try jsonEscape(tag.value.?, child_options, out_stream);
+        try jsonEscape(tag.value, child_options, out_stream);
         try out_stream.writeByte('"');
         if (i < map.len - 1) {
             try out_stream.writeByte(',');
```
@@ -1372,137 +1371,8 @@ pub const Value = union(enum) {
    String: []const u8,
    Array: Array,
    Object: ObjectMap,

    pub fn jsonStringify(
        value: @This(),
        options: StringifyOptions,
        out_stream: anytype,
    ) @TypeOf(out_stream).Error!void {
        switch (value) {
            .Null => try stringify(null, options, out_stream),
            .Bool => |inner| try stringify(inner, options, out_stream),
            .Integer => |inner| try stringify(inner, options, out_stream),
            .Float => |inner| try stringify(inner, options, out_stream),
            .NumberString => |inner| try out_stream.writeAll(inner),
            .String => |inner| try stringify(inner, options, out_stream),
            .Array => |inner| try stringify(inner.items, options, out_stream),
            .Object => |inner| {
                try out_stream.writeByte('{');
                var field_output = false;
                var child_options = options;
                if (child_options.whitespace) |*child_whitespace| {
                    child_whitespace.indent_level += 1;
                }
                var it = inner.iterator();
                while (it.next()) |entry| {
                    if (!field_output) {
                        field_output = true;
                    } else {
                        try out_stream.writeByte(',');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        try out_stream.writeByte('\n');
                        try child_whitespace.outputIndent(out_stream);
                    }

                    try stringify(entry.key_ptr, options, out_stream);
                    try out_stream.writeByte(':');
                    if (child_options.whitespace) |child_whitespace| {
                        if (child_whitespace.separator) {
                            try out_stream.writeByte(' ');
                        }
                    }
                    try stringify(entry.value_ptr, child_options, out_stream);
                }
                if (field_output) {
                    if (options.whitespace) |whitespace| {
                        try out_stream.writeByte('\n');
                        try whitespace.outputIndent(out_stream);
                    }
                }
                try out_stream.writeByte('}');
            },
        }
    }

    pub fn dump(self: Value) void {
        std.debug.lockStdErr();
        defer std.debug.unlockStdErr();

        const stderr = std.io.getStdErr().writer();
        stringify(self, StringifyOptions{ .whitespace = null }, stderr) catch return;
    }
};

pub fn dump(value: anytype) void {
    var held = std.debug.getStderrMutex().acquire();
    defer held.release();

    const stderr = std.io.getStdErr().writer();
    stringify(value, StringifyOptions{ .whitespace = null }, stderr) catch return;
}

test "Value.jsonStringify" {
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try @as(Value, .Null).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "null");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Bool = true }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "true");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Integer = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "42");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .NumberString = "43" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "43");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .Float = 42 }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "4.2e1");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        try (Value{ .String = "weeee" }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "\"weeee\"");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var vals = [_]Value{
            .{ .Integer = 1 },
            .{ .Integer = 2 },
            .{ .NumberString = "3" },
        };
        try (Value{
            .Array = Array.fromOwnedSlice(undefined, &vals),
        }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "[1,2,3]");
    }
    {
        var buffer: [10]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buffer);
        var obj = ObjectMap.init(testing.allocator);
        defer obj.deinit();
        try obj.putNoClobber("a", .{ .String = "b" });
        try (Value{ .Object = obj }).jsonStringify(.{}, fbs.writer());
        try testing.expectEqualSlices(u8, fbs.getWritten(), "{\"a\":\"b\"}");
    }
}

/// parse tokens from a stream, returning `false` if they do not decode to `value`
fn parsesTo(comptime T: type, value: T, tokens: *TokenStream, options: ParseOptions) !bool {
    // TODO: should be able to write this function to not require an allocator
@@ -2924,405 +2794,3 @@ fn outputUnicodeEscape(
        try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
    }
}

pub fn stringify(
    value: anytype,
    options: StringifyOptions,
    out_stream: anytype,
) !void {
    const T = @TypeOf(value);
    switch (@typeInfo(T)) {
        .float, .comptime_float => {
            return std.fmt.format(out_stream, "{e}", .{value});
        },
        .int, .comptime_int => {
            return std.fmt.formatIntValue(value, "", std.fmt.FormatOptions{}, out_stream);
        },
        .bool => {
            return out_stream.writeAll(if (value) "true" else "false");
        },
        .null => {
            return out_stream.writeAll("null");
        },
        .optional => {
            if (value) |payload| {
                return try stringify(payload, options, out_stream);
            } else {
                return try stringify(null, options, out_stream);
            }
        },
        .@"enum" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            @compileError("Unable to stringify enum '" ++ @typeName(T) ++ "'");
        },
        .@"union" => {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            const info = @typeInfo(T).@"union";
            if (info.tag_type) |UnionTagType| {
                inline for (info.fields) |u_field| {
                    if (value == @field(UnionTagType, u_field.name)) {
                        return try stringify(@field(value, u_field.name), options, out_stream);
                    }
                }
            } else {
                @compileError("Unable to stringify untagged union '" ++ @typeName(T) ++ "'");
            }
        },
        .@"struct" => |S| {
            if (comptime std.meta.hasFn(T, "jsonStringify")) {
                return value.jsonStringify(options, out_stream);
            }

            try out_stream.writeByte('{');
            var field_output = false;
            var child_options = options;
            if (child_options.whitespace) |*child_whitespace| {
                child_whitespace.indent_level += 1;
            }
            inline for (S.fields) |field| {
                // don't include void fields
                if (field.type == void) continue;

                var output_this_field = true;
                if (!options.emit_null and @typeInfo(field.type) == .optional and @field(value, field.name) == null) output_this_field = false;

                const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
                    value.fieldNameFor(field.name)
                else
                    field.name;
                if (options.exclude_fields) |exclude_fields| {
                    for (exclude_fields) |exclude_field| {
                        if (std.mem.eql(u8, final_name, exclude_field)) {
                            output_this_field = false;
                        }
                    }
                }

                if (!field_output) {
                    field_output = output_this_field;
                } else {
                    if (output_this_field) try out_stream.writeByte(',');
                }
                if (child_options.whitespace) |child_whitespace| {
                    if (output_this_field) try out_stream.writeByte('\n');
                    if (output_this_field) try child_whitespace.outputIndent(out_stream);
                }
                var field_written = false;
                if (comptime std.meta.hasFn(T, "jsonStringifyField")) {
                    if (output_this_field) field_written = try value.jsonStringifyField(field.name, child_options, out_stream);
                }

                if (!field_written) {
                    if (output_this_field) {
                        try stringify(final_name, options, out_stream);
                        try out_stream.writeByte(':');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        if (child_whitespace.separator) {
                            if (output_this_field) try out_stream.writeByte(' ');
                        }
                    }
                    if (output_this_field) {
                        try stringify(@field(value, field.name), child_options, out_stream);
                    }
                }
            }
            if (field_output) {
                if (options.whitespace) |whitespace| {
                    try out_stream.writeByte('\n');
                    try whitespace.outputIndent(out_stream);
                }
            }
            try out_stream.writeByte('}');
            return;
        },
        .error_set => return stringify(@as([]const u8, @errorName(value)), options, out_stream),
        .pointer => |ptr_info| switch (ptr_info.size) {
            .one => switch (@typeInfo(ptr_info.child)) {
                .array => {
                    const Slice = []const std.meta.Elem(ptr_info.child);
                    return stringify(@as(Slice, value), options, out_stream);
                },
                else => {
                    // TODO: avoid loops?
                    return stringify(value.*, options, out_stream);
                },
            },
            // TODO: .Many when there is a sentinel (waiting for https://github.com/ziglang/zig/pull/3972)
            .slice => {
                if (ptr_info.child == u8 and options.string == .String and std.unicode.utf8ValidateSlice(value)) {
                    try out_stream.writeByte('\"');
                    var i: usize = 0;
                    while (i < value.len) : (i += 1) {
                        switch (value[i]) {
                            // normal ascii character
                            0x20...0x21, 0x23...0x2E, 0x30...0x5B, 0x5D...0x7F => |c| try out_stream.writeByte(c),
                            // only 2 characters that *must* be escaped
                            '\\' => try out_stream.writeAll("\\\\"),
                            '\"' => try out_stream.writeAll("\\\""),
                            // solidus is optional to escape
                            '/' => {
                                if (options.string.String.escape_solidus) {
                                    try out_stream.writeAll("\\/");
                                } else {
                                    try out_stream.writeByte('/');
                                }
                            },
                            // control characters with short escapes
                            // TODO: option to switch between unicode and 'short' forms?
                            0x8 => try out_stream.writeAll("\\b"),
                            0xC => try out_stream.writeAll("\\f"),
                            '\n' => try out_stream.writeAll("\\n"),
                            '\r' => try out_stream.writeAll("\\r"),
                            '\t' => try out_stream.writeAll("\\t"),
                            else => {
                                const ulen = std.unicode.utf8ByteSequenceLength(value[i]) catch unreachable;
                                // control characters (only things left with 1 byte length) should always be printed as unicode escapes
                                if (ulen == 1 or options.string.String.escape_unicode) {
                                    const codepoint = std.unicode.utf8Decode(value[i .. i + ulen]) catch unreachable;
                                    try outputUnicodeEscape(codepoint, out_stream);
                                } else {
                                    try out_stream.writeAll(value[i .. i + ulen]);
                                }
                                i += ulen - 1;
                            },
                        }
                    }
                    try out_stream.writeByte('\"');
                    return;
                }

                if (@typeInfo(ptr_info.child) == .@"struct" and @hasDecl(ptr_info.child, "is_map_type")) {
                    try serializeMapAsObject(value, options, out_stream);
                    return;
                }

                var child_options = options;
                if (child_options.whitespace) |*whitespace| {
                    whitespace.indent_level += 1;
                }

                try out_stream.writeByte('[');
                for (value, 0..) |x, i| {
                    if (i != 0) {
                        try out_stream.writeByte(',');
                    }
                    if (child_options.whitespace) |child_whitespace| {
                        try out_stream.writeByte('\n');
                        try child_whitespace.outputIndent(out_stream);
                    }
                    try stringify(x, child_options, out_stream);
                }
                if (value.len != 0) {
                    if (options.whitespace) |whitespace| {
                        try out_stream.writeByte('\n');
                        try whitespace.outputIndent(out_stream);
                    }
                }
                try out_stream.writeByte(']');
                return;
            },
            else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
        },
        .array => return stringify(&value, options, out_stream),
        .vector => |info| {
            const array: [info.len]info.child = value;
            return stringify(&array, options, out_stream);
        },
        else => @compileError("Unable to stringify type '" ++ @typeName(T) ++ "'"),
    }
    unreachable;
}
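The struct branch above honors two SDK-specific hooks, fieldNameFor (rename a field for output) and jsonStringifyField (take over serialization of one field), plus an exclude_fields option. A hedged sketch of the rename hook (the struct and mapping are illustrative, not SDK code):

const Widget = struct {
    function_name: []const u8,

    // stringify calls value.fieldNameFor(field.name) when this decl exists;
    // here we hand back the PascalCase key AWS's JSON protocols expect.
    pub fn fieldNameFor(_: @This(), field_name: []const u8) []const u8 {
        if (std.mem.eql(u8, field_name, "function_name")) return "FunctionName";
        return field_name;
    }
};
// stringify(Widget{ .function_name = "foo" }, .{}, writer)
// would emit {"FunctionName":"foo"} rather than {"function_name":"foo"}.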
fn teststringify(expected: []const u8, value: anytype, options: StringifyOptions) !void {
    const ValidationWriter = struct {
        const Self = @This();
        pub const Writer = std.io.Writer(*Self, Error, write);
        pub const Error = error{
            TooMuchData,
            DifferentData,
        };

        expected_remaining: []const u8,

        fn init(exp: []const u8) Self {
            return .{ .expected_remaining = exp };
        }

        pub fn writer(self: *Self) Writer {
            return .{ .context = self };
        }

        fn write(self: *Self, bytes: []const u8) Error!usize {
            if (self.expected_remaining.len < bytes.len) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining,
                    bytes,
                });
                return error.TooMuchData;
            }
            if (!mem.eql(u8, self.expected_remaining[0..bytes.len], bytes)) {
                std.log.warn(
                    \\====== expected this output: =========
                    \\{s}
                    \\======== instead found this: =========
                    \\{s}
                    \\======================================
                , .{
                    self.expected_remaining[0..bytes.len],
                    bytes,
                });
                return error.DifferentData;
            }
            self.expected_remaining = self.expected_remaining[bytes.len..];
            return bytes.len;
        }
    };

    var vos = ValidationWriter.init(expected);
    try stringify(value, options, vos.writer());
    if (vos.expected_remaining.len > 0) return error.NotEnoughData;
}
test "stringify basic types" {
|
||||
try teststringify("false", false, StringifyOptions{});
|
||||
try teststringify("true", true, StringifyOptions{});
|
||||
try teststringify("null", @as(?u8, null), StringifyOptions{});
|
||||
try teststringify("null", @as(?*u32, null), StringifyOptions{});
|
||||
try teststringify("42", 42, StringifyOptions{});
|
||||
try teststringify("4.2e1", 42.0, StringifyOptions{});
|
||||
try teststringify("42", @as(u8, 42), StringifyOptions{});
|
||||
try teststringify("42", @as(u128, 42), StringifyOptions{});
|
||||
try teststringify("4.2e1", @as(f32, 42), StringifyOptions{});
|
||||
try teststringify("4.2e1", @as(f64, 42), StringifyOptions{});
|
||||
try teststringify("\"ItBroke\"", @as(anyerror, error.ItBroke), StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify string" {
|
||||
try teststringify("\"hello\"", "hello", StringifyOptions{});
|
||||
try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{});
|
||||
try teststringify("\"with\\nescapes\\r\"", "with\nescapes\r", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u0001\"", "with unicode\u{1}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{80}\"", "with unicode\u{80}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u0080\"", "with unicode\u{80}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{FF}\"", "with unicode\u{FF}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u00ff\"", "with unicode\u{FF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{100}\"", "with unicode\u{100}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u0100\"", "with unicode\u{100}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{800}\"", "with unicode\u{800}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u0800\"", "with unicode\u{800}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{8000}\"", "with unicode\u{8000}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\u8000\"", "with unicode\u{8000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{D799}\"", "with unicode\u{D799}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\ud799\"", "with unicode\u{D799}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{10000}\"", "with unicode\u{10000}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\ud800\\udc00\"", "with unicode\u{10000}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"with unicode\u{10FFFF}\"", "with unicode\u{10FFFF}", StringifyOptions{});
|
||||
try teststringify("\"with unicode\\udbff\\udfff\"", "with unicode\u{10FFFF}", StringifyOptions{ .string = .{ .String = .{ .escape_unicode = true } } });
|
||||
try teststringify("\"/\"", "/", StringifyOptions{});
|
||||
try teststringify("\"\\/\"", "/", StringifyOptions{ .string = .{ .String = .{ .escape_solidus = true } } });
|
||||
}
|
||||
|
||||
test "stringify tagged unions" {
|
||||
try teststringify("42", union(enum) {
|
||||
Foo: u32,
|
||||
Bar: bool,
|
||||
}{ .Foo = 42 }, StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify struct" {
|
||||
try teststringify("{\"foo\":42}", struct {
|
||||
foo: u32,
|
||||
}{ .foo = 42 }, StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify struct with indentation" {
|
||||
try teststringify(
|
||||
\\{
|
||||
\\ "foo": 42,
|
||||
\\ "bar": [
|
||||
\\ 1,
|
||||
\\ 2,
|
||||
\\ 3
|
||||
\\ ]
|
||||
\\}
|
||||
,
|
||||
struct {
|
||||
foo: u32,
|
||||
bar: [3]u32,
|
||||
}{
|
||||
.foo = 42,
|
||||
.bar = .{ 1, 2, 3 },
|
||||
},
|
||||
StringifyOptions{
|
||||
.whitespace = .{},
|
||||
},
|
||||
);
|
||||
try teststringify(
|
||||
"{\n\t\"foo\":42,\n\t\"bar\":[\n\t\t1,\n\t\t2,\n\t\t3\n\t]\n}",
|
||||
struct {
|
||||
foo: u32,
|
||||
bar: [3]u32,
|
||||
}{
|
||||
.foo = 42,
|
||||
.bar = .{ 1, 2, 3 },
|
||||
},
|
||||
StringifyOptions{
|
||||
.whitespace = .{
|
||||
.indent = .Tab,
|
||||
.separator = false,
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
test "stringify struct with void field" {
|
||||
try teststringify("{\"foo\":42}", struct {
|
||||
foo: u32,
|
||||
bar: void = {},
|
||||
}{ .foo = 42 }, StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify array of structs" {
|
||||
const MyStruct = struct {
|
||||
foo: u32,
|
||||
};
|
||||
try teststringify("[{\"foo\":42},{\"foo\":100},{\"foo\":1000}]", [_]MyStruct{
|
||||
MyStruct{ .foo = 42 },
|
||||
MyStruct{ .foo = 100 },
|
||||
MyStruct{ .foo = 1000 },
|
||||
}, StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify struct with custom stringifier" {
|
||||
try teststringify("[\"something special\",42]", struct {
|
||||
foo: u32,
|
||||
const Self = @This();
|
||||
pub fn jsonStringify(
|
||||
_: Self,
|
||||
options: StringifyOptions,
|
||||
out_stream: anytype,
|
||||
) !void {
|
||||
try out_stream.writeAll("[\"something special\",");
|
||||
try stringify(42, options, out_stream);
|
||||
try out_stream.writeByte(']');
|
||||
}
|
||||
}{ .foo = 42 }, StringifyOptions{});
|
||||
}
|
||||
|
||||
test "stringify vector" {
|
||||
try teststringify("[1,1]", @as(@Vector(2, u32), @splat(@as(u32, 1))), StringifyOptions{});
|
||||
}
|
||||
|
|
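The astral-plane expectations in the string tests above follow standard UTF-16 surrogate encoding; a worked example for U+10000 (standard Unicode arithmetic, not project-specific):

// U+10000: subtract 0x10000 -> 0x00000, then split into two 10-bit halves:
//   high surrogate = 0xD800 + (0x00000 >> 10)   = 0xD800
//   low surrogate  = 0xDC00 + (0x00000 & 0x3FF) = 0xDC00
// which is exactly the "\ud800\udc00" the test expects.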
4
mise.toml
Normal file
@@ -0,0 +1,4 @@
[tools]
pre-commit = "latest"
"ubi:DonIsaac/zlint" = "latest"
zig = "0.14.1"
321
src/aws.zig
@@ -232,11 +229,9 @@ pub fn Request(comptime request_action: anytype) type {
            // We don't know if we need a body...guessing here, this should cover most
            var buffer = std.ArrayList(u8).init(options.client.allocator);
            defer buffer.deinit();
            var nameAllocator = ArenaAllocator.init(options.client.allocator);
            defer nameAllocator.deinit();
            if (Self.service_meta.aws_protocol == .rest_json_1) {
                if (std.mem.eql(u8, "PUT", aws_request.method) or std.mem.eql(u8, "POST", aws_request.method)) {
                    try json.stringify(request, .{ .whitespace = .{}, .emit_null = false, .exclude_fields = al.items }, buffer.writer());
                    try std.json.stringify(request, .{ .whitespace = .indent_4 }, buffer.writer());
                }
            }
            aws_request.body = buffer.items;
@@ -328,10 +326,7 @@ pub fn Request(comptime request_action: anytype) type {
            // smithy spec, "A null value MAY be provided or omitted
            // for a boxed member with no observable difference." But we're
            // seeing a lot of differences here between spec and reality
            //
            var nameAllocator = ArenaAllocator.init(options.client.allocator);
            defer nameAllocator.deinit();
            try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
            try std.json.stringify(request, .{ .whitespace = .indent_4 }, buffer.writer());

            var content_type: []const u8 = undefined;
            switch (Self.service_meta.aws_protocol) {
@@ -1154,9 +1149,9 @@ fn buildPath(
    defer encoded_buffer.deinit();
    const replacement_writer = replacement_buffer.writer();
    // std.mem.replacementSize
    try json.stringify(
    try std.json.stringify(
        @field(request, field.name),
        .{},
        .{ .whitespace = .indent_4 },
        replacement_writer,
    );
    const trimmed_replacement_val = std.mem.trim(u8, replacement_buffer.items, "\"");
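buildPath stringifies each path parameter and then trims the surrounding quotes because serializing a string yields a quoted JSON token. A standalone sketch of that step (std only, independent of the SDK types):

test "trimming quotes from a stringified path segment (sketch)" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try std.json.stringify(@as([]const u8, "my-function"), .{}, buf.writer());
    // buf.items now holds "\"my-function\"" -- a quoted token
    const bare = std.mem.trim(u8, buf.items, "\"");
    try std.testing.expectEqualStrings("my-function", bare);
}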
@@ -1271,7 +1266,7 @@ fn addBasicQueryArg(prefix: []const u8, key: []const u8, value: anytype, writer:
    _ = try writer.write("=");
    var encoding_writer = uriEncodingWriter(writer);
    var ignoring_writer = ignoringWriter(encoding_writer.writer(), '"');
    try json.stringify(value, .{}, ignoring_writer.writer());
    try std.json.stringify(value, .{}, ignoring_writer.writer());
    return true;
}
pub fn uriEncodingWriter(child_stream: anytype) UriEncodingWriter(@TypeOf(child_stream)) {
@@ -1390,16 +1385,20 @@ test "custom serialization for map objects" {
    tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
    tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
    const req = services.lambda.TagResourceRequest{ .resource = "hello", .tags = tags.items };
    try json.stringify(req, .{ .whitespace = .{} }, buffer.writer());
    try std.testing.expectEqualStrings(
        \\{
        \\ "Resource": "hello",
        \\ "Tags": {
        \\ "Foo": "Bar",
        \\ "Baz": "Qux"
        \\ }
        \\}
    , buffer.items);
    try std.json.stringify(req, .{ .whitespace = .indent_4 }, buffer.writer());

    const parsed_body = try std.json.parseFromSlice(struct {
        Resource: []const u8,
        Tags: struct {
            Foo: []const u8,
            Baz: []const u8,
        },
    }, testing.allocator, buffer.items, .{});
    defer parsed_body.deinit();

    try testing.expectEqualStrings("hello", parsed_body.value.Resource);
    try testing.expectEqualStrings("Bar", parsed_body.value.Tags.Foo);
    try testing.expectEqualStrings("Qux", parsed_body.value.Tags.Baz);
}

test "proper serialization for kms" {
@@ -1418,17 +1417,25 @@ test "proper serialization for kms" {
        .dry_run = false,
        .grant_tokens = &[_][]const u8{},
    };
    try json.stringify(req, .{ .whitespace = .{} }, buffer.writer());
    try std.testing.expectEqualStrings(
        \\{
        \\ "KeyId": "42",
        \\ "Plaintext": "foo",
        \\ "EncryptionContext": {},
        \\ "GrantTokens": [],
        \\ "EncryptionAlgorithm": "SYMMETRIC_DEFAULT",
        \\ "DryRun": false
        \\}
    , buffer.items);
    try std.json.stringify(req, .{ .whitespace = .indent_4 }, buffer.writer());

    {
        const parsed_body = try std.json.parseFromSlice(struct {
            KeyId: []const u8,
            Plaintext: []const u8,
            EncryptionContext: struct {},
            GrantTokens: [][]const u8,
            EncryptionAlgorithm: []const u8,
            DryRun: bool,
        }, testing.allocator, buffer.items, .{});
        defer parsed_body.deinit();

        try testing.expectEqualStrings("42", parsed_body.value.KeyId);
        try testing.expectEqualStrings("foo", parsed_body.value.Plaintext);
        try testing.expectEqual(0, parsed_body.value.GrantTokens.len);
        try testing.expectEqualStrings("SYMMETRIC_DEFAULT", parsed_body.value.EncryptionAlgorithm);
        try testing.expectEqual(false, parsed_body.value.DryRun);
    }

    var buffer_null = std.ArrayList(u8).init(allocator);
    defer buffer_null.deinit();

@@ -1441,17 +1448,27 @@ test "proper serialization for kms" {
        .dry_run = false,
        .grant_tokens = &[_][]const u8{},
    };
    try json.stringify(req_null, .{ .whitespace = .{} }, buffer_null.writer());
    try std.testing.expectEqualStrings(
        \\{
        \\ "KeyId": "42",
        \\ "Plaintext": "foo",
        \\ "EncryptionContext": null,
        \\ "GrantTokens": [],
        \\ "EncryptionAlgorithm": "SYMMETRIC_DEFAULT",
        \\ "DryRun": false
        \\}
    , buffer_null.items);

    try std.json.stringify(req_null, .{ .whitespace = .indent_4 }, buffer_null.writer());

    {
        const parsed_body = try std.json.parseFromSlice(struct {
            KeyId: []const u8,
            Plaintext: []const u8,
            EncryptionContext: ?struct {},
            GrantTokens: [][]const u8,
            EncryptionAlgorithm: []const u8,
            DryRun: bool,
        }, testing.allocator, buffer_null.items, .{});
        defer parsed_body.deinit();

        try testing.expectEqualStrings("42", parsed_body.value.KeyId);
        try testing.expectEqualStrings("foo", parsed_body.value.Plaintext);
        try testing.expectEqual(null, parsed_body.value.EncryptionContext);
        try testing.expectEqual(0, parsed_body.value.GrantTokens.len);
        try testing.expectEqualStrings("SYMMETRIC_DEFAULT", parsed_body.value.EncryptionAlgorithm);
        try testing.expectEqual(false, parsed_body.value.DryRun);
    }
}

test "REST Json v1 builds proper queries" {
@@ -1532,9 +1549,7 @@ test "basic json request serialization" {
    // for a boxed member with no observable difference." But we're
    // seeing a lot of differences here between spec and reality
    //
    var nameAllocator = ArenaAllocator.init(allocator);
    defer nameAllocator.deinit();
    try json.stringify(request, .{ .whitespace = .{} }, buffer.writer());
    try std.json.stringify(request, .{ .whitespace = .indent_4 }, buffer.writer());
    try std.testing.expectEqualStrings(
        \\{
        \\ "ExclusiveStartTableName": null,
@@ -2025,7 +2040,7 @@ test "json_1_0_query_with_input: dynamodb listTables runtime" {
    });
    defer test_harness.deinit();
    const options = try test_harness.start();
    const dynamo_db = (Services(.{.dynamo_db}){}).dynamo_db;
    const dynamo_db = services.dynamo_db;
    const call = try test_harness.client.call(dynamo_db.list_tables.Request{
        .limit = 1,
    }, options);
@@ -2035,12 +2050,16 @@ test "json_1_0_query_with_input: dynamodb listTables runtime" {
    try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method);
    try std.testing.expectEqualStrings("/", test_harness.request_options.request_target);
    try test_harness.request_options.expectHeader("X-Amz-Target", "DynamoDB_20120810.ListTables");
    try std.testing.expectEqualStrings(
        \\{
        \\ "ExclusiveStartTableName": null,
        \\ "Limit": 1
        \\}
    , test_harness.request_options.request_body);

    const parsed_body = try std.json.parseFromSlice(struct {
        ExclusiveStartTableName: ?[]const u8,
        Limit: u8,
    }, testing.allocator, test_harness.request_options.request_body, .{});
    defer parsed_body.deinit();

    try testing.expectEqual(null, parsed_body.value.ExclusiveStartTableName);
    try testing.expectEqual(1, parsed_body.value.Limit);

    // Response expectations
    try std.testing.expectEqualStrings("QBI72OUIN8U9M9AG6PCSADJL4JVV4KQNSO5AEMVJF66Q9ASUAAJG", call.response_metadata.request_id);
    try std.testing.expectEqual(@as(usize, 1), call.response.table_names.?.len);
@@ -2100,12 +2119,16 @@ test "json_1_1_query_with_input: ecs listClusters runtime" {
    try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method);
    try std.testing.expectEqualStrings("/", test_harness.request_options.request_target);
    try test_harness.request_options.expectHeader("X-Amz-Target", "AmazonEC2ContainerServiceV20141113.ListClusters");
    try std.testing.expectEqualStrings(
        \\{
        \\ "nextToken": null,
        \\ "maxResults": 1
        \\}
    , test_harness.request_options.request_body);

    const parsed_body = try std.json.parseFromSlice(struct {
        nextToken: ?[]const u8,
        maxResults: u8,
    }, testing.allocator, test_harness.request_options.request_body, .{});
    defer parsed_body.deinit();

    try testing.expectEqual(null, parsed_body.value.nextToken);
    try testing.expectEqual(1, parsed_body.value.maxResults);

    // Response expectations
    try std.testing.expectEqualStrings("b2420066-ff67-4237-b782-721c4df60744", call.response_metadata.request_id);
    try std.testing.expectEqual(@as(usize, 1), call.response.cluster_arns.?.len);
@@ -2136,12 +2159,16 @@ test "json_1_1_query_no_input: ecs listClusters runtime" {
    try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method);
    try std.testing.expectEqualStrings("/", test_harness.request_options.request_target);
    try test_harness.request_options.expectHeader("X-Amz-Target", "AmazonEC2ContainerServiceV20141113.ListClusters");
    try std.testing.expectEqualStrings(
        \\{
        \\ "nextToken": null,
        \\ "maxResults": null
        \\}
    , test_harness.request_options.request_body);

    const parsed_body = try std.json.parseFromSlice(struct {
        nextToken: ?[]const u8,
        maxResults: ?u8,
    }, testing.allocator, test_harness.request_options.request_body, .{});
    defer parsed_body.deinit();

    try testing.expectEqual(null, parsed_body.value.nextToken);
    try testing.expectEqual(null, parsed_body.value.maxResults);

    // Response expectations
    try std.testing.expectEqualStrings("e65322b2-0065-45f2-ba37-f822bb5ce395", call.response_metadata.request_id);
    try std.testing.expectEqual(@as(usize, 1), call.response.cluster_arns.?.len);
@@ -2238,13 +2265,16 @@ test "rest_json_1_work_with_lambda: lambda tagResource (only), to excercise zig
    test_harness.stop();
    // Request expectations
    try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method);
    try std.testing.expectEqualStrings(
        \\{
        \\ "Tags": {
        \\ "Foo": "Bar"
        \\ }
        \\}
    , test_harness.request_options.request_body);

    const parsed_body = try std.json.parseFromSlice(struct {
        Tags: struct {
            Foo: []const u8,
        },
    }, testing.allocator, test_harness.request_options.request_body, .{ .ignore_unknown_fields = true });
    defer parsed_body.deinit();

    try testing.expectEqualStrings("Bar", parsed_body.value.Tags.Foo);

    // Due to 17015, we see %253A instead of %3A
    try std.testing.expectEqualStrings("/2017-03-31/tags/arn%3Aaws%3Alambda%3Aus-west-2%3A550620852718%3Afunction%3Aawsome-lambda-LambdaStackawsomeLambda", test_harness.request_options.request_target);
    // Response expectations
@@ -2276,14 +2306,19 @@ test "rest_json_1_url_parameters_not_in_request: lambda update_function_code" {
    test_harness.stop();
    // Request expectations
    try std.testing.expectEqual(std.http.Method.PUT, test_harness.request_options.request_method);
    try std.testing.expectEqualStrings(
        \\{
        \\ "ZipFile": "zipfile",
        \\ "Architectures": [
        \\ "x86_64"
        \\ ]
        \\}
    , test_harness.request_options.request_body);

    const parsed_body = try std.json.parseFromSlice(struct {
        ZipFile: []const u8,
        Architectures: [][]const u8,
    }, testing.allocator, test_harness.request_options.request_body, .{
        .ignore_unknown_fields = true,
    });
    defer parsed_body.deinit();

    try testing.expectEqualStrings("zipfile", parsed_body.value.ZipFile);
    try testing.expectEqual(1, parsed_body.value.Architectures.len);
    try testing.expectEqualStrings("x86_64", parsed_body.value.Architectures[0]);

    // Due to 17015, we see %253A instead of %3A
    try std.testing.expectEqualStrings("/2015-03-31/functions/functionname/code", test_harness.request_options.request_target);
    // Response expectations
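On the %253A comments in the two tests above, the mechanism is plain double percent-encoding (17015 refers to the upstream issue the comment cites):

// ":" encodes to "%3A" on one pass; a second pass re-encodes the '%'
// itself ("%" -> "%25"), yielding the "%253A" seen in the request target.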
@@ -2590,3 +2625,131 @@ test "test server timeout works" {
    test_harness.stop();
    std.log.debug("test complete", .{});
}

const testing = std.testing;

test "jsonStringify: structure + enums" {
    const request = services.media_convert.PutPolicyRequest{
        .policy = .{
            .http_inputs = "foo",
            .https_inputs = "bar",
            .s3_inputs = "baz",
        },
    };

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const request_json = try std.json.stringifyAlloc(std.testing.allocator, request, .{});
    defer std.testing.allocator.free(request_json);

    const parsed = try std.json.parseFromSlice(struct {
        policy: struct {
            httpInputs: []const u8,
            httpsInputs: []const u8,
            s3Inputs: []const u8,
        },
    }, testing.allocator, request_json, .{});
    defer parsed.deinit();

    try testing.expectEqualStrings("foo", parsed.value.policy.httpInputs);
    try testing.expectEqualStrings("bar", parsed.value.policy.httpsInputs);
    try testing.expectEqualStrings("baz", parsed.value.policy.s3Inputs);
}

test "jsonStringify: strings" {
    const request = services.media_convert.AssociateCertificateRequest{
        .arn = "1234",
    };

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const request_json = try std.json.stringifyAlloc(std.testing.allocator, request, .{});
    defer std.testing.allocator.free(request_json);

    try testing.expectEqualStrings("{\"arn\":\"1234\"}", request_json);
}

test "jsonStringify" {
    var tags = [_]services.media_convert.MapOfStringKeyValue{
        .{
            .key = "foo",
            .value = "bar",
        },
    };

    const request = services.media_convert.TagResourceRequest{
        .arn = "1234",
        .tags = &tags,
    };

    var arena = std.heap.ArenaAllocator.init(testing.allocator);
    defer arena.deinit();

    const request_json = try std.json.stringifyAlloc(std.testing.allocator, request, .{});
    defer std.testing.allocator.free(request_json);

    const json_parsed = try std.json.parseFromSlice(struct {
        arn: []const u8,
        tags: struct {
            foo: []const u8,
        },
    }, testing.allocator, request_json, .{});
    defer json_parsed.deinit();

    try testing.expectEqualStrings("1234", json_parsed.value.arn);
    try testing.expectEqualStrings("bar", json_parsed.value.tags.foo);
}

test "jsonStringify nullable object" {
    // structure is not null
    {
        const request = services.lambda.CreateAliasRequest{
            .function_name = "foo",
            .function_version = "bar",
            .name = "baz",
            .routing_config = services.lambda.AliasRoutingConfiguration{
                .additional_version_weights = null,
            },
        };

        const request_json = try std.json.stringifyAlloc(std.testing.allocator, request, .{});
        defer std.testing.allocator.free(request_json);

        const json_parsed = try std.json.parseFromSlice(struct {
            FunctionName: []const u8,
            FunctionVersion: []const u8,
            Name: []const u8,
            RoutingConfig: struct {
                AdditionalVersionWeights: ?struct {},
            },
        }, testing.allocator, request_json, .{ .ignore_unknown_fields = true });
        defer json_parsed.deinit();

        try testing.expectEqualStrings("foo", json_parsed.value.FunctionName);
        try testing.expectEqualStrings("bar", json_parsed.value.FunctionVersion);
        try testing.expectEqualStrings("baz", json_parsed.value.Name);
        try testing.expectEqual(null, json_parsed.value.RoutingConfig.AdditionalVersionWeights);
    }

    // structure is null
    {
        const request = services.kms.DecryptRequest{
            .key_id = "foo",
            .ciphertext_blob = "bar",
        };

        const request_json = try std.json.stringifyAlloc(std.testing.allocator, request, .{});
        defer std.testing.allocator.free(request_json);

        const json_parsed = try std.json.parseFromSlice(struct {
            KeyId: []const u8,
            CiphertextBlob: []const u8,
        }, testing.allocator, request_json, .{ .ignore_unknown_fields = true });
        defer json_parsed.deinit();

        try testing.expectEqualStrings("foo", json_parsed.value.KeyId);
        try testing.expectEqualStrings("bar", json_parsed.value.CiphertextBlob);
    }
}
@@ -1,6 +1,5 @@
//! This module provides base data structures for aws http requests
const std = @import("std");
const log = std.log.scoped(.aws_base);
pub const Request = struct {
    path: []const u8 = "/",
    query: []const u8 = "",
|
@ -8,7 +8,7 @@ pub fn snakeToCamel(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
|
|||
var rc = try allocator.alloc(u8, name.len);
|
||||
while (utf8_name.nextCodepoint()) |cp| {
|
||||
if (cp > 0xff) return error.UnicodeNotSupported;
|
||||
const ascii_char = @as(u8, @truncate(cp));
|
||||
const ascii_char: u8 = @truncate(cp);
|
||||
if (ascii_char != '_') {
|
||||
if (previous_ascii == '_' and ascii_char >= 'a' and ascii_char <= 'z') {
|
||||
const uppercase_char = ascii_char - ('a' - 'A');
|
||||
|
|
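A hedged usage sketch of the conversion this loop performs (assuming, as the surrounding code suggests, that underscores are dropped and the following lowercase letter is uppercased in the returned slice):

// snakeToCamel(allocator, "function_name") -> "functionName":
// the '_' is skipped and the 'n' after it is shifted to uppercase.
const camel = try snakeToCamel(allocator, "function_name");
defer allocator.free(camel);
try std.testing.expectEqualStrings("functionName", camel);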