Compare commits
6 commits
464218b02c ... b01771df66
b01771df66
a056d763ca
934323acf1
522ab72296
87fc872f7d
3e06eeca66
7 changed files with 196 additions and 283 deletions
@@ -215,6 +215,7 @@ fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.M
     compile.addImport("smithy", modules.get("smithy").?);
     compile.addImport("date", modules.get("date").?);
     compile.addImport("json", modules.get("json").?);
+    compile.addImport("case", modules.get("case").?);
     if (include_time) compile.addImport("zeit", modules.get("zeit").?);
 }

@@ -229,6 +230,10 @@ fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Bu
     const dep_zeit = b.dependency("zeit", args);
     const mod_zeit = dep_zeit.module("zeit");
     try result.putNoClobber("zeit", mod_zeit);
+
+    const dep_case = b.dependency("case", args);
+    const mod_case = dep_case.module("case");
+    try result.putNoClobber("case", mod_case);
     // End External dependencies

     // Private modules/dependencies

@@ -31,5 +31,9 @@
         .json = .{
             .path = "lib/json",
         },
+        .case = .{
+            .url = "git+https://github.com/travisstaloch/case.git#610caade88ca54d2745f115114b08e73e2c6fe02",
+            .hash = "N-V-__8AAIfIAAC_RzCtghVVBVdqUzB8AaaGIyvK2WWz38bC",
+        },
     },
 }

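Note (not part of the diff): the new .case entry pins the dependency to the commit named in the URL fragment. Assuming a reasonably recent Zig toolchain, an equivalent entry, hash included, can be generated with:

zig fetch --save git+https://github.com/travisstaloch/case.git#610caade88ca54d2745f115114b08e73e2c6fe02
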
@@ -1,7 +1,7 @@
 const std = @import("std");
 const smithy = @import("smithy");
-const snake = @import("snake.zig");
 const Hasher = @import("Hasher.zig");
+const case = @import("case");

 var verbose = false;

@@ -80,6 +80,7 @@ pub fn main() anyerror!void {
     if (args.len == 0)
         _ = try generateServices(allocator, ";", std.io.getStdIn(), stdout);
 }

 const OutputManifest = struct {
     model_dir_hash_digest: [Hasher.hex_multihash_len]u8,
     output_dir_hash_digest: [Hasher.hex_multihash_len]u8,

@@ -168,12 +169,13 @@ fn calculateDigests(models_dir: std.fs.Dir, output_dir: std.fs.Dir, thread_pool:
 fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype) !void {
     // The fixed buffer for output will be 2MB, which is twice as large as the size of the EC2
     // (the largest) model. We'll then flush all this at one go at the end.
-    var buffer = [_]u8{0} ** (1024 * 1024 * 2);
+    var buffer = std.mem.zeroes([1024 * 1024 * 2]u8);
     var output_stream = std.io.FixedBufferStream([]u8){
         .buffer = &buffer,
         .pos = 0,
     };
-    var writer = output_stream.writer();
+    var counting_writer = std.io.countingWriter(output_stream.writer());
+    var writer = counting_writer.writer();

     // It's probably best to create our own allocator here so we can deint at the end and
     // toss all allocations related to the services in this file

@@ -221,15 +223,26 @@ fn processFile(file_name: []const u8, output_dir: std.fs.Dir, manifest: anytype)
         allocator.free(output_file_name);
         output_file_name = new_output_file_name;
     }

+    const formatted = try zigFmt(allocator, @ptrCast(buffer[0..counting_writer.bytes_written]));
+
     // Dump our buffer out to disk
     var file = try output_dir.createFile(output_file_name, .{ .truncate = true });
     defer file.close();
-    try file.writeAll(output_stream.getWritten());
+    try file.writeAll(formatted);

     for (service_names) |name| {
         try manifest.print("pub const {s} = @import(\"{s}\");\n", .{ name, std.fs.path.basename(output_file_name) });
     }
 }

+fn zigFmt(allocator: std.mem.Allocator, buffer: [:0]const u8) ![]const u8 {
+    var tree = try std.zig.Ast.parse(allocator, buffer, .zig);
+    defer tree.deinit(allocator);
+
+    return try tree.render(allocator);
+}
+
 fn generateServicesForFilePath(
     allocator: std.mem.Allocator,
     comptime terminator: []const u8,

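Note (not part of the diff): the change above runs generated source through the compiler's own parser before writing it out, and uses std.io.countingWriter to know how much of the fixed buffer was actually filled. The sketch below strings the same std.io and std.zig.Ast calls together in a standalone program; the buffer size and sample source are arbitrary.

const std = @import("std");

// Format a Zig source string the way the new zigFmt helper does:
// parse it with std.zig.Ast, then render it back out.
fn fmtSource(allocator: std.mem.Allocator, source: [:0]const u8) ![]const u8 {
    var tree = try std.zig.Ast.parse(allocator, source, .zig);
    defer tree.deinit(allocator);
    return try tree.render(allocator);
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // Zero-filled fixed buffer plus a counting writer, mirroring processFile:
    // only the written prefix is handed to the formatter.
    var buffer = std.mem.zeroes([256]u8);
    var stream = std.io.fixedBufferStream(&buffer);
    var counting_writer = std.io.countingWriter(stream.writer());
    try counting_writer.writer().writeAll("pub   fn add(a:i32,b:i32)i32{return a+b;}");

    const len: usize = @intCast(counting_writer.bytes_written);
    const all: []u8 = &buffer;
    const written = all[0..len :0]; // buffer is zeroed, so a 0 sentinel follows the data
    const formatted = try fmtSource(allocator, written);
    defer allocator.free(formatted);
    std.debug.print("{s}", .{formatted});
}
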
@@ -454,7 +467,9 @@ fn generateAdditionalTypes(allocator: std.mem.Allocator, file_state: FileGenerat
             .allocator = allocator,
             .indent_level = 0,
         };
-        const type_name = avoidReserved(t.name);
+        const type_name = try getTypeName(allocator, t);
+        defer allocator.free(type_name);
+
         try writer.print("\npub const {s} = ", .{type_name});
         try file_state.additional_types_generated.putNoClobber(t.name, {});
         _ = try generateTypeFor(t.id, writer, state, true);

@@ -479,9 +494,10 @@ fn constantName(allocator: std.mem.Allocator, id: []const u8) ![]const u8 {
     // snake turns this into dev_ops, which is a little weird
     if (std.mem.eql(u8, id, "DevOps Guru")) return try std.fmt.allocPrint(allocator, "devops_guru", .{});
     if (std.mem.eql(u8, id, "FSx")) return try std.fmt.allocPrint(allocator, "fsx", .{});
+    if (std.mem.eql(u8, id, "ETag")) return try std.fmt.allocPrint(allocator, "e_tag", .{});

     // Not a special case - just snake it
-    return try snake.fromPascalCase(allocator, id);
+    return try case.allocTo(allocator, .snake, id);
 }

 const FileGenerationState = struct {

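Note (not part of the diff): constantName now delegates the general case to the case package added in build.zig.zon, replacing the hand-rolled snake.zig converter deleted below. A minimal sketch of the call shape, assuming allocTo returns an allocated slice exactly as the try/defer-free usage above implies; the input and expected output strings are illustrative only.

const std = @import("std");
const case = @import("case"); // the travisstaloch/case module wired up in build.zig above

test "snake-casing a PascalCase identifier" {
    const allocator = std.testing.allocator;
    const name = try case.allocTo(allocator, .snake, "DescribeInstances");
    defer allocator.free(name);
    try std.testing.expectEqualStrings("describe_instances", name);
}
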
@@ -503,7 +519,7 @@ fn outputIndent(state: GenerationState, writer: anytype) !void {
     try writer.writeByteNTimes(' ', n_chars);
 }
 fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo, file_state: FileGenerationState, writer: anytype) !void {
-    const snake_case_name = try snake.fromPascalCase(allocator, operation.name);
+    const snake_case_name = try constantName(allocator, operation.name);
     defer allocator.free(snake_case_name);

     var type_stack = std.ArrayList(*const smithy.ShapeInfo).init(allocator);

@@ -518,6 +534,35 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     child_state.indent_level += 1;
     // indent should start at 4 spaces here
     const operation_name = avoidReserved(snake_case_name);
+
+    // Request type
+    _ = try writer.print("pub const {s}Request = ", .{operation.name});
+    if (operation.shape.operation.input == null or
+        (try shapeInfoForId(operation.shape.operation.input.?, state)).shape == .unit)
+    {
+        _ = try writer.write("struct {\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    } else if (operation.shape.operation.input) |member| {
+        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
+        _ = try writer.write("\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    }
+    _ = try writer.write(";\n\n");
+
+    // Response type
+    _ = try writer.print("pub const {s}Response = ", .{operation.name});
+    if (operation.shape.operation.output == null or
+        (try shapeInfoForId(operation.shape.operation.output.?, state)).shape == .unit)
+    {
+        _ = try writer.write("struct {\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    } else if (operation.shape.operation.output) |member| {
+        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
+        _ = try writer.write("\n");
+        try generateMetadataFunction(operation_name, state, writer);
+    }
+    _ = try writer.write(";\n\n");
+
     try writer.print("pub const {s}: struct ", .{operation_name});
     _ = try writer.write("{\n");
     for (operation.shape.operation.traits) |trait| {

@@ -538,28 +583,10 @@ fn generateOperation(allocator: std.mem.Allocator, operation: smithy.ShapeInfo,
     try outputIndent(state, writer);
     try writer.print("action_name: []const u8 = \"{s}\",\n", .{operation.name});
     try outputIndent(state, writer);
-    _ = try writer.write("Request: type = ");
-    if (operation.shape.operation.input == null or
-        (try shapeInfoForId(operation.shape.operation.input.?, state)).shape == .unit)
-    {
-        _ = try writer.write("struct {\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    } else if (operation.shape.operation.input) |member| {
-        if (try generateTypeFor(member, writer, state, false)) unreachable; // we expect only structs here
-        _ = try writer.write("\n");
-        try generateMetadataFunction(operation_name, state, writer);
-    }
-    _ = try writer.write(",\n");
+    _ = try writer.print("Request: type = {s}Request,\n", .{operation.name});

     try outputIndent(state, writer);
-    _ = try writer.write("Response: type = ");
-    if (operation.shape.operation.output == null or
-        (try shapeInfoForId(operation.shape.operation.output.?, state)).shape == .unit)
-    {
-        _ = try writer.write("struct {}"); // we want to maintain consistency with other ops
-    } else if (operation.shape.operation.output) |member| {
-        if (try generateTypeFor(member, writer, state, true)) unreachable; // we expect only structs here
-    }
-    _ = try writer.write(",\n");
+    _ = try writer.print("Response: type = {s}Response,\n", .{operation.name});

     if (operation.shape.operation.errors) |errors| {
         try outputIndent(state, writer);

@@ -598,6 +625,7 @@ fn generateMetadataFunction(operation_name: []const u8, state: GenerationState,
     try outputIndent(state, writer);
     try writer.writeByte('}');
 }

 fn getErrorName(err_name: []const u8) []const u8 {
     if (endsWith("Exception", err_name))
         return err_name[0 .. err_name.len - "Exception".len];

@@ -612,6 +640,20 @@ fn endsWith(item: []const u8, str: []const u8) bool {
     return std.mem.eql(u8, item, str[str.len - item.len ..]);
 }

+fn getTypeName(allocator: std.mem.Allocator, shape: smithy.ShapeInfo) ![]const u8 {
+    const type_name = avoidReserved(shape.name);
+
+    switch (shape.shape) {
+        // maps are named like "Tags"
+        // this removes the trailing s and adds "KeyValue" suffix
+        .map => {
+            const map_type_name = avoidReserved(shape.name);
+            return try std.fmt.allocPrint(allocator, "{s}KeyValue", .{map_type_name[0 .. map_type_name.len - 1]});
+        },
+        else => return allocator.dupe(u8, type_name),
+    }
+}
+
 fn reuseCommonType(shape: smithy.ShapeInfo, writer: anytype, state: GenerationState) !bool {
     // We want to return if we're at the top level of the stack. There are three
     // reasons for this:

@@ -626,12 +668,21 @@ fn reuseCommonType(shape: smithy.ShapeInfo, writer: anytype, state: GenerationSt
     // can at least see the top level.
     // 3. When we come through at the end, we want to make sure we're writing
     // something or we'll have an infinite loop!
+
+    switch (shape.shape) {
+        .structure, .uniontype, .map => {},
+        else => return false,
+    }
+
+    const type_name = try getTypeName(state.allocator, shape);
+    defer state.allocator.free(type_name);
+
     if (state.type_stack.items.len == 1) return false;
     var rc = false;
     if (state.file_state.shape_references.get(shape.id)) |r| {
-        if (r > 1 and (shape.shape == .structure or shape.shape == .uniontype)) {
+        if (r > 1) {
             rc = true;
-            _ = try writer.write(avoidReserved(shape.name)); // This can't possibly be this easy...
+            _ = try writer.write(type_name); // This can't possibly be this easy...
             if (state.file_state.additional_types_generated.getEntry(shape.name) == null)
                 try state.file_state.additional_types_to_generate.append(shape);
         }

@@ -730,34 +781,14 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
         .double => |s| try generateSimpleTypeFor(s, "f64", writer),
         .float => |s| try generateSimpleTypeFor(s, "f32", writer),
         .long => |s| try generateSimpleTypeFor(s, "i64", writer),
-        .map => {
-            _ = try writer.write("[]struct {\n");
-            var child_state = state;
-            child_state.indent_level += 1;
-            try outputIndent(child_state, writer);
-            _ = try writer.write("key: ");
-            try writeOptional(shape.map.traits, writer, null);
-            var sub_maps = std.ArrayList([]const u8).init(state.allocator);
-            defer sub_maps.deinit();
-            if (try generateTypeFor(shape.map.key, writer, child_state, true))
-                try sub_maps.append("key");
-            try writeOptional(shape.map.traits, writer, " = null");
-            _ = try writer.write(",\n");
-            try outputIndent(child_state, writer);
-            _ = try writer.write("value: ");
-            try writeOptional(shape.map.traits, writer, null);
-            if (try generateTypeFor(shape.map.value, writer, child_state, true))
-                try sub_maps.append("value");
-            try writeOptional(shape.map.traits, writer, " = null");
-            _ = try writer.write(",\n");
-            if (sub_maps.items.len > 0) {
-                _ = try writer.write("\n");
-                try writeStringify(state, sub_maps.items, writer);
-            }
-            try outputIndent(state, writer);
-            _ = try writer.write("}");
-
-            rc = true;
+        .map => |m| {
+            if (!try reuseCommonType(shape_info, std.io.null_writer, state)) {
+                try generateMapTypeFor(m, writer, state);
+                rc = true;
+            } else {
+                try writer.writeAll("[]");
+                _ = try reuseCommonType(shape_info, writer, state);
+            }
         },
         else => {
             std.log.err("encountered unimplemented shape type {s} for shape_id {s}. Generated code will not compile", .{ @tagName(shape), shape_id });

@@ -768,41 +799,61 @@ fn generateTypeFor(shape_id: []const u8, writer: anytype, state: GenerationState
     return rc;
 }

+fn generateMapTypeFor(map: anytype, writer: anytype, state: GenerationState) anyerror!void {
+    _ = try writer.write("struct {\n");
+
+    try writer.writeAll("pub const is_map_type = true;\n\n");
+
+    var child_state = state;
+    child_state.indent_level += 1;
+
+    _ = try writer.write("key: ");
+    try writeOptional(map.traits, writer, null);
+
+    _ = try generateTypeFor(map.key, writer, child_state, true);
+
+    try writeOptional(map.traits, writer, " = null");
+    _ = try writer.write(",\n");
+
+    _ = try writer.write("value: ");
+    try writeOptional(map.traits, writer, null);
+
+    _ = try generateTypeFor(map.value, writer, child_state, true);
+
+    try writeOptional(map.traits, writer, " = null");
+    _ = try writer.write(",\n");
+    _ = try writer.write("}");
+}
+
 fn generateSimpleTypeFor(_: anytype, type_name: []const u8, writer: anytype) !void {
     _ = try writer.write(type_name); // This had required stuff but the problem was elsewhere. Better to leave as function just in case
 }

-const Mapping = struct { snake: []const u8, original: []const u8 };
 fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, type_type_name: []const u8, writer: anytype, state: GenerationState) anyerror!void {
     _ = shape_id;
-    var field_name_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
-    defer {
-        for (field_name_mappings.items) |mapping|
-            state.allocator.free(mapping.snake);
-        field_name_mappings.deinit();
-    }
+    const Mapping = struct { snake: []const u8, original: []const u8 };
+
+    var arena = std.heap.ArenaAllocator.init(state.allocator);
+    defer arena.deinit();
+    const allocator = arena.allocator();
+
+    var field_name_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    defer field_name_mappings.deinit();
     // There is an httpQueryParams trait as well, but nobody is using it. API GW
     // pretends to, but it's an empty map
     //
     // Same with httpPayload
     //
     // httpLabel is interesting - right now we just assume anything can be used - do we need to track this?
-    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
-    defer {
-        for (http_query_mappings.items) |mapping|
-            state.allocator.free(mapping.snake);
-        http_query_mappings.deinit();
-    }
-    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(state.allocator, members.len);
-    defer {
-        for (http_header_mappings.items) |mapping|
-            state.allocator.free(mapping.snake);
-        http_header_mappings.deinit();
-    }
-    var map_fields = std.ArrayList([]const u8).init(state.allocator);
-    defer {
-        for (map_fields.items) |f| state.allocator.free(f);
-        map_fields.deinit();
-    }
+    var http_query_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    defer http_query_mappings.deinit();
+
+    var http_header_mappings = try std.ArrayList(Mapping).initCapacity(allocator, members.len);
+    defer http_header_mappings.deinit();
+
+    var map_fields = std.ArrayList([]const u8).init(allocator);
+    defer map_fields.deinit();
+
     // prolog. We'll rely on caller to get the spacing correct here
     _ = try writer.write(type_type_name);
     _ = try writer.write(" {\n");

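Note (not part of the diff): the generateComplexTypeFor rewrite above swaps the per-container defer blocks, which freed every duplicated string individually, for a single arena scoped to the call. A minimal sketch of that pattern with made-up data:

const std = @import("std");

test "an arena frees every per-call allocation at once" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit(); // releases every dupe below in one shot
    const allocator = arena.allocator();

    var names = std.ArrayList([]const u8).init(allocator);
    defer names.deinit(); // harmless with an arena; no per-item frees needed

    for ([_][]const u8{ "key", "value" }) |n| {
        try names.append(try allocator.dupe(u8, n));
    }
    try std.testing.expectEqual(@as(usize, 2), names.items.len);
}
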
@@ -811,7 +862,7 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
     var payload: ?[]const u8 = null;
     for (members) |member| {
         // This is our mapping
-        const snake_case_member = try snake.fromPascalCase(state.allocator, member.name);
+        const snake_case_member = try constantName(allocator, member.name);
         // So it looks like some services have duplicate names?! Check out "httpMethod"
         // in API Gateway. Not sure what we're supposed to do there. Checking the go
         // sdk, they move this particular duplicate to 'http_method' - not sure yet

@@ -821,34 +872,34 @@ fn generateComplexTypeFor(shape_id: []const u8, members: []smithy.TypeMember, ty
            switch (trait) {
                .json_name => |n| {
                    found_name_trait = true;
-                   field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n });
+                   field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n });
                },
                .xml_name => |n| {
                    found_name_trait = true;
-                   field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n });
+                   field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n });
                },
-               .http_query => |n| http_query_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = n }),
-               .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = trait.http_header }),
+               .http_query => |n| http_query_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = n }),
+               .http_header => http_header_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = trait.http_header }),
                .http_payload => {
                    // Don't assert as that will be optimized for Release* builds
                    // We'll continue here and treat the above as a warning
                    if (payload) |first| {
                        std.log.err("Found multiple httpPayloads in violation of smithy spec! Ignoring '{s}' and using '{s}'", .{ first, snake_case_member });
                    }
-                   payload = try state.allocator.dupe(u8, snake_case_member);
+                   payload = try allocator.dupe(u8, snake_case_member);
                },
                else => {},
            }
        }
        if (!found_name_trait)
-           field_name_mappings.appendAssumeCapacity(.{ .snake = try state.allocator.dupe(u8, snake_case_member), .original = member.name });
-       defer state.allocator.free(snake_case_member);
+           field_name_mappings.appendAssumeCapacity(.{ .snake = try allocator.dupe(u8, snake_case_member), .original = member.name });

        try outputIndent(child_state, writer);
        const member_name = avoidReserved(snake_case_member);
        try writer.print("{s}: ", .{member_name});
        try writeOptional(member.traits, writer, null);
        if (try generateTypeFor(member.target, writer, child_state, true))
-           try map_fields.append(try std.fmt.allocPrint(state.allocator, "{s}", .{member_name}));
+           try map_fields.append(try std.fmt.allocPrint(allocator, "{s}", .{member_name}));

        if (!std.mem.eql(u8, "union", type_type_name))
            try writeOptional(member.traits, writer, " = null");

@@ -1,157 +0,0 @@
-const std = @import("std");
-const expectEqualStrings = std.testing.expectEqualStrings;
-
-pub fn fromPascalCase(allocator: std.mem.Allocator, name: []const u8) ![]u8 {
-    const rc = try allocator.alloc(u8, name.len * 2); // This is overkill, but is > the maximum length possibly needed
-    errdefer allocator.free(rc);
-    var utf8_name = (std.unicode.Utf8View.init(name) catch unreachable).iterator();
-    var target_inx: u64 = 0;
-    var curr_char = (try isAscii(utf8_name.nextCodepoint())).?;
-    target_inx = setNext(lowercase(curr_char), rc, target_inx);
-    var prev_char = curr_char;
-    if (try isAscii(utf8_name.nextCodepoint())) |ch| {
-        curr_char = ch;
-    } else {
-        // Single character only - we're done here
-        _ = setNext(0, rc, target_inx);
-        return rc[0..target_inx];
-    }
-    while (try isAscii(utf8_name.nextCodepoint())) |next_char| {
-        if (next_char == ' ') {
-            // a space shouldn't be happening. But if it does, it clues us
-            // in pretty well:
-            //
-            // MyStuff Is Awesome
-            //        |^
-            //        |next_char
-            //        ^
-            //        prev_codepoint/ascii_prev_char (and target_inx)
-            target_inx = setNext(lowercase(curr_char), rc, target_inx);
-            target_inx = setNext('_', rc, target_inx);
-            var maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
-            if (maybe_curr_char == null) {
-                std.log.err("Error on fromPascalCase processing name '{s}'", .{name});
-            }
-            curr_char = maybe_curr_char.?;
-            maybe_curr_char = (try isAscii(utf8_name.nextCodepoint()));
-            if (maybe_curr_char == null) {
-                // We have reached the end of the string (e.g. "Resource Explorer 2")
-                // We need to do this check before we setNext, so that we don't
-                // end up duplicating the last character
-                break;
-                // std.log.err("Error on fromPascalCase processing name '{s}', curr_char = '{}'", .{ name, curr_char });
-            }
-            target_inx = setNext(lowercase(curr_char), rc, target_inx);
-            prev_char = curr_char;
-            curr_char = maybe_curr_char.?;
-            continue;
-        }
-        if (between(curr_char, 'A', 'Z')) {
-            if (isAcronym(curr_char, next_char)) {
-                // We could be in an acronym at the start of a word. This
-                // is the only case where we actually need to look back at the
-                // previous character, and if that's the case, throw in an
-                // underscore
-                // "SAMLMySAMLAcronymThing");
-                if (between(prev_char, 'a', 'z'))
-                    target_inx = setNext('_', rc, target_inx);
-
-                //we are in an acronym - don't snake, just lower
-                target_inx = setNext(lowercase(curr_char), rc, target_inx);
-            } else {
-                target_inx = setNext('_', rc, target_inx);
-                target_inx = setNext(lowercase(curr_char), rc, target_inx);
-            }
-        } else {
-            target_inx = setNext(curr_char, rc, target_inx);
-        }
-        prev_char = curr_char;
-        curr_char = next_char;
-    }
-    // work in the last codepoint - force lowercase
-    target_inx = setNext(lowercase(curr_char), rc, target_inx);
-
-    rc[target_inx] = 0;
-    _ = allocator.resize(rc, target_inx);
-    return rc[0..target_inx];
-}
-
-fn isAcronym(char1: u8, char2: u8) bool {
-    return isAcronymChar(char1) and isAcronymChar(char2);
-}
-fn isAcronymChar(char: u8) bool {
-    return between(char, 'A', 'Z') or between(char, '0', '9');
-}
-fn isAscii(codepoint: ?u21) !?u8 {
-    if (codepoint) |cp| {
-        if (cp > 0xff) return error.UnicodeNotSupported;
-        return @as(u8, @truncate(cp));
-    }
-    return null;
-}
-
-fn setNext(ascii: u8, slice: []u8, inx: u64) u64 {
-    slice[inx] = ascii;
-    return inx + 1;
-}
-
-fn lowercase(ascii: u8) u8 {
-    var lowercase_char = ascii;
-    if (between(ascii, 'A', 'Z'))
-        lowercase_char = ascii + ('a' - 'A');
-    return lowercase_char;
-}
-
-fn between(char: u8, from: u8, to: u8) bool {
-    return char >= from and char <= to;
-}
-
-test "converts from PascalCase to snake_case" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "MyPascalCaseThing");
-    defer allocator.free(snake_case);
-    try expectEqualStrings("my_pascal_case_thing", snake_case);
-}
-test "handles from PascalCase acronyms to snake_case" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "SAMLMySAMLAcronymThing");
-    defer allocator.free(snake_case);
-    try expectEqualStrings("saml_my_saml_acronym_thing", snake_case);
-}
-test "spaces in the name" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "API Gateway");
-    defer allocator.free(snake_case);
-    try expectEqualStrings("api_gateway", snake_case);
-}
-
-test "S3" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "S3");
-    defer allocator.free(snake_case);
-    try expectEqualStrings("s3", snake_case);
-}
-
-test "ec2" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "EC2");
-    defer allocator.free(snake_case);
-    try expectEqualStrings("ec2", snake_case);
-}
-
-test "IoT 1Click Devices Service" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "IoT 1Click Devices Service");
-    defer allocator.free(snake_case);
-    // NOTE: There is some debate amoung humans about what this should
-    // turn into. Should it be iot_1click_... or iot_1_click...?
-    try expectEqualStrings("iot_1_click_devices_service", snake_case);
-}
-test "Resource Explorer 2" {
-    const allocator = std.testing.allocator;
-    const snake_case = try fromPascalCase(allocator, "Resource Explorer 2");
-    defer allocator.free(snake_case);
-    // NOTE: There is some debate amoung humans about what this should
-    // turn into. Should it be iot_1click_... or iot_1_click...?
-    try expectEqualStrings("resource_explorer_2", snake_case);
-}

@@ -14,35 +14,15 @@ const testing = std.testing;
 const mem = std.mem;
 const maxInt = std.math.maxInt;

-pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
+pub fn serializeMap(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !void {
     if (@typeInfo(@TypeOf(map)) == .optional) {
-        if (map == null)
-            return false
-        else
-            return serializeMapInternal(map.?, key, options, out_stream);
+        if (map) |m| serializeMapInternal(m, key, options, out_stream);
+    } else {
+        serializeMapInternal(map, key, options, out_stream);
     }
-    return serializeMapInternal(map, key, options, out_stream);
 }

-fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
-    if (map.len == 0) {
-        var child_options = options;
-        if (child_options.whitespace) |*child_ws|
-            child_ws.indent_level += 1;
-
-        try out_stream.writeByte('"');
-        try out_stream.writeAll(key);
-        _ = try out_stream.write("\":");
-        if (options.whitespace) |ws| {
-            if (ws.separator) {
-                try out_stream.writeByte(' ');
-            }
-        }
-        try out_stream.writeByte('{');
-        try out_stream.writeByte('}');
-        return true;
-    }
-    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
+fn serializeMapKey(key: []const u8, options: anytype, out_stream: anytype) !void {
+    var child_options = options;
+    if (child_options.whitespace) |*child_ws|
+        child_ws.indent_level += 1;

@@ -55,9 +35,25 @@ fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_str
             try out_stream.writeByte(' ');
         }
     }
+}
+
+pub fn serializeMapAsObject(map: anytype, options: anytype, out_stream: anytype) !void {
+    if (map.len == 0) {
+        try out_stream.writeByte('{');
+        try out_stream.writeByte('}');
+        return;
+    }
+
+    // TODO: Map might be [][]struct{key, value} rather than []struct{key, value}
+    var child_options = options;
+    if (child_options.whitespace) |*whitespace| {
+        whitespace.indent_level += 1;
+    }
+
     try out_stream.writeByte('{');
     if (options.whitespace) |_|
         try out_stream.writeByte('\n');

     for (map, 0..) |tag, i| {
         if (tag.key == null or tag.value == null) continue;
         // TODO: Deal with escaping and general "json.stringify" the values...

@@ -80,11 +76,17 @@ fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_str
         if (child_options.whitespace) |_|
             try out_stream.writeByte('\n');
     }

     if (options.whitespace) |ws|
         try ws.outputIndent(out_stream);
     try out_stream.writeByte('}');
-    return true;
 }

+fn serializeMapInternal(map: anytype, key: []const u8, options: anytype, out_stream: anytype) !bool {
+    try serializeMapKey(key, options, out_stream);
+    return try serializeMapAsObject(map, options, out_stream);
+}
+
 // code within jsonEscape lifted from json.zig in stdlib
 fn jsonEscape(value: []const u8, options: anytype, out_stream: anytype) !void {
     var i: usize = 0;

@@ -2983,17 +2985,17 @@ pub fn stringify(
            if (child_options.whitespace) |*child_whitespace| {
                child_whitespace.indent_level += 1;
            }
-           inline for (S.fields) |Field| {
+           inline for (S.fields) |field| {
                // don't include void fields
-               if (Field.type == void) continue;
+               if (field.type == void) continue;

                var output_this_field = true;
-               if (!options.emit_null and @typeInfo(Field.type) == .optional and @field(value, Field.name) == null) output_this_field = false;
+               if (!options.emit_null and @typeInfo(field.type) == .optional and @field(value, field.name) == null) output_this_field = false;

                const final_name = if (comptime std.meta.hasFn(T, "fieldNameFor"))
-                   value.fieldNameFor(Field.name)
+                   value.fieldNameFor(field.name)
                else
-                   Field.name;
+                   field.name;
                if (options.exclude_fields) |exclude_fields| {
                    for (exclude_fields) |exclude_field| {
                        if (std.mem.eql(u8, final_name, exclude_field)) {

@@ -3013,7 +3015,7 @@ pub fn stringify(
                }
                var field_written = false;
                if (comptime std.meta.hasFn(T, "jsonStringifyField")) {
-                   if (output_this_field) field_written = try value.jsonStringifyField(Field.name, child_options, out_stream);
+                   if (output_this_field) field_written = try value.jsonStringifyField(field.name, child_options, out_stream);
                }

                if (!field_written) {

@@ -3026,7 +3028,9 @@ pub fn stringify(
                        if (output_this_field) try out_stream.writeByte(' ');
                    }
                }
-               if (output_this_field) try stringify(@field(value, Field.name), child_options, out_stream);
+               if (output_this_field) {
+                   try stringify(@field(value, field.name), child_options, out_stream);
+               }
            }
        }
        if (field_output) {

@@ -3094,11 +3098,17 @@ pub fn stringify(
                return;
            }

-           try out_stream.writeByte('[');
+           if (@typeInfo(ptr_info.child) == .@"struct" and @hasDecl(ptr_info.child, "is_map_type")) {
+               try serializeMapAsObject(value, options, out_stream);
+               return;
+           }
+
            var child_options = options;
            if (child_options.whitespace) |*whitespace| {
                whitespace.indent_level += 1;
            }
+
+           try out_stream.writeByte('[');
            for (value, 0..) |x, i| {
                if (i != 0) {
                    try out_stream.writeByte(',');

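Note (not part of the diff): the stringify change keys off a comptime marker that generateMapTypeFor now emits on generated map entry types, so slices of those types are written as a JSON object rather than an array. A minimal sketch of that detection, with a hypothetical Tag entry type standing in for generated code:

const std = @import("std");

// Hypothetical stand-in for a generated key/value entry struct.
const Tag = struct {
    pub const is_map_type = true;
    key: ?[]const u8 = null,
    value: ?[]const u8 = null,
};

// The same check the hunk applies to ptr_info.child before routing a slice
// to serializeMapAsObject instead of the generic array path.
fn isMapEntryType(comptime Child: type) bool {
    return @typeInfo(Child) == .@"struct" and @hasDecl(Child, "is_map_type");
}

test "generated map entry types are detected at comptime" {
    try std.testing.expect(isMapEntryType(Tag));
    try std.testing.expect(!isMapEntryType(u8));
}
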
@@ -1389,7 +1389,7 @@ test "custom serialization for map objects" {
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     tags.appendAssumeCapacity(.{ .key = "Baz", .value = "Qux" });
-    const req = services.lambda.tag_resource.Request{ .resource = "hello", .tags = tags.items };
+    const req = services.lambda.TagResourceRequest{ .resource = "hello", .tags = tags.items };
     try json.stringify(req, .{ .whitespace = .{} }, buffer.writer());
     try std.testing.expectEqualStrings(
         \\{

@@ -192,7 +192,7 @@ pub fn main() anyerror!void {
     const func = fns[0];
     const arn = func.function_arn.?;
     // This is a bit ugly. Maybe a helper function in the library would help?
-    var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).pointer.child).initCapacity(allocator, 1);
+    var tags = try std.ArrayList(aws.services.lambda.TagKeyValue).initCapacity(allocator, 1);
     defer tags.deinit();
     tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
     const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };