Compare commits

...

8 Commits

Author SHA1 Message Date
b2ebc5a621
add snake comparison test 2021-06-23 18:23:56 -07:00
93536aa4ad
make primary executable a more robust integration test harness 2021-06-23 18:23:07 -07:00
06479b8cb7
more useful logging 2021-06-23 18:21:34 -07:00
6f38ecd893
update to new json/url options 2021-06-23 18:20:45 -07:00
77caa626f0
allow parsing of exponential numbers if they resolve to int 2021-06-23 18:18:42 -07:00
c2e2778d77
missing field support (not sure if we will use this) 2021-06-23 18:18:00 -07:00
3d9490de82
fix snake case comparison
code in the function required found variable to be a real count
for allocation purposes, but it was originally coded as a flag
instead, which broke any field with more than two underscores
2021-06-23 18:16:54 -07:00
f816c0cbf1
additional url encoding test cases and virtual rewrite of encoder 2021-06-23 18:14:59 -07:00
4 changed files with 247 additions and 81 deletions

View File

@@ -79,21 +79,10 @@ pub const Aws = struct {
var buffer = std.ArrayList(u8).init(self.allocator);
defer buffer.deinit();
const writer = buffer.writer();
// TODO: transformation function should be refactored for operation
// with a Writer passed in so we don't have to allocate
const transformer = struct {
allocator: *std.mem.Allocator,
const This = @This();
pub fn transform(this: This, name: []const u8) ![]const u8 {
return try case.snakeToPascal(this.allocator, name);
}
pub fn transform_deinit(this: This, name: []const u8) void {
this.allocator.free(name);
}
}{ .allocator = self.allocator };
try url.encode(request, writer, .{ .field_name_transformer = transformer });
try url.encode(request, writer, .{
.field_name_transformer = &queryFieldTransformer,
.allocator = self.allocator,
});
const continuation = if (buffer.items.len > 0) "&" else "";
const body = try std.fmt.allocPrint(self.allocator, "Action={s}&Version={s}{s}{s}\n", .{ action.action_name, service.version, continuation, buffer.items });
@@ -115,6 +104,7 @@ pub const Aws = struct {
log.err("Request:\n |{s}\nResponse:\n |{s}", .{ body, response.body });
return error.HttpFailure;
}
// log.debug("Successful return from server:\n |{s}", .{response.body});
// TODO: Check status code for badness
var stream = json.TokenStream.init(response.body);
@@ -123,9 +113,20 @@ pub const Aws = struct {
.allow_camel_case_conversion = true, // new option
.allow_snake_case_conversion = true, // new option
.allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
.allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
};
const SResponse = ServerResponse(request);
const parsed_response = try json.parse(SResponse, &stream, parser_options);
const parsed_response = json.parse(SResponse, &stream, parser_options) catch |e| {
log.err(
\\Call successful, but unexpected response from service.
\\This could be the result of a bug or a stale set of code generated
\\service models. Response from server:
\\
\\{s}
\\
, .{response.body});
return e;
};
// Grab the first (and only) object from the server. Server shape expected to be:
// { ActionResponse: {ActionResult: {...}, ResponseMetadata: {...} } }
@@ -215,3 +216,29 @@ fn FullResponse(comptime request: anytype) type {
fn Response(comptime request: anytype) type {
return request.metaInfo().action.Response;
}
fn queryFieldTransformer(field_name: []const u8, encoding_options: url.EncodingOptions) anyerror![]const u8 {
return try case.snakeToPascal(encoding_options.allocator.?, field_name);
}
// Use for debugging json responses of specific requests
// test "dummy request" {
// const allocator = std.testing.allocator;
// const svs = Services(.{.sts}){};
// const request = svs.sts.get_session_token.Request{
// .duration_seconds = 900,
// };
// const FullR = FullResponse(request);
// const response =
// var stream = json.TokenStream.init(response);
//
// const parser_options = json.ParseOptions{
// .allocator = allocator,
// .allow_camel_case_conversion = true, // new option
// .allow_snake_case_conversion = true, // new option
// .allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
// .allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
// };
// const SResponse = ServerResponse(request);
// const r = try json.parse(SResponse, &stream, parser_options);
// json.parseFree(SResponse, r, parser_options);
// }
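
Note (illustration, not part of the change): the diff above swaps the inline transformer struct for a plain function matching url.FieldNameTransformer, with the allocator it needs carried in url.EncodingOptions. A minimal sketch of that wiring, assuming the same file-local imports (std, url, case) used above; the names pascalTransformer and writeQuery are hypothetical:

// Illustrative only: any fn matching url.FieldNameTransformer can serve as the
// transformer, and the allocator it needs now travels in url.EncodingOptions
// instead of being captured in a one-off struct.
fn pascalTransformer(field_name: []const u8, options: url.EncodingOptions) anyerror![]const u8 {
    // snakeToPascal allocates, so callers must supply options.allocator
    return try case.snakeToPascal(options.allocator.?, field_name);
}

// Hypothetical call site mirroring the change above
fn writeQuery(allocator: *std.mem.Allocator, request: anytype, writer: anytype) !void {
    try url.encode(request, writer, .{
        .field_name_transformer = &pascalTransformer,
        .allocator = allocator,
    });
}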

View File

@@ -1454,6 +1454,7 @@ pub const ParseOptions = struct {
allow_camel_case_conversion: bool = false,
allow_snake_case_conversion: bool = false,
allow_unknown_fields: bool = false,
allow_missing_fields: bool = false,
};
fn camelCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool {
@@ -1471,6 +1472,17 @@ fn camelCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool {
}
return std.mem.eql(u8, field, key);
}
test "snake" {
const allocator = testing.allocator;
const options = ParseOptions{
.allocator = allocator,
.allow_camel_case_conversion = true,
.allow_snake_case_conversion = true,
.allow_unknown_fields = true,
};
try std.testing.expect(try snakeCaseComp("access_key_id", "AccessKeyId", options));
}
fn snakeCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool {
// snake case is much more intricate. Input:
// Field: user_id
@@ -1480,10 +1492,8 @@ fn snakeCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool {
// Then compare
var found: u32 = 0;
for (field) |ch| {
if (ch == '_') {
if (ch == '_')
found = found + 1;
break;
}
}
if (found == 0)
return std.mem.eql(u8, field, key);
@@ -1515,6 +1525,11 @@ fn snakeCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool {
}
inx = inx + 1;
}
// std.debug.print("comp_field, len {d}: {s}\n", .{ comp_field.len, comp_field });
// std.debug.print("normalized_key, len {d}: {s}\n", .{ normalized_key.len, normalized_key });
// std.debug.print("comp_field, last: {d}\n", .{comp_field[comp_field.len - 1]});
// std.debug.print("normalized_key, last: {d}\n", .{normalized_key[normalized_key.len - 1]});
return std.mem.eql(u8, comp_field, normalized_key);
}
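
Note (illustration, not part of the change): per the "fix snake case comparison" commit above, found is used "for allocation purposes" and so must count every underscore; a flag that stops at the first one sizes the normalized comparison buffer wrong for fields with multiple underscores (assuming the buffer is sized as field.len minus the count). A standalone sketch with a hypothetical helper, not the function above:

// Hypothetical helper, not the implementation above; assumes the normalized buffer
// is sized field.len - found, per the commit message.
fn normalizedLen(field: []const u8) usize {
    var found: usize = 0;
    for (field) |ch| {
        if (ch == '_') found += 1; // count every underscore; no early break
    }
    return field.len - found;
}

test "underscore count drives comparison buffer size" {
    // "access_key_id" (13 chars, 2 underscores) must line up with "AccessKeyId" (11 chars)
    try std.testing.expectEqual(@as(usize, 11), normalizedLen("access_key_id"));
}
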
@@ -1539,8 +1554,13 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
.Number => |n| n,
else => return error.UnexpectedToken,
};
if (!numberToken.is_integer) return error.UnexpectedToken;
// This is a bug. you can still potentially have an integer that has exponents
// if (!numberToken.is_integer) return error.UnexpectedToken;
if (numberToken.is_integer)
return try std.fmt.parseInt(T, numberToken.slice(tokens.slice, tokens.i - 1), 10);
const float = try std.fmt.parseFloat(f128, numberToken.slice(tokens.slice, tokens.i - 1));
if (std.math.round(float) != float) return error.InvalidNumber;
return @floatToInt(T, float);
},
.Optional => |optionalInfo| {
if (token == .Null) {
@@ -1679,6 +1699,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
@field(r, field.name) = default;
}
} else {
if (!options.allow_missing_fields)
return error.MissingField;
}
}
@@ -1997,6 +2018,11 @@ test "parse into struct with no fields" {
const T = struct {};
try testing.expectEqual(T{}, try parse(T, &TokenStream.init("{}"), ParseOptions{}));
}
test "parse exponential into int" {
const T = struct { int: i64 };
const r = try parse(T, &TokenStream.init("{ \"int\": 4.2e2 }"), ParseOptions{});
try testing.expectEqual(@as(i64, 420), r.int);
}
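
Note (illustration, not part of the change): the std.math.round guard added above should reject non-integral exponentials rather than truncate them. A companion test sketch, assuming the same file-local declarations (testing, parse, TokenStream, ParseOptions) as the neighboring tests:

test "parse non-integral exponential into int fails" {
    const T = struct { int: i64 };
    // 4.25e1 == 42.5, so std.math.round(float) != float and the parse should error
    try testing.expectError(error.InvalidNumber, parse(T, &TokenStream.init("{ \"int\": 4.25e1 }"), ParseOptions{}));
}
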
test "parse into struct with misc fields" {
@setEvalBranchQuota(10000);

View File

@@ -2,6 +2,8 @@ const std = @import("std");
const aws = @import("aws.zig");
const json = @import("json.zig");
var verbose = false;
pub fn log(
comptime level: std.log.Level,
comptime scope: @TypeOf(.EnumLiteral),
@@ -9,7 +11,7 @@ pub fn log(
args: anytype,
) void {
// Ignore awshttp messages
if (scope == .awshttp and @enumToInt(level) >= @enumToInt(std.log.Level.debug))
if (!verbose and scope == .awshttp and @enumToInt(level) >= @enumToInt(std.log.Level.debug))
return;
const scope_prefix = "(" ++ @tagName(scope) ++ "): ";
const prefix = "[" ++ @tagName(level) ++ "] " ++ scope_prefix;
@@ -21,60 +23,90 @@ pub fn log(
nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return;
}
const Tests = enum {
query_no_input,
query_with_input,
ec2_query_no_input,
};
pub fn main() anyerror!void {
// Uncomment if you want to log allocations
// const file = try std.fs.cwd().createFile("/tmp/allocations.log", .{ .truncate = true });
// defer file.close();
// var child_allocator = std.heap.c_allocator;
// const allocator = &std.heap.loggingAllocator(child_allocator, file.writer()).allocator;
// Flip to true to run a second time. This will help debug
// allocation/deallocation issues
const test_twice = false;
// Flip to true to run through the json parsing changes made to stdlib
const test_json = false;
if (test_json) try jsonFun();
const c_allocator = std.heap.c_allocator;
var gpa = std.heap.GeneralPurposeAllocator(.{}){
.backing_allocator = c_allocator,
};
defer if (!gpa.deinit()) @panic("memory leak detected");
defer _ = gpa.deinit();
const allocator = &gpa.allocator;
// const allocator = std.heap.c_allocator;
var tests = std.ArrayList(Tests).init(allocator);
defer tests.deinit();
var args = std.process.args();
while (args.next(allocator)) |arg_or_error| {
const arg = try arg_or_error;
defer allocator.free(arg);
if (std.mem.eql(u8, "-v", arg)) {
verbose = true;
continue;
}
inline for (@typeInfo(Tests).Enum.fields) |f| {
if (std.mem.eql(u8, f.name, arg)) {
try tests.append(@field(Tests, f.name));
break;
}
}
}
if (tests.items.len == 0) {
inline for (@typeInfo(Tests).Enum.fields) |f|
try tests.append(@field(Tests, f.name));
}
const options = aws.Options{
.region = "us-west-2",
};
std.log.info("Start", .{});
std.log.info("Start\n", .{});
var client = aws.Aws.init(allocator);
defer client.deinit();
const services = aws.Services(.{.sts}){};
for (tests.items) |t| {
std.log.info("===== Start Test: {s} =====", .{@tagName(t)});
switch (t) {
.query_no_input => {
const resp = try client.call(services.sts.get_caller_identity.Request{}, options);
// TODO: This is a bit wonky. Root cause is lack of declarations in
// comptime-generated types
defer resp.deinit();
if (test_twice) {
std.time.sleep(1000 * std.time.ns_per_ms);
std.log.info("second request", .{});
var client2 = aws.Aws.init(allocator);
defer client2.deinit();
const resp2 = try client2.call(services.sts.get_caller_identity.Request{}, options); // catch here and try alloc?
defer resp2.deinit();
}
std.log.info("arn: {s}", .{resp.response.arn});
std.log.info("id: {s}", .{resp.response.user_id});
std.log.info("account: {s}", .{resp.response.account});
std.log.info("requestId: {s}", .{resp.response_metadata.request_id});
},
.query_with_input => {
// TODO: Find test without sensitive info
const access = try client.call(services.sts.get_session_token.Request{
.duration_seconds = 900,
}, options);
defer access.deinit();
std.log.info("access key: {s}", .{access.response.credentials.access_key_id});
},
.ec2_query_no_input => {
// TODO: Find test
},
}
std.log.info("===== End Test: {s} =====\n", .{@tagName(t)});
}
std.log.info("Departing main", .{});
// if (test_twice) {
// std.time.sleep(1000 * std.time.ns_per_ms);
// std.log.info("second request", .{});
//
// var client2 = aws.Aws.init(allocator);
// defer client2.deinit();
// const resp2 = try client2.call(services.sts.get_caller_identity.Request{}, options); // catch here and try alloc?
// defer resp2.deinit();
// }
std.log.info("===== Tests complete =====", .{});
}
// TODO: Move into json.zig
pub fn jsonFun() !void {
// Standard behavior
const payload =

View File

@@ -1,29 +1,81 @@
const std = @import("std");
pub fn encode(obj: anytype, writer: anytype, options: anytype) !void {
try encodeStruct("", obj, writer, options);
fn defaultTransformer(field_name: []const u8, options: EncodingOptions) anyerror![]const u8 {
return field_name;
}
fn encodeStruct(parent: []const u8, obj: anytype, writer: anytype, options: anytype) !void {
var first = true;
pub const FieldNameTransformer = fn ([]const u8, EncodingOptions) anyerror![]const u8;
pub const EncodingOptions = struct {
allocator: ?*std.mem.Allocator = null,
field_name_transformer: *const FieldNameTransformer = &defaultTransformer,
};
pub fn encode(obj: anytype, writer: anytype, options: EncodingOptions) !void {
_ = try encodeInternal("", "", true, obj, writer, options);
}
fn encodeStruct(parent: []const u8, first: bool, obj: anytype, writer: anytype, options: EncodingOptions) !bool {
var rc = first;
inline for (@typeInfo(@TypeOf(obj)).Struct.fields) |field| {
const field_name = if (@hasField(@TypeOf(options), "field_name_transformer")) try options.field_name_transformer.transform(field.name) else field.name;
defer {
if (@hasField(@TypeOf(options), "field_name_transformer"))
options.field_name_transformer.transform_deinit(field_name);
}
if (!first) _ = try writer.write("&");
switch (@typeInfo(field.field_type)) {
.Struct => {
try encodeStruct(field_name ++ ".", @field(obj, field.name), writer);
},
else => try writer.print("{s}{s}={s}", .{ parent, field_name, @field(obj, field.name) }),
}
first = false;
const field_name = try options.field_name_transformer.*(field.name, options);
defer if (options.field_name_transformer.* != defaultTransformer)
if (options.allocator) |a| a.free(field_name);
// @compileLog(@typeInfo(field.field_type).Pointer);
rc = try encodeInternal(parent, field_name, rc, @field(obj, field.name), writer, options);
}
return rc;
}
fn testencode(expected: []const u8, value: anytype, options: anytype) !void {
pub fn encodeInternal(parent: []const u8, field_name: []const u8, first: bool, obj: anytype, writer: anytype, options: EncodingOptions) !bool {
// @compileLog(@typeInfo(@TypeOf(obj)));
var rc = first;
switch (@typeInfo(@TypeOf(obj))) {
.Optional => if (obj) |o| {
rc = try encodeInternal(parent, field_name, first, o, writer, options);
},
.Pointer => |ti| if (ti.size == .One) {
rc = try encodeInternal(parent, field_name, first, obj.*, writer, options);
} else {
if (!first) _ = try writer.write("&");
try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
rc = false;
},
.Struct => if (std.mem.eql(u8, "", field_name)) {
rc = try encodeStruct(parent, first, obj, writer, options);
} else {
// TODO: It would be lovely if we could concat at compile time or allocPrint at runtime
// XOR have compile time allocator support. Alas, neither are possible:
// https://github.com/ziglang/zig/issues/868: Comptime detection (feels like foot gun)
// https://github.com/ziglang/zig/issues/1291: Comptime allocator
const allocator = options.allocator orelse return error.AllocatorRequired;
const new_parent = try std.fmt.allocPrint(allocator, "{s}{s}.", .{ parent, field_name });
defer allocator.free(new_parent);
rc = try encodeStruct(new_parent, first, obj, writer, options);
// try encodeStruct(parent ++ field_name ++ ".", first, obj, writer, options);
},
.Array => {
if (!first) _ = try writer.write("&");
try writer.print("{s}{s}={s}", .{ parent, field_name, obj });
rc = false;
},
.Int, .ComptimeInt, .Float, .ComptimeFloat => {
if (!first) _ = try writer.write("&");
try writer.print("{s}{s}={d}", .{ parent, field_name, obj });
rc = false;
},
// BUGS! any doesn't work - a lot. Check this out:
// https://github.com/ziglang/zig/blob/master/lib/std/fmt.zig#L424
else => {
if (!first) _ = try writer.write("&");
try writer.print("{s}{s}={any}", .{ parent, field_name, obj });
rc = false;
},
}
return rc;
}
fn testencode(expected: []const u8, value: anytype, options: EncodingOptions) !void {
const ValidationWriter = struct {
const Self = @This();
pub const Writer = std.io.Writer(*Self, Error, write);
@@ -43,7 +95,7 @@ fn testencode(expected: []const u8, value: anytype, options: anytype) !void {
}
fn write(self: *Self, bytes: []const u8) Error!usize {
// std.debug.print("{s}", .{bytes});
// std.debug.print("{s}\n", .{bytes});
if (self.expected_remaining.len < bytes.len) {
std.debug.warn(
\\====== expected this output: =========
@@ -80,17 +132,46 @@ fn testencode(expected: []const u8, value: anytype, options: anytype) !void {
if (vos.expected_remaining.len > 0) return error.NotEnoughData;
}
test "can url encode an object" {
test "can urlencode an object" {
try testencode(
"Action=GetCallerIdentity&Version=2021-01-01",
.{ .Action = "GetCallerIdentity", .Version = "2021-01-01" },
.{},
);
}
test "can url encode a complex object" {
test "can urlencode an object with integer" {
try testencode(
"Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo",
.{ .Action = "GetCallerIdentity", .Version = "2021-01-01", .complex = .{ .innermember = "foo" } },
"Action=GetCallerIdentity&Duration=32",
.{ .Action = "GetCallerIdentity", .Duration = 32 },
.{},
);
}
const UnsetValues = struct {
action: ?[]const u8 = null,
duration: ?i64 = null,
val1: ?i64 = null,
val2: ?[]const u8 = null,
};
test "can urlencode an object with unset values" {
// var buffer = std.ArrayList(u8).init(std.testing.allocator);
// defer buffer.deinit();
// const writer = buffer.writer();
// try encode(
// UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
// writer,
// .{ .allocator = std.testing.allocator },
// );
// std.debug.print("{s}", .{buffer.items});
try testencode(
"action=GetCallerIdentity&duration=32",
UnsetValues{ .action = "GetCallerIdentity", .duration = 32 },
.{},
);
}
test "can urlencode a complex object" {
try testencode(
"Action=GetCallerIdentity&Version=2021-01-01&complex.innermember=foo",
.{ .Action = "GetCallerIdentity", .Version = "2021-01-01", .complex = .{ .innermember = "foo" } },
.{ .allocator = std.testing.allocator },
);
}
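
Note (illustration, not part of the change): nested structs build their "parent." prefix with allocPrint, so per the AllocatorRequired check above, encoding one without setting options.allocator should fail. A companion test sketch, assuming the same file-local encode and std as the tests above:

test "urlencode of a nested struct without an allocator fails" {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();
    try std.testing.expectError(
        error.AllocatorRequired,
        encode(.{ .complex = .{ .innermember = "foo" } }, buffer.writer(), .{}),
    );
}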