Compare commits
2 Commits
8e853e9a82...866a68777e

  866a68777e
  5c5f23eb26
src/aws.zig (+36 lines)

@@ -485,6 +485,42 @@ test "basic json request serialization" {
        \\}
    , buffer.items);
}
test "layer object only" {
    const TestResponse = struct {
        arn: ?[]const u8 = null,
        // uncompressed_code_size: ?i64 = null,

        pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
            const mappings = .{
                .arn = "Arn",
            };
            return @field(mappings, field_name);
        }
    };
    const response =
        \\ {
        \\ "UncompressedCodeSize": 2,
        \\ "Arn": "blah"
        \\ }
    ;
    // const response =
    //     \\ {
    //     \\ "UncompressedCodeSize": 22599541,
    //     \\ "Arn": "arn:aws:lambda:us-west-2:550620852718:layer:PollyNotes-lib:4"
    //     \\ }
    // ;
    const allocator = std.testing.allocator;
    var stream = json.TokenStream.init(response);
    const parser_options = json.ParseOptions{
        .allocator = allocator,
        .allow_camel_case_conversion = true, // new option
        .allow_snake_case_conversion = true, // new option
        .allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
        .allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
    };
    const r = try json.parse(TestResponse, &stream, parser_options);
    json.parseFree(TestResponse, r, parser_options);
}
// Use for debugging json responses of specific requests
// test "dummy request" {
//     const allocator = std.testing.allocator;
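Note (not part of the diff): the test above maps the Zig field name arn to the wire name "Arn" through the struct-level jsonFieldNameFor hook, which resolves the name at comptime via @field on an anonymous struct. Below is a minimal standalone sketch of that mechanism; the struct and function body are taken from the diff, while the test name and the expectEqualSlices assertion are illustrative additions.

const std = @import("std");

const TestResponse = struct {
    arn: ?[]const u8 = null,

    pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
        // The anonymous struct acts as a comptime lookup table;
        // @field pulls the mapped wire name out by the Zig field name.
        const mappings = .{
            .arn = "Arn",
        };
        return @field(mappings, field_name);
    }
};

test "jsonFieldNameFor maps zig names to wire names (sketch)" {
    const r = TestResponse{};
    try std.testing.expectEqualSlices(u8, "Arn", r.jsonFieldNameFor("arn"));
}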
src/json.zig (+90 lines)
@@ -1136,6 +1136,9 @@ pub const TokenStream = struct {
            return error.UnexpectedEndOfJson;
        }
    }
    fn stackUsed(self: *TokenStream) u8 {
        return self.parser.stack_used + if (self.token != null) @as(u8, 1) else 0;
    }
};

fn checkNext(p: *TokenStream, id: std.meta.Tag(Token)) !void {
@@ -1532,6 +1535,20 @@ fn snakeCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !bool

    return std.mem.eql(u8, comp_field, normalized_key);
}
const SkipValueError = error{UnexpectedJsonDepth} || TokenStream.Error;

fn skipValue(tokens: *TokenStream) SkipValueError!void {
    const original_depth = tokens.stackUsed();

    // Return an error if no value is found
    _ = try tokens.next();
    if (tokens.stackUsed() < original_depth) return error.UnexpectedJsonDepth;
    if (tokens.stackUsed() == original_depth) return;

    while (try tokens.next()) |_| {
        if (tokens.stackUsed() == original_depth) return;
    }
}

fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
    switch (@typeInfo(T)) {
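Note (not part of the diff): skipValue discards exactly one value, descending through nested containers by watching stackUsed until the depth returns to where it started. A sketch of a unit test that could sit next to it in src/json.zig follows; it assumes the checkNext helper shown in the first hunk and is not part of this commit.

test "skipValue skips a whole nested value (sketch)" {
    var ts = TokenStream.init("{\"a\": {\"x\": 1}, \"b\": 2}");
    try checkNext(&ts, .ObjectBegin);
    try checkNext(&ts, .String); // key "a"
    try skipValue(&ts); // consumes the entire {"x": 1} value
    try checkNext(&ts, .String); // stream should now be at key "b"
    try checkNext(&ts, .Number);
    try checkNext(&ts, .ObjectEnd);
}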
@@ -1675,6 +1692,7 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
                            }
                        }
                        if (!found and !options.allow_unknown_fields) return error.UnknownField;
                        if (!found) try skipValue(tokens);
                    },
                    .ObjectBegin => {
                        if (!options.allow_unknown_fields) return error.UnknownField;
@@ -1788,6 +1806,35 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
                        },
                    }
                },
                .ObjectBegin => {
                    // We are parsing into a slice, but we have an
                    // ObjectBegin. This might be ok, iff the type
                    // follows this pattern: []struct { key: []const u8, value: anytype }
                    // (could key be anytype?).
                    if (!isMapPattern(T))
                        return error.UnexpectedToken;
                    var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
                    errdefer {
                        while (arraylist.popOrNull()) |v| {
                            parseFree(ptrInfo.child, v, options);
                        }
                        arraylist.deinit();
                    }
                    while (true) {
                        const key = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
                        switch (key) {
                            .ObjectEnd => break,
                            else => {},
                        }

                        try arraylist.ensureCapacity(arraylist.items.len + 1);
                        const key_val = try parseInternal(try typeForField(ptrInfo.child, "key"), key, tokens, options);
                        const val = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
                        const val_val = try parseInternal(try typeForField(ptrInfo.child, "value"), val, tokens, options);
                        arraylist.appendAssumeCapacity(.{ .key = key_val, .value = val_val });
                    }
                    return arraylist.toOwnedSlice();
                },
                else => return error.UnexpectedToken,
            }
        },
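Note (not part of the diff): the loop above appends one slice element per JSON key in the order the keys are encountered, so a map-pattern slice preserves document order. A hypothetical caller-side test sketching that behaviour is shown below; the real test added by this commit appears in the last hunk.

test "map pattern preserves document order (sketch)" {
    const options = ParseOptions{ .allocator = testing.allocator };
    const Map = []struct { key: []const u8, value: []const u8 };
    const r = try parse(Map, &TokenStream.init("{\"Name\": \"mylayer\", \"Arn\": \"blah\"}"), options);
    defer parseFree(Map, r, options);
    try testing.expect(r.len == 2);
    try testing.expectEqualSlices(u8, "Name", r[0].key);
    try testing.expectEqualSlices(u8, "Arn", r[1].key);
}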
@@ -1799,6 +1846,40 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
    unreachable;
}

fn typeForField(comptime T: type, field_name: []const u8) !type {
    const ti = @typeInfo(T);
    switch (ti) {
        .Struct => {
            inline for (ti.Struct.fields) |field| {
                if (std.mem.eql(u8, field.name, field_name))
                    return field.field_type;
            }
        },
        else => return error.TypeIsNotAStruct, // should not hit this
    }
    return error.FieldNotFound;
}

fn isMapPattern(comptime T: type) bool {
    // We should be getting a type that is a pointer to a slice.
    // Let's just double check before proceeding
    const ti = @typeInfo(T);
    if (ti != .Pointer) return false;
    if (ti.Pointer.size != .Slice) return false;
    const ti_child = @typeInfo(ti.Pointer.child);
    if (ti_child != .Struct) return false;
    if (ti_child.Struct.fields.len != 2) return false;
    var key_found = false;
    var value_found = false;
    inline for (ti_child.Struct.fields) |field| {
        if (std.mem.eql(u8, "key", field.name))
            key_found = true;
        if (std.mem.eql(u8, "value", field.name))
            value_found = true;
    }
    return key_found and value_found;
}

pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
    const token = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
    return parseInternal(T, token, tokens, options);
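Note (not part of the diff): isMapPattern only accepts a slice of two-field structs whose fields are named key and value; anything else falls through to error.UnexpectedToken in the .ObjectBegin branch above. A hypothetical test pinning that down could look like this:

test "isMapPattern only matches key/value slices (sketch)" {
    try testing.expect(isMapPattern([]struct { key: []const u8, value: []const u8 }));
    try testing.expect(!isMapPattern([]u8));
    try testing.expect(!isMapPattern([]struct { name: []const u8, value: []const u8 }));
}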
@ -1904,6 +1985,15 @@ test "parse into that allocates a slice" {
|
|||
}
|
||||
}
|
||||
|
||||
test "parse into that uses a map pattern" {
|
||||
const options = ParseOptions{ .allocator = testing.allocator };
|
||||
const Map = []struct { key: []const u8, value: []const u8 };
|
||||
const r = try parse(Map, &TokenStream.init("{\"foo\": \"bar\"}"), options);
|
||||
defer parseFree(Map, r, options);
|
||||
try testing.expectEqualSlices(u8, "foo", r[0].key);
|
||||
try testing.expectEqualSlices(u8, "bar", r[0].value);
|
||||
}
|
||||
|
||||
test "parse into tagged union" {
|
||||
{
|
||||
const T = union(enum) {
|
||||
|
|