Compare commits

..

No commits in common. "866a68777e76c3f9b652407b3cd694b1359762aa" and "8e853e9a829e567d33d44b1b77feece34a7a57a9" have entirely different histories.

2 changed files with 0 additions and 126 deletions

View File

@@ -485,42 +485,6 @@ test "basic json request serialization" {
\\}
, buffer.items);
}
// Verifies that parsing tolerates unknown JSON fields ("UncompressedCodeSize")
// when allow_unknown_fields is set, while still mapping known wire names
// ("Arn") onto zig fields via jsonFieldNameFor.
test "layer object only" {
    const TestResponse = struct {
        arn: ?[]const u8 = null,

        // Translates a zig (snake_case) field name to its JSON wire name.
        pub fn jsonFieldNameFor(_: @This(), comptime field_name: []const u8) []const u8 {
            const mappings = .{
                .arn = "Arn",
            };
            return @field(mappings, field_name);
        }
    };
    const response =
        \\ {
        \\ "UncompressedCodeSize": 2,
        \\ "Arn": "blah"
        \\ }
    ;
    const allocator = std.testing.allocator;
    var stream = json.TokenStream.init(response);
    const parser_options = json.ParseOptions{
        .allocator = allocator,
        .allow_camel_case_conversion = true, // new option
        .allow_snake_case_conversion = true, // new option
        .allow_unknown_fields = true, // new option. Cannot yet handle non-struct fields though
        .allow_missing_fields = false, // new option. Cannot yet handle non-struct fields though
    };
    const r = try json.parse(TestResponse, &stream, parser_options);
    // defer so the parsed result is freed even if an expectation below fails
    defer json.parseFree(TestResponse, r, parser_options);
    // Assert the parsed value instead of only exercising the parser.
    try std.testing.expectEqualSlices(u8, "blah", r.arn.?);
}
// Use for debugging json responses of specific requests
// test "dummy request" {
// const allocator = std.testing.allocator;

View File

@@ -1136,9 +1136,6 @@ pub const TokenStream = struct {
return error.UnexpectedEndOfJson;
}
}
/// Current depth of the parse stack, counting a pending (already read but
/// not yet consumed) token as one extra level.
fn stackUsed(self: *TokenStream) u8 {
    const pending: u8 = if (self.token == null) 0 else 1;
    return self.parser.stack_used + pending;
}
};
fn checkNext(p: *TokenStream, id: std.meta.Tag(Token)) !void {
@@ -1535,20 +1532,6 @@ fn snakeCaseComp(field: []const u8, key: []const u8, options: ParseOptions) !boo
return std.mem.eql(u8, comp_field, normalized_key);
}
const SkipValueError = error{UnexpectedJsonDepth} || TokenStream.Error;

/// Consumes and discards the next complete JSON value from the stream,
/// whether scalar or composite (object/array).
fn skipValue(tokens: *TokenStream) SkipValueError!void {
    const start_depth = tokens.stackUsed();
    // Consume the first token of the value being skipped.
    _ = try tokens.next();
    const depth = tokens.stackUsed();
    // Depth below where we started means the stream closed a scope we
    // did not open — the JSON is inconsistent with a value here.
    if (depth < start_depth) return error.UnexpectedJsonDepth;
    // A scalar leaves the depth unchanged; we are done.
    if (depth == start_depth) return;
    // Composite value: drain tokens until depth returns to the start.
    while (try tokens.next()) |_| {
        if (tokens.stackUsed() == start_depth) return;
    }
}
fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options: ParseOptions) !T {
switch (@typeInfo(T)) {
@@ -1692,7 +1675,6 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
}
}
if (!found and !options.allow_unknown_fields) return error.UnknownField;
if (!found) try skipValue(tokens);
},
.ObjectBegin => {
if (!options.allow_unknown_fields) return error.UnknownField;
@@ -1806,35 +1788,6 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
},
}
},
.ObjectBegin => {
    // We are parsing into a slice, but we have an ObjectBegin.
    // This is ok iff the element type follows the map pattern:
    //   []struct { key: []const u8, value: anytype }
    if (!isMapPattern(T))
        return error.UnexpectedToken;
    // Resolve the pair field types once, up front.
    const Key = try typeForField(ptrInfo.child, "key");
    const Value = try typeForField(ptrInfo.child, "value");
    var arraylist = std.ArrayList(ptrInfo.child).init(allocator);
    errdefer {
        // On failure, free every pair already handed to the list.
        while (arraylist.popOrNull()) |v| {
            parseFree(ptrInfo.child, v, options);
        }
        arraylist.deinit();
    }
    while (true) {
        const key = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
        switch (key) {
            .ObjectEnd => break,
            else => {},
        }
        try arraylist.ensureCapacity(arraylist.items.len + 1);
        const key_val = try parseInternal(Key, key, tokens, options);
        // Free the parsed key if parsing its value fails below; until the
        // pair is appended, the arraylist errdefer does not own it and the
        // key's allocation would otherwise leak.
        errdefer parseFree(Key, key_val, options);
        const val = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
        const val_val = try parseInternal(Value, val, tokens, options);
        arraylist.appendAssumeCapacity(.{ .key = key_val, .value = val_val });
    }
    return arraylist.toOwnedSlice();
},
else => return error.UnexpectedToken,
}
},
@@ -1846,40 +1799,6 @@ fn parseInternal(comptime T: type, token: Token, tokens: *TokenStream, options:
unreachable;
}
/// Resolves the declared type of `field_name` within struct `T`.
/// Returns error.TypeIsNotAStruct if `T` is not a struct, or
/// error.FieldNotFound if no field by that name exists.
fn typeForField(comptime T: type, field_name: []const u8) !type {
    const info = @typeInfo(T);
    if (info != .Struct) return error.TypeIsNotAStruct; // should not hit this
    inline for (info.Struct.fields) |f| {
        if (std.mem.eql(u8, f.name, field_name))
            return f.field_type;
    }
    return error.FieldNotFound;
}
/// Reports whether `T` is a slice of two-field structs shaped like
/// struct { key, value } — the pattern used to decode a JSON object
/// into a list of key/value pairs.
fn isMapPattern(comptime T: type) bool {
    // We should be getting a type that is a pointer to a slice.
    // Bail early on anything else.
    const info = @typeInfo(T);
    if (info != .Pointer or info.Pointer.size != .Slice) return false;
    const child_info = @typeInfo(info.Pointer.child);
    if (child_info != .Struct) return false;
    if (child_info.Struct.fields.len != 2) return false;
    var has_key = false;
    var has_value = false;
    inline for (child_info.Struct.fields) |f| {
        if (std.mem.eql(u8, f.name, "key"))
            has_key = true;
        if (std.mem.eql(u8, f.name, "value"))
            has_value = true;
    }
    return has_key and has_value;
}
pub fn parse(comptime T: type, tokens: *TokenStream, options: ParseOptions) !T {
const token = (try tokens.next()) orelse return error.UnexpectedEndOfJson;
return parseInternal(T, token, tokens, options);
@@ -1985,15 +1904,6 @@ test "parse into that allocates a slice" {
}
}
// Exercises the map pattern: a JSON object decoded into a slice of
// { key, value } structs.
test "parse into that uses a map pattern" {
    const Map = []struct { key: []const u8, value: []const u8 };
    const options = ParseOptions{ .allocator = testing.allocator };
    var stream = TokenStream.init("{\"foo\": \"bar\"}");
    const parsed = try parse(Map, &stream, options);
    defer parseFree(Map, parsed, options);
    try testing.expectEqualSlices(u8, "foo", parsed[0].key);
    try testing.expectEqualSlices(u8, "bar", parsed[0].value);
}
test "parse into tagged union" {
{
const T = union(enum) {