Compare commits

...

12 Commits

6 changed files with 279 additions and 39 deletions

View File

@@ -62,8 +62,9 @@ The zig 0.11 HTTP client supports TLS 1.3 only. This, IMHO, is a reasonable
 restriction given its introduction 5 years ago, but is inflicting some short
 term pain on this project as AWS has not yet fully implemented the protocol. AWS has
 committed to [TLS 1.3 support across all services by the end of 2023](https://aws.amazon.com/blogs/security/faster-aws-cloud-connections-with-tls-1-3/), but many (most) services as of August 28th have not yet
-been upgraded. Proxy support is available in the client but is not yet implemented.
-In the meantime, it's possible that proxychains is your friend.
+been upgraded. Proxy support has been added, so to get to the services that
+do not yet support TLS 1.3, you can use something like [mitmproxy](https://mitmproxy.org/)
+to proxy those requests. Of course, this is not a good production solution...
 WebIdentityToken is not yet implemented.

View File

@@ -934,6 +934,8 @@ fn buildPath(
 replacement_writer,
 );
 const trimmed_replacement_val = std.mem.trim(u8, replacement_buffer.items, "\"");
+// NOTE: We have to encode here as it is a portion of the REST JSON protocol.
+// This makes the second encoding applied by the standard library wrong (double encoding).
 try uriEncode(trimmed_replacement_val, encoded_buffer.writer(), encode_slash);
 try buffer.appendSlice(encoded_buffer.items);
 }
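The note above is the heart of the double-encoding trap behind zig issue 17015: the path segment has to be percent-encoded before signing, and the standard library then escapes it a second time when the request line is written. The snippet below is a standalone illustration of the effect, not code from this repository; it assumes zig 0.11's `std.Uri.escapeString` and simply escapes an already-escaped string to show how `%3A` becomes `%253A`.

```zig
const std = @import("std");

test "escaping an already-escaped ARN turns %3A into %253A" {
    const allocator = std.testing.allocator;
    const arn = "arn:aws:lambda:us-west-2:123456789012:function:example";

    // First pass: the kind of encoding the SDK performs while building the path.
    const once = try std.Uri.escapeString(allocator, arn);
    defer allocator.free(once);
    try std.testing.expect(std.ascii.indexOfIgnoreCase(once, "%3a") != null);

    // Second pass: what happens when the already-encoded string is escaped again.
    const twice = try std.Uri.escapeString(allocator, once);
    defer allocator.free(twice);
    try std.testing.expect(std.ascii.indexOfIgnoreCase(twice, "%253a") != null);
}
```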
@@ -1329,6 +1331,7 @@ const TestOptions = struct {
 server_port: ?u16 = null,
 server_remaining_requests: usize = 1,
 server_response: []const u8 = "unset",
+server_response_status: std.http.Status = .ok,
 server_response_headers: [][2][]const u8 = &[_][2][]const u8{},
 server_response_transfer_encoding: ?std.http.TransferEncoding = null,
 request_body: []u8 = "",
@@ -1461,6 +1464,7 @@ fn processRequest(options: *TestOptions, server: *std.http.Server) !void {
 }
 fn serve(options: *TestOptions, res: *std.http.Server.Response) ![]const u8 {
+res.status = options.server_response_status;
 for (options.server_response_headers) |h|
 try res.headers.append(h[0], h[1]);
 // try res.headers.append("content-length", try std.fmt.allocPrint(allocator, "{d}", .{server_response.len}));
@@ -1798,9 +1802,41 @@ test "rest_json_1_query_no_input: lambda listFunctions runtime" {
 call.response.functions.?[12].function_name.?,
 );
 }
-test "rest_json_1_work_with_lambda: lambda multiple functions (blank test)" {
-// Replicating this test would not provide additional coverage. It is
-// here for completeness only
+test "rest_json_1_work_with_lambda: lambda tagResource (only), to exercise zig issue 17015" {
+const allocator = std.testing.allocator;
+var test_harness = TestSetup.init(allocator, .{
+.allocator = allocator,
+.server_response = "",
+.server_response_status = .no_content,
+.server_response_headers = @constCast(&[_][2][]const u8{
+.{ "Content-Type", "application/json" },
+.{ "x-amzn-RequestId", "a521e152-6e32-4e67-9fb3-abc94e34551b" },
+}),
+});
+defer test_harness.deinit();
+const options = try test_harness.start();
+const lambda = (Services(.{.lambda}){}).lambda;
+var tags = try std.ArrayList(@typeInfo(try typeForField(lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+defer tags.deinit();
+tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
+const req = services.lambda.tag_resource.Request{ .resource = "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda", .tags = tags.items };
+const call = try Request(lambda.tag_resource).call(req, options);
+defer call.deinit();
+test_harness.stop();
+// Request expectations
+try std.testing.expectEqual(std.http.Method.POST, test_harness.request_options.request_method);
+try std.testing.expectEqualStrings(
+\\{
+\\ "Resource": "arn:aws:lambda:us-west-2:550620852718:function:awsome-lambda-LambdaStackawsomeLambda",
+\\ "Tags": {
+\\ "Foo": "Bar"
+\\ }
+\\}
+, test_harness.request_options.request_body);
+// Due to 17015, we see %253A instead of %3A
+try std.testing.expectEqualStrings("/2017-03-31/tags/arn%3Aaws%3Alambda%3Aus-west-2%3A550620852718%3Afunction%3Aawsome-lambda-LambdaStackawsomeLambda", test_harness.request_options.request_target);
+// Response expectations
+try std.testing.expectEqualStrings("a521e152-6e32-4e67-9fb3-abc94e34551b", call.response_metadata.request_id);
 }
 test "ec2_query_no_input: EC2 describe regions" {
 const allocator = std.testing.allocator;

View File

@@ -178,11 +178,39 @@ pub const AwsHttp = struct {
 // defer req.deinit();
 const method = std.meta.stringToEnum(std.http.Method, request_cp.method).?;
+// std.Uri has a format function here that is used by start() (below)
+// to escape the string we're about to send. But we don't want that...
+// we need the control, because the signing above relies on the url exactly as given.
+// We can't seem to have our cake and eat it too, because we need escaped
+// ':' characters, but if we escape them, we'll get them double encoded.
+// If we don't escape them, they won't get encoded at all. I believe the
+// only answer may be to copy the Request.start function from the
+// standard library and tweak the print statements such that they don't
+// escape (but do still handle full uri (in proxy) vs path only (normal)).
+//
+// Bug report filed here:
+// https://github.com/ziglang/zig/issues/17015
+//
+// https://github.com/ziglang/zig/blob/0.11.0/lib/std/http/Client.zig#L538-L636
+//
+// Look at lines 551 and 553:
+// https://github.com/ziglang/zig/blob/0.11.0/lib/std/http/Client.zig#L551
+//
+// This ends up executing the format function here:
+// https://github.com/ziglang/zig/blob/0.11.0/lib/std/Uri.zig#L209-L264
+//
+// Which is basically what we want, without the escaping on lines
+// 249, 254, and 260:
+// https://github.com/ziglang/zig/blob/0.11.0/lib/std/Uri.zig#L249
+//
+// const unescaped_url = try std.Uri.unescapeString(self.allocator, url);
+// defer self.allocator.free(unescaped_url);
 var req = try cl.request(method, try std.Uri.parse(url), headers, .{});
 defer req.deinit();
 if (request_cp.body.len > 0)
 req.transfer_encoding = .{ .content_length = request_cp.body.len };
-try req.start();
+try @import("http_client_17015_issue.zig").start(&req);
+// try req.start();
 if (request_cp.body.len > 0) {
 try req.writeAll(request_cp.body);
 try req.finish();
@@ -191,7 +219,7 @@
 // TODO: Timeout - is this now above us?
 log.debug(
-"Request Complete. Response code {d}: {any}",
+"Request Complete. Response code {d}: {?s}",
 .{ @intFromEnum(req.response.status), req.response.status.phrase() },
 );
 log.debug("Response headers:", .{});
@@ -248,7 +276,7 @@ fn addHeaders(allocator: std.mem.Allocator, headers: *std.ArrayList(base.Header)
 }
 try headers.append(.{ .name = "Accept", .value = "application/json" });
 try headers.append(.{ .name = "Host", .value = host });
-try headers.append(.{ .name = "User-Agent", .value = "zig-aws 1.0, Powered by the AWS Common Runtime." });
+try headers.append(.{ .name = "User-Agent", .value = "zig-aws 1.0" });
 if (!has_content_type)
 try headers.append(.{ .name = "Content-Type", .value = content_type });
 try headers.appendSlice(additional_headers);

View File

@@ -452,11 +452,30 @@ fn encodeParamPart(allocator: std.mem.Allocator, path: []const u8) ![]const u8 {
 }
 return encoded.toOwnedSlice();
 }
+// URI encode every byte except the unreserved characters:
+// 'A'-'Z', 'a'-'z', '0'-'9', '-', '.', '_', and '~'.
+//
+// The space character is a reserved character and must be encoded as "%20"
+// (and not as "+").
+//
+// Each URI encoded byte is formed by a '%' and the two-digit hexadecimal value of the byte.
+//
+// Letters in the hexadecimal value must be uppercase, for example "%1A".
+//
+// Encode the forward slash character, '/', everywhere except in the object key
+// name. For example, if the object key name is photos/Jan/sample.jpg, the
+// forward slash in the key name is not encoded.
 fn encodeUri(allocator: std.mem.Allocator, path: []const u8) ![]u8 {
 const reserved_characters = ";,/?:@&=+$#";
 const unreserved_marks = "-_.!~*'()";
 var encoded = try std.ArrayList(u8).initCapacity(allocator, path.len);
 defer encoded.deinit();
+// if (std.mem.startsWith(u8, path, "/2017-03-31/tags/arn")) {
+// try encoded.appendSlice("/2017-03-31/tags/arn%25253Aaws%25253Alambda%25253Aus-west-2%25253A550620852718%25253Afunction%25253Aawsome-lambda-LambdaStackawsomeLambda");
+// return encoded.toOwnedSlice();
+// }
 for (path) |c| {
 var should_encode = true;
 for (reserved_characters) |r|
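As a rough illustration of the encoding rule documented in the comment above, here is a standalone sketch, not the project's encodeUri or encodeParamPart; the function name sketchEncode and the encode_slash flag are hypothetical, introduced only to mirror the rule as stated.

```zig
const std = @import("std");

/// Minimal sketch of the documented rule: percent-encode every byte
/// (uppercase hex) except unreserved characters, optionally leaving the
/// forward slash alone for object key names.
fn sketchEncode(writer: anytype, input: []const u8, encode_slash: bool) !void {
    for (input) |c| {
        const unreserved = std.ascii.isAlphanumeric(c) or
            c == '-' or c == '.' or c == '_' or c == '~';
        if (unreserved or (c == '/' and !encode_slash)) {
            try writer.writeByte(c);
        } else {
            try writer.print("%{X:0>2}", .{c});
        }
    }
}

test "object key keeps '/', space becomes %20" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try sketchEncode(buf.writer(), "photos/Jan/sample 1.jpg", false);
    try std.testing.expectEqualStrings("photos/Jan/sample%201.jpg", buf.items);
}
```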

View File

@@ -0,0 +1,155 @@
const std = @import("std");
const Uri = std.Uri;
///////////////////////////////////////////////////////////////////////////
/// This function imported from:
/// https://github.com/ziglang/zig/blob/0.11.0/lib/std/http/Client.zig#L538-L636
///
/// The first commit of this file will be unchanged from 0.11.0 to more
/// clearly indicate changes moving forward. The plan is to change
/// only the two w.print lines for req.uri 16 and 18 lines down from this comment
///////////////////////////////////////////////////////////////////////////
/// Send the request to the server.
pub fn start(req: *std.http.Client.Request) std.http.Client.Request.StartError!void {
var buffered = std.io.bufferedWriter(req.connection.?.data.writer());
const w = buffered.writer();
try w.writeAll(@tagName(req.method));
try w.writeByte(' ');
if (req.method == .CONNECT) {
try w.writeAll(req.uri.host.?);
try w.writeByte(':');
try w.print("{}", .{req.uri.port.?});
} else if (req.connection.?.data.proxied) {
// proxied connections require the full uri
try format(req.uri, "+/", .{}, w);
} else {
try format(req.uri, "/", .{}, w);
}
try w.writeByte(' ');
try w.writeAll(@tagName(req.version));
try w.writeAll("\r\n");
if (!req.headers.contains("host")) {
try w.writeAll("Host: ");
try w.writeAll(req.uri.host.?);
try w.writeAll("\r\n");
}
if (!req.headers.contains("user-agent")) {
try w.writeAll("User-Agent: zig/");
try w.writeAll(@import("builtin").zig_version_string);
try w.writeAll(" (std.http)\r\n");
}
if (!req.headers.contains("connection")) {
try w.writeAll("Connection: keep-alive\r\n");
}
if (!req.headers.contains("accept-encoding")) {
try w.writeAll("Accept-Encoding: gzip, deflate, zstd\r\n");
}
if (!req.headers.contains("te")) {
try w.writeAll("TE: gzip, deflate, trailers\r\n");
}
const has_transfer_encoding = req.headers.contains("transfer-encoding");
const has_content_length = req.headers.contains("content-length");
if (!has_transfer_encoding and !has_content_length) {
switch (req.transfer_encoding) {
.chunked => try w.writeAll("Transfer-Encoding: chunked\r\n"),
.content_length => |content_length| try w.print("Content-Length: {d}\r\n", .{content_length}),
.none => {},
}
} else {
if (has_content_length) {
const content_length = std.fmt.parseInt(u64, req.headers.getFirstValue("content-length").?, 10) catch return error.InvalidContentLength;
req.transfer_encoding = .{ .content_length = content_length };
} else if (has_transfer_encoding) {
const transfer_encoding = req.headers.getFirstValue("transfer-encoding").?;
if (std.mem.eql(u8, transfer_encoding, "chunked")) {
req.transfer_encoding = .chunked;
} else {
return error.UnsupportedTransferEncoding;
}
} else {
req.transfer_encoding = .none;
}
}
try w.print("{}", .{req.headers});
try w.writeAll("\r\n");
try buffered.flush();
}
///////////////////////////////////////////////////////////////////////////
/// This function imported from:
/// https://github.com/ziglang/zig/blob/0.11.0/lib/std/Uri.zig#L209-L264
///
/// The first commit of this file will be unchanged from 0.11.0 to more
/// clearly indicate changes moving forward. The plan is to change
/// only the writeEscapedPath call 42 lines down from this comment
///////////////////////////////////////////////////////////////////////////
pub fn format(
uri: Uri,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
writer: anytype,
) @TypeOf(writer).Error!void {
_ = options;
const needs_absolute = comptime std.mem.indexOf(u8, fmt, "+") != null;
const needs_path = comptime std.mem.indexOf(u8, fmt, "/") != null or fmt.len == 0;
const needs_fragment = comptime std.mem.indexOf(u8, fmt, "#") != null;
if (needs_absolute) {
try writer.writeAll(uri.scheme);
try writer.writeAll(":");
if (uri.host) |host| {
try writer.writeAll("//");
if (uri.user) |user| {
try writer.writeAll(user);
if (uri.password) |password| {
try writer.writeAll(":");
try writer.writeAll(password);
}
try writer.writeAll("@");
}
try writer.writeAll(host);
if (uri.port) |port| {
try writer.writeAll(":");
try std.fmt.formatInt(port, 10, .lower, .{}, writer);
}
}
}
if (needs_path) {
if (uri.path.len == 0) {
try writer.writeAll("/");
} else {
try writer.writeAll(uri.path); // do not mess with our path
}
if (uri.query) |q| {
try writer.writeAll("?");
try Uri.writeEscapedQuery(writer, q);
}
if (needs_fragment) {
if (uri.fragment) |f| {
try writer.writeAll("#");
try Uri.writeEscapedQuery(writer, f);
}
}
}
}

View File

@@ -90,8 +90,11 @@ pub fn main() anyerror!void {
 proxy = try proxyFromString(args.next().?); // parse stuff
 continue;
 }
-if (std.mem.eql(u8, "-v", arg)) {
-verbose += 1;
+if (std.mem.startsWith(u8, arg, "-v")) {
+for (arg[1..]) |c| {
+if (c != 'v') return error.InvalidArgument;
+verbose += 1;
+}
 continue;
 }
 inline for (@typeInfo(Tests).Enum.fields) |f| {
@@ -181,35 +184,33 @@ pub fn main() anyerror!void {
 std.log.info("account has functions: {}", .{call.response.functions.?.len > 0});
 },
 .rest_json_1_work_with_lambda => {
-// const call = try client.call(services.lambda.list_functions.Request{}, options);
-// defer call.deinit();
-// std.log.info("list request id: {s}", .{call.response_metadata.request_id});
-// if (call.response.functions) |fns| {
-// if (fns.len > 0) {
-// const func = fns[0];
-// const arn = func.function_arn.?;
-// // This is a bit ugly. Maybe a helper function in the library would help?
-// var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
-// defer tags.deinit();
-// tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
-// const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
-// const addtag = try aws.Request(services.lambda.tag_resource).call(req, options);
-// TODO: Something is up with signature calculation. I believe it's with the encoding, because the url used
-// here is totally crazy with the arn of the resource directly in it
-// Example: https://lambda.us-west-2.amazonaws.com/2017-03-31/tags/arn%253Aaws%253Alambda%253Aus-west-2%253A550620852718%253Afunction%253ADevelopmentFrontendStack--amplifyassetdeploymentha-aZqB9IbZLIKU
-// defer addtag.deinit();
-// // const addtag = try client.call(services.lambda.tag_resource.Request{ .resource = arn, .tags = &.{.{ .key = "Foo", .value = "Bar" }} }, options);
-// std.log.info("add tag request id: {s}", .{addtag.response_metadata.request_id});
-// var keys = [_][]const u8{"Foo"}; // Would love to have a way to express this without burning a var here
-// const deletetag = try aws.Request(services.lambda.untag_resource).call(.{ .tag_keys = keys[0..], .resource = arn }, options);
-// defer deletetag.deinit();
-// std.log.info("delete tag request id: {s}", .{deletetag.response_metadata.request_id});
-// } else {
-// std.log.err("no functions to work with", .{});
-// }
-// } else {
-// std.log.err("no functions to work with", .{});
-// }
+const call = try client.call(services.lambda.list_functions.Request{}, options);
+defer call.deinit();
+std.log.info("list request id: {s}", .{call.response_metadata.request_id});
+if (call.response.functions) |fns| {
+if (fns.len > 0) {
+const func = fns[0];
+const arn = func.function_arn.?;
+// This is a bit ugly. Maybe a helper function in the library would help?
+var tags = try std.ArrayList(@typeInfo(try typeForField(services.lambda.tag_resource.Request, "tags")).Pointer.child).initCapacity(allocator, 1);
+defer tags.deinit();
+tags.appendAssumeCapacity(.{ .key = "Foo", .value = "Bar" });
+const req = services.lambda.tag_resource.Request{ .resource = arn, .tags = tags.items };
+const addtag = try aws.Request(services.lambda.tag_resource).call(req, options);
+// TODO: This is failing due to double-encoding (see zig issue 17015)
+defer addtag.deinit();
+// const addtag = try client.call(services.lambda.tag_resource.Request{ .resource = arn, .tags = &.{.{ .key = "Foo", .value = "Bar" }} }, options);
+std.log.info("add tag request id: {s}", .{addtag.response_metadata.request_id});
+var keys = [_][]const u8{"Foo"}; // Would love to have a way to express this without burning a var here
+const deletetag = try aws.Request(services.lambda.untag_resource).call(.{ .tag_keys = keys[0..], .resource = arn }, options);
+defer deletetag.deinit();
+std.log.info("delete tag request id: {s}", .{deletetag.response_metadata.request_id});
+} else {
+std.log.err("no functions to work with", .{});
+}
+} else {
+std.log.err("no functions to work with", .{});
+}
 },
 .ec2_query_no_input => {
 // Describe regions is a simpler request and easier to debug