Compare commits

..

3 Commits

SHA1         Message                     Date
88b497bea8   update ci to zig 0.12.0     2024-05-15 13:19:00 -07:00
             (checks failed: AWS-Zig Build / build-zig-0.11.0-amd64-host (push) was cancelled)
1e544d75c1   upgrade to zig 0.12.0       2024-05-15 13:16:24 -07:00
51445f3c9d   should not return arrays    2024-03-05 08:37:33 -08:00
10 changed files with 79 additions and 83 deletions

View File

@@ -11,26 +11,12 @@ env:
jobs:
build-zig-0.11.0-amd64-host:
runs-on: ubuntu-latest
# Need to use the default container with node and all that, so we can
# use JS-based actions like actions/checkout@v3...
# container:
# image: alpine:3.15.0
env:
ZIG_VERSION: 0.11.0
ARCH: x86_64
steps:
- name: Check out repository code
uses: actions/checkout@v3
# ARCH is fine, but we can't substitute directly because zig
# uses x86_64 instead of amd64. They also use aarch64 instead of arm64.
#
# However, arm64/linux isn't quite fully tier 1 yet, so this is more of a
# TODO: https://github.com/ziglang/zig/issues/2443
- name: Install zig
run: |
wget -q https://ziglang.org/download/${ZIG_VERSION}/zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
tar x -C /usr/local -f zig-linux-${ARCH}-${ZIG_VERSION}.tar.xz
ln -s /usr/local/zig-linux-${ARCH}-${ZIG_VERSION}/zig /usr/local/bin/zig
- uses: actions/checkout@v4
- uses: elerch/setup-zig@v3
with:
version: 0.12.0
- uses: elerch/zig-action-cache@v1.1.6
- name: Run tests
run: zig build test --verbose
- name: Build other platforms
@@ -59,8 +45,8 @@ jobs:
# echo "Signature 1 should be ./foo.sig: ${{ steps.sign.outputs.SIG_1 }}"
# - run: echo "URL of foo (1) is ${{ steps.sign.outputs.URL_1 }}"
- name: Notify
uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
if: always()
uses: elerch/action-notify-ntfy@v2.github
if: always() && env.GITEA_ACTIONS == 'true'
with:
host: ${{ secrets.NTFY_HOST }}
topic: ${{ secrets.NTFY_TOPIC }}

View File

@@ -1,5 +1,5 @@
const std = @import("std");
const universal_lambda = @import("universal_lambda_build");
const universal_lambda_build = @import("universal-lambda-zig");
// This seems to fail for some reason. zig-sqlite does a lot of messing with
// the target. So instead, we will handle this in the CI/CD system at the
@@ -95,9 +95,15 @@ pub fn build(b: *std.Build) !void {
const run_step = b.step("run", "Run the app");
run_step.dependOn(&run_cmd.step);
try universal_lambda.configureBuild(b, exe);
const universal_lambda_zig_dep = b.dependency("universal-lambda-zig", .{
.target = target,
.optimize = optimize,
});
// All modules should be added before this is called
try universal_lambda_build.configureBuild(b, exe, universal_lambda_zig_dep);
_ = universal_lambda_build.addImports(b, exe, universal_lambda_zig_dep);
const exe_aws_dep = b.dependency("aws", .{
const exe_aws_dep = b.dependency("aws-zig", .{
.target = target,
.optimize = optimize,
});
@@ -108,16 +114,17 @@ pub fn build(b: *std.Build) !void {
.use_bundled = true,
});
const exe_sqlite_module = exe_sqlite_dep.module("sqlite");
exe.addModule("aws-signing", exe_aws_signing_module);
exe.addModule("sqlite", exe_sqlite_module);
exe.root_module.addImport("aws-signing", exe_aws_signing_module);
exe.root_module.addImport("sqlite", exe_sqlite_module);
exe.addIncludePath(.{ .path = "c" });
exe.linkLibrary(exe_sqlite_dep.artifact("sqlite"));
// Similar to creating the run step earlier, this exposes a `test` step to
// the `zig build --help` menu, providing a way for the user to request
// running the unit tests.
const test_step = b.step("test", "Run unit tests");
for (test_targets) |t| {
const aws_dep = b.dependency("aws", .{
for (test_targets) |ct| {
const t = b.resolveTargetQuery(ct);
const aws_dep = b.dependency("aws-zig", .{
.target = t,
.optimize = optimize,
});
@@ -135,15 +142,15 @@ pub fn build(b: *std.Build) !void {
.target = t,
.optimize = optimize,
});
_ = try universal_lambda.addModules(b, unit_tests);
_ = universal_lambda_build.addImports(b, unit_tests, universal_lambda_zig_dep);
const run_unit_tests = b.addRunArtifact(unit_tests);
// run_unit_tests.skip_foreign_checks = true;
test_step.dependOn(&run_unit_tests.step);
unit_tests.addModule("aws-signing", aws_signing_module);
unit_tests.addModule("sqlite", sqlite_module);
unit_tests.root_module.addImport("aws-signing", aws_signing_module);
unit_tests.root_module.addImport("sqlite", sqlite_module);
unit_tests.addIncludePath(.{ .path = "c" });
unit_tests.linkLibrary(sqlite_dep.artifact("sqlite"));
}
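The hunks above capture the core of the Zig 0.12 build API migration: dependencies declared in build.zig.zon are fetched with b.dependency(), their modules are attached through exe.root_module.addImport() rather than the old exe.addModule(), and target queries are resolved up front with b.resolveTargetQuery(). (Dashed dependency names become .@"..." keys in the build.zig.zon hunk later in this diff.) A minimal sketch of that pattern outside this project, using the hypothetical names "some_dep" and "some_mod":

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        const exe = b.addExecutable(.{
            .name = "app",
            .root_source_file = .{ .path = "src/main.zig" },
            .target = target,
            .optimize = optimize,
        });

        // Fetch a dependency declared in build.zig.zon, forwarding target/optimize.
        const some_dep = b.dependency("some_dep", .{
            .target = target,
            .optimize = optimize,
        });
        // Zig 0.12: imports hang off the root module, not the compile step itself.
        exe.root_module.addImport("some_mod", some_dep.module("some_mod"));

        b.installArtifact(exe);
    }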
@@ -152,7 +159,7 @@ pub fn build(b: *std.Build) !void {
creds_step.makeFn = generateCredentials;
}
fn generateCredentials(s: *std.build.Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkipped }!void {
fn generateCredentials(s: *std.Build.Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkipped }!void {
_ = s;
// Account id:
// Documentation describes account id as a 12 digit number:
@@ -197,7 +204,8 @@ fn generateCredentials(s: *std.build.Step, prog_node: *std.Progress.Node) error{
//
// Without this bit set, AWS' sts will complain that this is not a valid key
const access_key_suffix: u80 = (1 << 79) | (@as(u80, account_number) << 39) + @as(u80, access_key_random_suffix);
const access_key_suffix_encoded = base32Encode(u80, access_key_suffix);
var access_key_suffix_encoded: [16]u8 = undefined;
base32Encode(u80, access_key_suffix, &access_key_suffix_encoded);
// std.debug.assert(access_key_suffix_encoded.len == 16);
var secret_key: [30]u8 = undefined;
rand.bytes(&secret_key); // The rest don't need to be cryptographically secure...does this?
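The access key suffix above packs three fields into 80 bits: a forced high bit (per the comment, sts rejects keys without it), the 40-bit account number, and a random value in the low 39 bits; 80 bits then base32-encode to exactly 16 characters. A self-contained check of that layout, with arbitrary example values rather than real credentials:

    const std = @import("std");

    test "80-bit access key suffix layout" {
        const account_number: u40 = 123456789012; // example 12-digit account id
        const random_suffix: u39 = 42; // example low-order randomness
        const suffix: u80 = (1 << 79) | (@as(u80, account_number) << 39) + @as(u80, random_suffix);
        try std.testing.expect(suffix >> 79 == 1); // forced high bit survives
        const recovered: u40 = @truncate(suffix >> 39); // account number occupies bits 39..78
        try std.testing.expectEqual(account_number, recovered);
    }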
@@ -240,7 +248,7 @@ fn generateCredentials(s: *std.build.Step, prog_node: *std.Progress.Node) error{
stdout_writer.flush() catch return error.MakeFailed;
}
/// encodes an unsigned integer into base36
/// encodes an unsigned integer into base36. Caller owns the memory returned
pub fn base36encode(comptime T: type, allocator: std.mem.Allocator, data: T) ![]const u8 {
const alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
std.debug.assert(alphabet.len == 36);
@@ -257,15 +265,15 @@ pub fn base36encode(comptime T: type, allocator: std.mem.Allocator, data: T) ![]
al.appendAssumeCapacity(alphabet[@as(usize, @intCast(remaining % alphabet.len))]);
}
// This is not exact, but 6 bits
var rc = try al.toOwnedSlice();
const rc = try al.toOwnedSlice();
std.mem.reverse(u8, rc);
return rc;
}
/// Because Base32 is a power of 2, we can directly return an array and avoid
/// allocations entirely
/// allocations entirely. A pointer to the output array must be bits/5 long
/// To trim leading 0s, simply std.mem.trimLeft(u8, encoded_data, "A");
pub fn base32Encode(comptime T: type, data: T) [@typeInfo(T).Int.bits / 5]u8 {
pub fn base32Encode(comptime T: type, data: T, encoded: *[@typeInfo(T).Int.bits / 5]u8) void {
const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
std.debug.assert(alphabet.len == 32);
const ti = @typeInfo(T);
@@ -273,16 +281,15 @@ pub fn base32Encode(comptime T: type, data: T) [@typeInfo(T).Int.bits / 5]u8 {
@compileError("encode only works with unsigned integers");
const bits = ti.Int.bits;
// We will have exactly 5 bits (2^5 = 32) represented per byte in our final output
var rc: [bits / 5]u8 = undefined;
// var rc: [bits / 5]u8 = undefined;
var inx: usize = 0;
const Shift_type = @Type(.{ .Int = .{
.signedness = .unsigned,
.bits = @ceil(@log2(@as(f128, @floatFromInt(bits)))),
} });
// TODO: I think we need a table here to determine the size below
while (inx < rc.len) : (inx += 1) {
while (inx < encoded.len) : (inx += 1) {
const char_bits: u5 = @as(u5, @truncate(data >> (@as(Shift_type, @intCast(inx * 5)))));
rc[rc.len - @as(usize, @intCast(inx)) - 1] = alphabet[@as(usize, @intCast(char_bits))]; // 5 bits from inx
encoded[encoded.len - @as(usize, @intCast(inx)) - 1] = alphabet[@as(usize, @intCast(char_bits))]; // 5 bits from inx
}
return rc;
}
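Unlike base36encode just above, which allocates and hands ownership to the caller, the reworked base32Encode fills a caller-provided array of exactly bits/5 bytes, in line with the "should not return arrays" commit. A self-contained sketch of that out-parameter style (it mirrors the signature above but is not the project's exact code):

    const std = @import("std");

    /// Writes base32 into a caller-provided buffer of @typeInfo(T).Int.bits / 5
    /// characters (u80 -> 16); leading zero groups come out as 'A'.
    fn base32Encode(comptime T: type, data: T, encoded: *[@typeInfo(T).Int.bits / 5]u8) void {
        const alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
        var inx: usize = 0;
        while (inx < encoded.len) : (inx += 1) {
            // 5 bits per output character, least significant group written last
            const char_bits: u5 = @truncate(data >> @as(std.math.Log2Int(T), @intCast(inx * 5)));
            encoded[encoded.len - inx - 1] = alphabet[char_bits];
        }
    }

    test "u80 encodes to 16 base32 characters" {
        var buf: [16]u8 = undefined; // 80 bits / 5 bits per character
        base32Encode(u80, 1, &buf);
        try std.testing.expectEqualStrings("AAAAAAAAAAAAAAAB", &buf);
    }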

View File

@@ -3,21 +3,24 @@
.version = "0.0.1",
.dependencies = .{
.aws = .{
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/31324c7e83392ce7aeca13629013870181d1458e/31324c7e83392ce7aeca13629013870181d1458e-with-models.tar.gz",
.hash = "1220ae1932ce061440dece024f6d605fb4d00ef3bf03518e2aae0502980108368ee3",
},
.sqlite = .{
.url = "https://github.com/vrischmann/zig-sqlite/archive/19535aab5760eeaf2979a9dadfca3bb21d1594b9.tar.gz",
.hash = "12208c654deea149cee27eaa45d0e6515c3d8f97d775a4156cbcce0ff424b5d26ea3",
.url = "https://github.com/vrischmann/zig-sqlite/archive/fd17eb9a4ea35f515cc7c2e4755a3a5e284a0bd3.tar.gz",
.hash = "1220e0300613f2b949a8328c5b1d9ba84777f2258c1535039afcb536e9e1228d2881",
},
.universal_lambda_build = .{
.url = "https://git.lerch.org/lobo/universal-lambda-zig/archive/5f1b1a52beea841e130ea4d878437f9488da0eb7.tar.gz",
.hash = "12202e3f5cc4db196d9bef727e10b407413d6dd95a6e94d66f11c4c14dc5ee060b58",
.@"universal-lambda-zig" = .{
.url = "https://git.lerch.org/lobo/universal-lambda-zig/archive/f3d80b4afe8c13031b6cff051e93deaeadb1d268.tar.gz",
.hash = "122093d59b28dcd0201eaa1587ca78e11c06fb563b2cb9b554aaa5f7bab0ae34432e",
},
.flexilib = .{
.url = "https://git.lerch.org/lobo/flexilib/archive/3d3dab9c792651477932e2b61c9f4794ac694dcb.tar.gz",
.hash = "1220fd7a614fe3c9f6006b630bba528e2ec9dca9c66f5ff10f7e471ad2bdd41b6c89",
.@"aws-zig" = .{
.url = "https://git.lerch.org/api/packages/lobo/generic/aws-sdk-with-models/007f2f588aa30fd2c62d6a037fb87bace8a83710/007f2f588aa30fd2c62d6a037fb87bace8a83710-with-models.tar.gz",
.hash = "1220f128554146e2f58f223a51f1839f49012294a8163a445e8320889539edcda9b3",
},
},
.paths = .{
"build.zig",
"build.zig.zon",
"src",
"LICENSE",
"README.md",
},
}

View File

@@ -2,7 +2,7 @@ const std = @import("std");
allocator: std.mem.Allocator,
event_data: []const u8,
headers: std.http.Headers,
headers: []const std.http.Header,
status: std.http.Status,
reason: ?[]const u8,
account_id: u40,
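This is the 0.12 std.http rework: the request type now carries a plain slice of std.http.Header name/value pairs instead of the removed std.http.Headers, which is why the test fixtures further down switch to the empty slice literal &.{}. A tiny illustration with arbitrary header values:

    const std = @import("std");

    test "headers as a slice of std.http.Header" {
        // Arbitrary example header; std.http.Header is just a name/value pair.
        const headers: []const std.http.Header = &.{
            .{ .name = "content-type", .value = "application/x-amz-json-1.0" },
        };
        try std.testing.expectEqualStrings("content-type", headers[0].name);

        // "No headers" is simply an empty slice literal, as used in the tests below.
        const none: []const std.http.Header = &.{};
        try std.testing.expectEqual(@as(usize, 0), none.len);
    }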

View File

@@ -295,7 +295,7 @@ const Params = struct {
writer,
"ExpressionAttributeNames must be an object",
);
var count = v.object.count();
const count = v.object.count();
var names = v.object.iterator();
var hashmap = std.StringHashMap([]const u8).init(aa);
try hashmap.ensureTotalCapacity(@as(u32, @intCast(count)));
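For reference, the hunk above pre-sizes the StringHashMap from the parsed object's count before filling it. A standalone version of that sizing pattern (the keys are made up, and pairing it with putAssumeCapacity here is illustrative rather than a claim about the handler's code):

    const std = @import("std");

    test "pre-size a StringHashMap from a known count" {
        var hashmap = std.StringHashMap([]const u8).init(std.testing.allocator);
        defer hashmap.deinit();
        const count: usize = 2; // e.g. v.object.count() from the parsed JSON
        try hashmap.ensureTotalCapacity(@as(u32, @intCast(count)));
        // With capacity reserved up front, the assume-capacity insert cannot fail.
        hashmap.putAssumeCapacity("#artist", "Artist");
        hashmap.putAssumeCapacity("#year", "Year");
        try std.testing.expectEqual(@as(u32, 2), hashmap.count());
    }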
@@ -434,7 +434,7 @@ test "basic request parsing" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
var parms = try Params.parseRequest(allocator, &request, writer);
defer parms.deinit();
try std.testing.expect(parms.return_consumed_capacity == .total);
@@ -463,7 +463,7 @@ test "read item" {
defer Account.test_retain_db = false;
const allocator = std.testing.allocator;
const account_id = 1234;
var db = try Account.dbForAccount(allocator, account_id);
const db = try Account.dbForAccount(allocator, account_id);
defer allocator.destroy(db);
defer Account.testDbDeinit();
@@ -562,7 +562,7 @@ test "read item" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
_ = try @import("batchwriteitem.zig").handler(&request, writer);
}
@@ -596,7 +596,7 @@ test "read item" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
const output = try handler(&request, writer);
defer allocator.free(output);
// TODO: Fix this
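Much of the remaining diff is mechanical: Zig 0.12 turns "local variable is never mutated" into a compile error, so locals that are only read, like the writer and db bindings in these tests, become const. The distinction in one self-contained test:

    const std = @import("std");

    test "never-mutated locals must be const in Zig 0.12" {
        var al = std.ArrayList(u8).init(std.testing.allocator); // mutated through &al, so var is fine
        defer al.deinit();
        const writer = al.writer(); // never reassigned, so 0.12 requires const
        try writer.writeAll("ok");
        try std.testing.expectEqualStrings("ok", al.items);
    }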

View File

@@ -361,7 +361,7 @@ test "basic request parsing failure" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
var parms = try Params.parseRequest(allocator, &request, writer);
defer parms.deinit();
try std.testing.expectError(error.InvalidPadding, parms.validate());
@@ -398,7 +398,7 @@ test "basic request parsing" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
var parms = try Params.parseRequest(allocator, &request, writer);
defer parms.deinit();
try std.testing.expect(parms.return_consumed_capacity == .none);
@@ -477,7 +477,7 @@ test "all types request parsing" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
var parms = try Params.parseRequest(allocator, &request, writer);
defer parms.deinit();
try parms.validate();
@@ -497,7 +497,7 @@ test "all types request parsing" {
try std.testing.expectEqualStrings("Binary", put[2].name);
try std.testing.expect(put[2].value == .binary);
try std.testing.expect(put[2].value.binary.len > 0);
var buf = try allocator.alloc(u8, "this text is base64-encoded".len);
const buf = try allocator.alloc(u8, "this text is base64-encoded".len);
defer allocator.free(buf);
try std.base64.standard.Decoder.decode(buf, put[2].value.binary);
try std.testing.expectEqualStrings("this text is base64-encoded", buf);
@@ -509,7 +509,7 @@ test "write item" {
defer Account.testDbDeinit();
const allocator = std.testing.allocator;
const account_id = 1234;
var db = try Account.dbForAccount(allocator, account_id);
const db = try Account.dbForAccount(allocator, account_id);
defer allocator.destroy(db);
defer Account.testDbDeinit();
const account = try Account.accountForId(allocator, account_id); // This will get us the encryption key needed
@@ -591,6 +591,6 @@ test "write item" {
};
var al = std.ArrayList(u8).init(allocator);
defer al.deinit();
var writer = al.writer();
const writer = al.writer();
_ = try handler(&request, writer);
}

View File

@@ -561,7 +561,7 @@ test "can create a table" {
.account_id = 1234,
.status = .ok,
.reason = null,
.headers = std.http.Headers.init(allocator),
.headers = &.{},
.output_format = .text,
};
const output = try handler(&request, std.io.null_writer);
@@ -592,7 +592,7 @@ test "will fail an unrecognized request parameter" {
.account_id = 1234,
.status = .ok,
.reason = null,
.headers = std.http.Headers.init(allocator),
.headers = &.{},
.output_format = .text,
};
var al = std.ArrayList(u8).init(allocator);
@@ -631,7 +631,7 @@ fn failOnShortTableNames(format: AuthenticatedRequest.OutputFormat) !void {
.account_id = 1234,
.status = .ok,
.reason = null,
.headers = std.http.Headers.init(allocator),
.headers = &.{},
.output_format = format,
};
var al = std.ArrayList(u8).init(allocator);

View File

@@ -77,7 +77,7 @@ pub const Attribute = struct {
// {
// "string" : {...attribute value...}
// }
var attribute_count = value.count();
const attribute_count = value.count();
if (attribute_count == 0)
try returnException(
request,
@@ -149,11 +149,11 @@ pub const AttributeValue = union(AttributeTypeName) {
if (std.mem.eql(u8, token.string, "binary_set") or std.mem.eql(u8, token.string, "BS"))
rc = Self{ .binary_set = try std.json.innerParse([][]const u8, allocator, source, options) };
if (std.mem.eql(u8, token.string, "list") or std.mem.eql(u8, token.string, "L")) {
var json = try std.json.Value.jsonParse(allocator, source, options);
const json = try std.json.Value.jsonParse(allocator, source, options);
rc = Self{ .list = json.array };
}
if (std.mem.eql(u8, token.string, "map") or std.mem.eql(u8, token.string, "M")) {
var json = try std.json.Value.jsonParse(allocator, source, options);
const json = try std.json.Value.jsonParse(allocator, source, options);
rc = Self{ .map = json.object };
}
if (rc == null) return error.InvalidEnumTag;
@@ -182,11 +182,11 @@ pub const AttributeValue = union(AttributeTypeName) {
if (source.object.get("binary_set") orelse source.object.get("BS")) |attr|
rc = Self{ .binary_set = try std.json.innerParseFromValue([][]const u8, allocator, attr, options) };
if (source.object.get("list") orelse source.object.get("L")) |attr| {
var json = try std.json.Value.jsonParseFromValue(allocator, attr, options);
const json = try std.json.Value.jsonParseFromValue(allocator, attr, options);
rc = Self{ .list = json.array };
}
if (source.object.get("map") orelse source.object.get("M")) |attr| {
var json = try std.json.Value.jsonParseFromValue(allocator, attr, options);
const json = try std.json.Value.jsonParseFromValue(allocator, attr, options);
rc = Self{ .map = json.object };
}
if (rc == null) return error.InvalidEnumTag;
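Both parse paths above dispatch on a single-key object whose key names the attribute type ("L"/"list", "M"/"map", and so on), keeping list and map payloads as generic std.json.Value data. A small sketch of inspecting such a payload with std.json.parseFromSlice; the JSON here is a made-up example:

    const std = @import("std");

    test "dispatch on the attribute's type key" {
        const parsed = try std.json.parseFromSlice(
            std.json.Value,
            std.testing.allocator,
            \\{ "L": [ { "S": "foo" }, { "N": "42" } ] }
        ,
            .{},
        );
        defer parsed.deinit();

        // Same lookup shape as jsonParseFromValue above: long name or short name.
        const attr = parsed.value.object.get("list") orelse parsed.value.object.get("L");
        try std.testing.expectEqual(@as(usize, 2), attr.?.array.items.len);
    }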
@@ -262,7 +262,7 @@ pub const AttributeValue = union(AttributeTypeName) {
return error.InvalidPadding;
}
if (leftover_idx == null) return;
var leftover = source[leftover_idx.?..];
const leftover = source[leftover_idx.?..];
if (decoder.pad_char) |pad_char| {
const padding_len = acc_len / 2;
var padding_chars: usize = 0;
@@ -677,7 +677,7 @@ fn insertIntoDm(
}
fn testCreateTable(allocator: std.mem.Allocator, account_id: u40) !*sqlite.Db {
var db = try Account.dbForAccount(allocator, account_id);
const db = try Account.dbForAccount(allocator, account_id);
const account = try Account.accountForId(allocator, account_id); // This will get us the encryption key needed
defer account.deinit();
var hash = AttributeDefinition{ .name = "Artist", .type = .S };
@@ -716,7 +716,7 @@ test "can list tables in an account" {
Account.test_retain_db = true;
const allocator = std.testing.allocator;
const account_id = 1234;
var db = try testCreateTable(allocator, account_id);
const db = try testCreateTable(allocator, account_id);
defer allocator.destroy(db);
defer Account.testDbDeinit();
var table_list = try tablesForAccount(allocator, account_id);
@@ -730,7 +730,7 @@ test "can put an item in a table in an account" {
Account.test_retain_db = true;
const allocator = std.testing.allocator;
const account_id = 1234;
var db = try testCreateTable(allocator, account_id);
const db = try testCreateTable(allocator, account_id);
defer allocator.destroy(db);
defer Account.testDbDeinit();
var table_list = try tablesForAccount(allocator, account_id);

View File

@@ -97,7 +97,7 @@ pub fn encryptAndEncode(allocator: std.mem.Allocator, key: [key_length]u8, plain
const ciphertext = try encrypt(allocator, key, plaintext);
defer allocator.free(ciphertext);
const Encoder = std.base64.standard.Encoder;
var encoded_ciphertext = try allocator.alloc(u8, Encoder.calcSize(ciphertext.len));
const encoded_ciphertext = try allocator.alloc(u8, Encoder.calcSize(ciphertext.len));
errdefer allocator.free(encoded_ciphertext);
return Encoder.encode(encoded_ciphertext, ciphertext);
}
@@ -108,14 +108,14 @@ pub fn encryptAndEncodeWithNonce(allocator: std.mem.Allocator, key: [key_length]
const ciphertext = try encryptWithNonce(allocator, key, nonce, plaintext);
defer allocator.free(ciphertext);
const Encoder = std.base64.standard.Encoder;
var encoded_ciphertext = try allocator.alloc(u8, Encoder.calcSize(ciphertext.len));
const encoded_ciphertext = try allocator.alloc(u8, Encoder.calcSize(ciphertext.len));
errdefer allocator.free(encoded_ciphertext);
return Encoder.encode(encoded_ciphertext, ciphertext);
}
/// Decrypts data. Use deriveKey function to get a key from password/salt
pub fn decrypt(allocator: std.mem.Allocator, key: [key_length]u8, ciphertext: []const u8) ![]const u8 {
var plaintext = try allocator.alloc(
const plaintext = try allocator.alloc(
u8,
ciphertext.len - nonce_length - std.crypto.aead.salsa_poly.XSalsa20Poly1305.tag_length,
);
@@ -140,7 +140,7 @@ pub fn decrypt(allocator: std.mem.Allocator, key: [key_length]u8, ciphertext: []
pub fn decodeAndDecrypt(allocator: std.mem.Allocator, key: [key_length]u8, encoded_ciphertext: []const u8) ![]const u8 {
const Decoder = std.base64.standard.Decoder;
const ciphertext_len = try Decoder.calcSizeForSlice(encoded_ciphertext);
var ciphertext = try allocator.alloc(u8, ciphertext_len);
const ciphertext = try allocator.alloc(u8, ciphertext_len);
defer allocator.free(ciphertext);
try std.base64.standard.Decoder.decode(ciphertext, encoded_ciphertext);
return try decrypt(allocator, key, ciphertext);
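A pattern worth noting in this file: base64 buffers are allocated to exactly Encoder.calcSize / Decoder.calcSizeForSlice bytes, so the encode and decode steps never over- or under-allocate. A standalone round trip using the same std.base64 calls (the plaintext is just a stand-in for real ciphertext bytes):

    const std = @import("std");

    test "base64 round trip with exact-size buffers" {
        const allocator = std.testing.allocator;
        const plaintext = "example ciphertext bytes"; // stand-in for real ciphertext

        const Encoder = std.base64.standard.Encoder;
        const encoded = try allocator.alloc(u8, Encoder.calcSize(plaintext.len));
        defer allocator.free(encoded);
        _ = Encoder.encode(encoded, plaintext);

        const Decoder = std.base64.standard.Decoder;
        const decoded = try allocator.alloc(u8, try Decoder.calcSizeForSlice(encoded));
        defer allocator.free(decoded);
        try Decoder.decode(decoded, encoded);

        try std.testing.expectEqualStrings(plaintext, decoded);
    }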

View File

@@ -99,7 +99,7 @@ fn executeOperation(
};
}
fn authenticateUser(allocator: std.mem.Allocator, context: universal_lambda_interface.Context, target: []const u8, headers: std.http.Headers, body_reader: anytype) !void {
var request = signing.UnverifiedRequest{
const request = signing.UnverifiedRequest{
.method = std.http.Method.POST,
.target = target,
.headers = headers,
@@ -167,7 +167,7 @@ fn fillRootCreds(allocator: std.mem.Allocator) !void {
var line_num: usize = 1;
while (reader.streamUntilDelimiter(line_writer, '\n', null)) : (line_num += 1) {
defer line.clearRetainingCapacity();
var relevant_line = line.items[0 .. std.mem.indexOfScalar(u8, line.items, '#') orelse line.items.len];
const relevant_line = line.items[0 .. std.mem.indexOfScalar(u8, line.items, '#') orelse line.items.len];
const relevant_line_trimmed = std.mem.trim(u8, relevant_line, " \t");
var value_iterator = std.mem.splitScalar(u8, relevant_line_trimmed, ',');
if (std.mem.trim(u8, value_iterator.peek().?, " \t").len == 0) continue;
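fillRootCreds reads the credentials file line by line, strips anything after '#', trims whitespace, and splits the remainder on commas, skipping blank results. The same sequence applied to one made-up input line (the real file's field layout is not shown in this hunk):

    const std = @import("std");

    test "strip comment, trim, split on commas" {
        const line = "AKIDEXAMPLE, sEcReTkEy ,1234 # root credential entry";
        const relevant = line[0 .. std.mem.indexOfScalar(u8, line, '#') orelse line.len];
        const trimmed = std.mem.trim(u8, relevant, " \t");
        var it = std.mem.splitScalar(u8, trimmed, ',');
        try std.testing.expectEqualStrings("AKIDEXAMPLE", std.mem.trim(u8, it.next().?, " \t"));
        try std.testing.expectEqualStrings("sEcReTkEy", std.mem.trim(u8, it.next().?, " \t"));
        try std.testing.expectEqualStrings("1234", std.mem.trim(u8, it.next().?, " \t"));
        try std.testing.expect(it.next() == null);
    }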