This was always problematic and bypassed the build system. It broke earlier when moving to proper modules, and it is not worth keeping. The idea was to allow use of the SDK in a build context, but that does not work anyway, because the build runs in a sandboxed environment that effectively bars things like connecting to TLS endpoints. That is a feature of the build system, not a bug, and issues like https://github.com/ziglang/zig/issues/14286 show that the Zig team wants to sandbox even further. The right approach for downstream projects is to create an executable that depends on aws and run it as part of the build; this is where https://git.lerch.org/lobo/lambda-zig is heading.
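A minimal sketch of that downstream approach, assuming a hypothetical tool source at tools/deploy.zig and a dependency named aws in build.zig.zon (names are illustrative, not part of this repository):

    // Hypothetical consumer build.zig: compile a small tool against the aws
    // module, then run it as a build step. Unlike @import-time SDK usage, the
    // run step executes outside the compiler sandbox and may reach the network.
    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const target = b.standardTargetOptions(.{});
        const optimize = b.standardOptimizeOption(.{});

        const aws_dep = b.dependency("aws", .{ .target = target, .optimize = optimize });

        const deploy_mod = b.createModule(.{
            .root_source_file = b.path("tools/deploy.zig"),
            .target = target,
            .optimize = optimize,
        });
        deploy_mod.addImport("aws", aws_dep.module("aws"));

        const deploy_exe = b.addExecutable(.{
            .name = "deploy",
            .root_module = deploy_mod,
        });

        // Run the deployment tool as its own named step.
        const run_deploy = b.addRunArtifact(deploy_exe);
        const deploy_step = b.step("deploy", "Deploy using the AWS SDK");
        deploy_step.dependOn(&run_deploy.step);
    }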
build.zig · 424 lines · 16 KiB · Zig
const std = @import("std");

const Builder = @import("std").Build;

const models_subdir = "codegen/sdk-codegen/aws-models/"; // note will probably not work on windows
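
// Test matrix: the test step below builds a separate test binary for each of
// these targets. Foreign binaries that cannot run on the host are skipped at
// run time (see skip_foreign_checks).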
const test_targets = [_]std.Target.Query{
    .{}, // native
    .{ .cpu_arch = .x86_64, .os_tag = .linux },
    .{ .cpu_arch = .aarch64, .os_tag = .linux },
    .{ .cpu_arch = .riscv64, .os_tag = .linux },
    .{ .cpu_arch = .arm, .os_tag = .linux },
    .{ .cpu_arch = .x86_64, .os_tag = .windows },
    .{ .cpu_arch = .aarch64, .os_tag = .macos },
    .{ .cpu_arch = .x86_64, .os_tag = .macos },
    // .{ .cpu_arch = .wasm32, .os_tag = .wasi },
};

pub fn build(b: *Builder) !void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const no_llvm = b.option(
        bool,
        "no-llvm",
        "Disable LLVM",
    ) orelse false;
    const broken_windows = b.option(
        bool,
        "broken-windows",
        "Windows is broken in this environment (do not run Windows tests)",
    ) orelse false;
    const no_bin = b.option(bool, "no-bin", "skip emitting binary") orelse false;

    const test_filters: []const []const u8 = b.option(
        []const []const u8,
        "test-filter",
        "Skip tests that do not match any of the specified filters",
    ) orelse &.{};

    const dep_mods = try getDependencyModules(b, .{
        .target = target,
        .optimize = optimize,
    });

    const mod_exe = b.createModule(.{
        .root_source_file = b.path("src/main.zig"),
        .target = target,
        .optimize = optimize,
    });
    configure(mod_exe, dep_mods, true);

    const exe = b.addExecutable(.{
        .name = "demo",
        .root_module = mod_exe,
        .use_llvm = !no_llvm,
    });

    const run_cmd = b.addRunArtifact(exe);
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }

    const run_step = b.step("run", "Run the app");
    run_step.dependOn(&run_cmd.step);

    const cg = b.step("gen", "Generate zig service code from smithy models");

    const cg_mod = b.createModule(.{
        .root_source_file = b.path("codegen/src/main.zig"),
        // We need this generated for the host, not the real target
        .target = b.graph.host,
        .optimize = if (b.verbose) .Debug else .ReleaseSafe,
    });
    configure(cg_mod, dep_mods, false);

    const cg_exe = b.addExecutable(.{
        .name = "codegen",
        .root_module = cg_mod,
    });
    var cg_cmd = b.addRunArtifact(cg_exe);
    cg_cmd.addArg("--models");
    cg_cmd.addArg(try std.fs.path.join(
        b.allocator,
        &[_][]const u8{
            try b.dependency("models", .{}).path("").getPath3(b, null).toString(b.allocator),
            models_subdir,
        },
    ));
    cg_cmd.addArg("--output");
    const cg_output_dir = cg_cmd.addOutputDirectoryArg("src/models");
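    // cg_output_dir is a cache-managed LazyPath. It is consumed twice below:
    // once for the service_manifest module and once by the package step.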
    if (b.verbose) {
        cg_cmd.addArg("--verbose");
    }
    if (!no_bin) {
        b.installArtifact(cg_exe);
    }
    // cg_cmd.step.dependOn(&fetch_step.step);
    // TODO: this should use zig_exe from std.Build
    // codegen should store a hash in a comment
    // this would be hash of the exe that created the file
    // concatenated with hash of input json. this would
    // allow skipping generated files. May not include hash
    // of contents of output file as maybe we want to tweak
    // manually??
    //
    // All the hashes can be in service_manifest.zig, which
    // could be fun to just parse and go nuts. Top of
    // file, generator exe hash. Each import has comment
    // with both input and output hash and we can decide
    // later about warning on manual changes...

    cg.dependOn(&cg_cmd.step);

    // Each module will need access to the generated AWS modules. These
    // are all imported by service_manifest.zig, which is a generated list
    // of services created by the codegen process.
    //
    // First, we need to check if pre-generated models exist, which only happens
    // for packaged distribution.
    //
    // The idea here is that if we have a packaged distribution (tarball with
    // models available), we are pre-generated, do not need the codegen step
    // (and in fact do not have that available), and our service_manifest
    // module needs to be the pre-packaged file.
    //
    // If we do not have a packaged distribution, the file will not exist,
    // because it is generated by codegen and will live in the zig cache directory,
    // so we depend on the codegen step and the service_manifest module will
    // be based on the codegen output itself.
    //
    // Most of this complication comes from the fact that we want to enable
    // consuming build.zig files to be able to use the SDK at build time for
    // things like code deployments, e.g. https://git.lerch.org/lobo/lambda-zig
    const has_pre_generated =
        if (b.build_root.handle.access("src/models/service_manifest.zig", .{})) true else |_| false;

    // Only depend on codegen if we don't have pre-generated models
    if (!has_pre_generated)
        exe.step.dependOn(cg);

    // Use pre-generated models if available, otherwise use codegen output
    const service_manifest_source: std.Build.LazyPath = if (has_pre_generated)
        b.path("src/models/service_manifest.zig")
    else
        cg_output_dir.path(b, "service_manifest.zig");

    const service_manifest_module = b.createModule(.{
        .root_source_file = service_manifest_source,
        .target = target,
        .optimize = optimize,
    });
    configure(service_manifest_module, dep_mods, true);

    mod_exe.addImport("service_manifest", service_manifest_module);

    // Expose module to others
    const mod_aws = b.addModule("aws", .{
        .root_source_file = b.path("src/aws.zig"),
        .target = target,
        .optimize = optimize,
    });
    mod_aws.addImport("service_manifest", service_manifest_module);
    configure(mod_aws, dep_mods, true);

    // Expose module to others
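    // Note: no .target/.optimize here; the module is compiled with whatever
    // settings the importing compilation provides.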
    const mod_aws_signing = b.addModule("aws-signing", .{
        .root_source_file = b.path("src/aws_signing.zig"),
    });
    configure(mod_aws_signing, dep_mods, false);

    // Similar to creating the run step earlier, this exposes a `test` step to
    // the `zig build --help` menu, providing a way for the user to request
    // running the unit tests.
    const test_step = b.step("test", "Run unit tests");

    for (test_targets) |t| {
        if (broken_windows and t.os_tag == .windows) continue;

        const mod_unit_tests = b.createModule(.{
            .root_source_file = b.path("src/aws.zig"),
            .target = b.resolveTargetQuery(t),
            .optimize = optimize,
        });
        mod_unit_tests.addImport("service_manifest", service_manifest_module);
        configure(mod_unit_tests, dep_mods, true);

        // Creates a step for unit testing. This only builds the test executable
        // but does not run it.
        const unit_tests = b.addTest(.{
            .root_module = mod_unit_tests,
            .filters = test_filters,
        });

        if (!has_pre_generated)
            unit_tests.step.dependOn(cg);
        unit_tests.use_llvm = !no_llvm;

        const run_unit_tests = b.addRunArtifact(unit_tests);
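        // Foreign-target test binaries cannot execute on the host (absent an
        // emulator); skip_foreign_checks marks those runs as skipped instead
        // of failed.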
        run_unit_tests.skip_foreign_checks = true;

        test_step.dependOn(&run_unit_tests.step);
    }
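
    // The check step compiles the demo exe without running or installing it,
    // which is useful for fast compile-error feedback (e.g. editor integrations).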
    const check = b.step("check", "Check compilation errors");
    check.dependOn(&exe.step);

    // Similar to the `test` step above, this exposes a `smoke-test` step to
    // the `zig build --help` menu, but it only tests the aws module for the
    // native target.
    const smoke_test_step = b.step("smoke-test", "Run smoke tests");

    // Creates a step for unit testing. This only builds the test executable
    // but does not run it.
    const smoke_test = b.addTest(.{
        .root_module = mod_aws,
        .filters = test_filters,
    });
    smoke_test.use_llvm = !no_llvm;
    if (!has_pre_generated)
        smoke_test.step.dependOn(cg);

    const run_smoke_test = b.addRunArtifact(smoke_test);

    smoke_test_step.dependOn(&run_smoke_test.step);
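
    // With -Dno-bin, the executable is still built (so errors surface) but is
    // not installed into zig-out.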
    if (no_bin) {
        b.getInstallStep().dependOn(&exe.step);
    } else {
        b.installArtifact(exe);
    }

    // Package step - creates distribution source directory
    const pkg_step = PackageStep.create(b, cg_output_dir);
    pkg_step.step.dependOn(cg);

    const package = b.step("package", "Copy code to zig-out/package with generated models");
    package.dependOn(&pkg_step.step);
}

fn configure(compile: *std.Build.Module, modules: std.StringHashMap(*std.Build.Module), include_time: bool) void {
    compile.addImport("smithy", modules.get("smithy").?);
    compile.addImport("date", modules.get("date").?);
    compile.addImport("json", modules.get("json").?);
    compile.addImport("case", modules.get("case").?);
    if (include_time) compile.addImport("zeit", modules.get("zeit").?);
}

fn getDependencyModules(b: *std.Build, args: anytype) !std.StringHashMap(*std.Build.Module) {
    var result = std.StringHashMap(*std.Build.Module).init(b.allocator);

    // External dependencies
    const dep_smithy = b.dependency("smithy", args);
    const mod_smithy = dep_smithy.module("smithy");
    try result.putNoClobber("smithy", mod_smithy);

    const dep_zeit = b.dependency("zeit", args);
    const mod_zeit = dep_zeit.module("zeit");
    try result.putNoClobber("zeit", mod_zeit);

    const dep_case = b.dependency("case", args);
    const mod_case = dep_case.module("case");
    try result.putNoClobber("case", mod_case);
    // End External dependencies

    // Private modules/dependencies
    const dep_json = b.dependency("json", args);
    const mod_json = dep_json.module("json");
    try result.putNoClobber("json", mod_json);

    const dep_date = b.dependency("date", args);
    const mod_date = dep_date.module("date");
    try result.putNoClobber("date", mod_date);
    // End private modules/dependencies

    return result;
}

/// Custom build step that creates a distribution source directory.
/// This copies all source files plus the generated service models into a
/// package directory suitable for distribution.
const PackageStep = struct {
    step: std.Build.Step,
    cg_output_dir: std.Build.LazyPath,

    const base_id: std.Build.Step.Id = .custom;

    /// Files to include in the package (relative to build root)
    const package_files = [_][]const u8{
        "build.zig",
        "build.zig.zon",
        "README.md",
        "LICENSE",
    };

    /// Directories to include in the package (relative to build root)
    const package_dirs = [_][]const u8{
        "src",
        "lib",
    };

    pub fn create(owner: *std.Build, cg_output_dir: std.Build.LazyPath) *PackageStep {
        const self = owner.allocator.create(PackageStep) catch @panic("OOM");
        self.* = .{
            .step = std.Build.Step.init(.{
                .id = base_id,
                .name = "copy generated files",
                .owner = owner,
                .makeFn = make,
            }),
            .cg_output_dir = cg_output_dir,
        };
        return self;
    }

    fn make(step: *std.Build.Step, options: std.Build.Step.MakeOptions) anyerror!void {
        _ = options;
        const self: *PackageStep = @fieldParentPtr("step", step);
        const b = step.owner;

        // Get the path to generated models
        const models_path = self.cg_output_dir.getPath2(b, &self.step);

        // Create output directory for packaging
        const package_dir = b.pathJoin(&.{ "zig-out", "package" });
        const models_dest_dir = b.pathJoin(&.{ package_dir, "src", "models" });
        std.fs.cwd().makePath(models_dest_dir) catch |err| {
            return step.fail("Failed to create package directory: {}", .{err});
        };

        // Copy all source files to package directory
        for (package_files) |file_name|
            copyFile(b, b.build_root.handle, file_name, package_dir) catch {};

        // Copy directories
        for (package_dirs) |dir_name|
            copyDirRecursive(b, b.build_root.handle, dir_name, package_dir) catch |err| {
                return step.fail("Failed to copy directory '{s}': {}", .{ dir_name, err });
            };

        // Copy generated models to src/models/
        copyGeneratedModels(b, models_path, models_dest_dir) catch |err| {
            return step.fail("Failed to copy generated models: {}", .{err});
        };
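
        // Report the step as having done work; this custom step has no cache
        // manifest and re-copies on every invocation.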
        step.result_cached = false;
    }

    fn copyFile(b: *std.Build, src_dir: std.fs.Dir, file_path: []const u8, dest_prefix: []const u8) !void {
        const dest_path = b.pathJoin(&.{ dest_prefix, file_path });

        // Ensure parent directory exists
        if (std.fs.path.dirname(dest_path)) |parent|
            std.fs.cwd().makePath(parent) catch {};

        src_dir.copyFile(file_path, std.fs.cwd(), dest_path, .{}) catch return;
    }

    fn copyDirRecursive(b: *std.Build, src_base: std.fs.Dir, dir_path: []const u8, dest_prefix: []const u8) !void {
        var src_dir = src_base.openDir(dir_path, .{ .iterate = true }) catch return;
        defer src_dir.close();

        var walker = try src_dir.walk(b.allocator);
        defer walker.deinit();

        while (try walker.next()) |entry| {
            // Skip zig build artifact directories
            if (std.mem.indexOf(u8, entry.path, "zig-out") != null or
                std.mem.indexOf(u8, entry.path, ".zig-cache") != null or
                std.mem.indexOf(u8, entry.path, "zig-cache") != null)
                continue;

            const src_path = b.pathJoin(&.{ dir_path, entry.path });
            const dest_path = b.pathJoin(&.{ dest_prefix, dir_path, entry.path });

            switch (entry.kind) {
                .directory => std.fs.cwd().makePath(dest_path) catch {},
                .file => {
                    // Ensure parent directory exists
                    if (std.fs.path.dirname(dest_path)) |parent| {
                        std.fs.cwd().makePath(parent) catch {};
                    }
                    src_base.copyFile(src_path, std.fs.cwd(), dest_path, .{}) catch {};
                },
                .sym_link => {
                    var link_buf: [std.fs.max_path_bytes]u8 = undefined;
                    const link_target = entry.dir.readLink(entry.basename, &link_buf) catch continue;
                    // Ensure parent directory exists
                    if (std.fs.path.dirname(dest_path)) |parent| {
                        std.fs.cwd().makePath(parent) catch {};
                    }
                    std.fs.cwd().symLink(link_target, dest_path, .{}) catch {};
                },
                else => {},
            }
        }
    }

    fn copyGeneratedModels(b: *std.Build, models_path: []const u8, models_dest_dir: []const u8) !void {
        var models_dir = std.fs.cwd().openDir(models_path, .{ .iterate = true }) catch
            return error.ModelsNotFound;
        defer models_dir.close();

        var iter = models_dir.iterate();
        while (try iter.next()) |entry| {
            if (entry.kind != .file) continue;

            const dest_path = b.pathJoin(&.{ models_dest_dir, entry.name });
            models_dir.copyFile(entry.name, std.fs.cwd(), dest_path, .{}) catch continue;
        }
    }
};