zfin/src/analytics/timeline.zig
2026-04-23 16:02:34 -07:00

1374 lines
51 KiB
Zig

//! Portfolio timeline analytics — pure compute over snapshot history.
//!
//! This module takes pre-loaded portfolio snapshots (see `src/history.zig`
//! for the IO layer that produces them) and reduces them to time-series
//! data for display or export. Nothing here touches the filesystem, the
//! network, or a writer — that's by design, so the logic can be tested
//! exhaustively with fixture data.
//!
//! Typical flow:
//! snapshots : []const Snapshot <- history.loadHistoryDir(...)
//! │
//! ▼
//! buildSeries(snapshots) -> TimelineSeries (sorted by date)
//! │
//! ▼
//! filterByDate(series, since, until) (optional)
//! │
//! ▼
//! extractMetric(series, .net_worth) -> []MetricPoint for rendering
//!
//! For rollup generation, `buildRollupRecords` emits a flat slice suitable
//! for `srf.fmtFrom` without any of the per-lot detail — the rollup is a
//! summary cache, not a replacement for the per-day snapshot files.
const std = @import("std");
const Date = @import("../models/date.zig").Date;
const snapshot = @import("../models/snapshot.zig");
const valuation = @import("valuation.zig");
const HistoricalPeriod = valuation.HistoricalPeriod;
// ── Public types ─────────────────────────────────────────────
/// A single point on the portfolio timeline. Totals are always present
/// (they're the three `kind::total` rows every snapshot emits); the
/// per-account / per-tax-type maps are only populated when the source
/// snapshot included analysis breakdowns.
///
/// Values are dollar amounts. Weights aren't stored — callers can
/// compute them cheaply from the totals when rendering.
pub const TimelinePoint = struct {
    as_of_date: Date,
    net_worth: f64,
    liquid: f64,
    illiquid: f64,
    /// Per-account totals (name -> dollar total). Empty when the source
    /// snapshot carried no analysis breakdown (e.g. backfilled-from-.txt
    /// snapshots).
    accounts: []const NamedValue,
    /// Per-tax-type totals. Same emptiness caveat as `accounts`.
    tax_types: []const NamedValue,
    /// Releases only the two owned outer slices. The name strings they
    /// reference are borrowed from the source Snapshot's arena and are
    /// deliberately left alone.
    pub fn deinit(self: TimelinePoint, allocator: std.mem.Allocator) void {
        allocator.free(self.tax_types);
        allocator.free(self.accounts);
    }
};
/// A (name, dollar-value) pair used for per-account and per-tax-type
/// rows. `name` is borrowed from the source Snapshot's arena — the
/// TimelinePoint that owns the containing slice never frees it.
pub const NamedValue = struct {
    name: []const u8,
    value: f64,
};
/// An ordered series of TimelinePoint sorted ascending by as_of_date.
/// Duplicates by date shouldn't happen (snapshot filenames enforce one
/// per day) but if they do the caller's last-write-wins.
pub const TimelineSeries = struct {
    points: []TimelinePoint,
    /// Allocator that produced `points` (and each point's inner slices);
    /// stored so deinit can free without the caller re-supplying it.
    allocator: std.mem.Allocator,
    /// Frees each point's nested slices, then the outer slice.
    pub fn deinit(self: TimelineSeries) void {
        for (self.points) |p| p.deinit(self.allocator);
        self.allocator.free(self.points);
    }
};
/// Which metric to extract for single-series display.
pub const Metric = enum {
    net_worth,
    liquid,
    illiquid,

    /// Human-facing display label for this metric.
    pub fn label(self: Metric) []const u8 {
        switch (self) {
            .net_worth => return "Net Worth",
            .liquid => return "Liquid",
            .illiquid => return "Illiquid",
        }
    }
};
/// One row of the extracted single-metric series. Used as input to
/// renderers and for mathematical operations (diffs, deltas, sparklines).
pub const MetricPoint = struct {
    /// Snapshot date the value was observed on.
    date: Date,
    /// Metric value in dollars.
    value: f64,
};
// ── Core construction ────────────────────────────────────────
/// Build a TimelineSeries from a slice of snapshots.
///
/// Input snapshots are NOT required to be sorted; this function sorts
/// by `as_of_date` ascending. Each returned point's `accounts` and
/// `tax_types` slices borrow strings from the source Snapshot's arena
/// but own their own outer slice (freed by TimelinePoint.deinit).
pub fn buildSeries(
    allocator: std.mem.Allocator,
    snapshots: []const snapshot.Snapshot,
) !TimelineSeries {
    // `const`, not `var`: the binding is never reassigned (sort mutates
    // elements *through* the slice), and modern Zig rejects a local
    // `var` that is never mutated.
    const points = try allocator.alloc(TimelinePoint, snapshots.len);
    errdefer allocator.free(points);
    // Track how many points were fully constructed so the error path
    // frees exactly their nested slices. This errdefer is declared
    // after the free above, so (LIFO) it runs first — nested slices go
    // before the outer slice.
    var populated: usize = 0;
    errdefer for (points[0..populated]) |p| p.deinit(allocator);
    for (snapshots, 0..) |snap, idx| {
        points[idx] = try snapshotToPoint(allocator, snap);
        populated = idx + 1;
    }
    std.mem.sort(TimelinePoint, points, {}, lessByDate);
    return .{ .points = points, .allocator = allocator };
}
/// `std.mem.sort` comparator: strict ascending order by snapshot date.
fn lessByDate(_: void, lhs: TimelinePoint, rhs: TimelinePoint) bool {
    return lhs.as_of_date.lessThan(rhs.as_of_date);
}
/// Derive a TimelinePoint from a single Snapshot. Pure — no IO.
///
/// Exposed for testability. `buildSeries` is the usual entry point.
pub fn snapshotToPoint(
    allocator: std.mem.Allocator,
    snap: snapshot.Snapshot,
) !TimelinePoint {
    // Missing scope rows simply leave their total at zero.
    var net_worth: f64 = 0;
    var liquid: f64 = 0;
    var illiquid: f64 = 0;
    for (snap.totals) |row| {
        if (std.mem.eql(u8, row.scope, "net_worth")) {
            net_worth = row.value;
        } else if (std.mem.eql(u8, row.scope, "liquid")) {
            liquid = row.value;
        } else if (std.mem.eql(u8, row.scope, "illiquid")) {
            illiquid = row.value;
        }
    }
    const account_rows = try allocator.alloc(NamedValue, snap.accounts.len);
    // Free the first slice if allocating the second one fails below.
    errdefer allocator.free(account_rows);
    for (snap.accounts, 0..) |acct, idx| {
        account_rows[idx] = .{ .name = acct.name, .value = acct.value };
    }
    const tax_rows = try allocator.alloc(NamedValue, snap.tax_types.len);
    errdefer allocator.free(tax_rows);
    for (snap.tax_types, 0..) |tt, idx| {
        tax_rows[idx] = .{ .name = tt.label, .value = tt.value };
    }
    return .{
        .as_of_date = snap.meta.as_of_date,
        .net_worth = net_worth,
        .liquid = liquid,
        .illiquid = illiquid,
        .accounts = account_rows,
        .tax_types = tax_rows,
    };
}
// ── Filters ──────────────────────────────────────────────────
/// Return the subset of `points` whose `as_of_date` falls within the
/// inclusive `[since, until]` range. Either bound may be null to leave
/// that end open. The resulting slice is newly allocated; caller owns.
///
/// This does NOT free the input points — the caller remains responsible
/// for the original TimelineSeries.
pub fn filterByDate(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    since: ?Date,
    until: ?Date,
) ![]TimelinePoint {
    var matched: std.ArrayList(TimelinePoint) = .empty;
    errdefer matched.deinit(allocator);
    for (points) |p| {
        // A null bound counts as always satisfied; bounds are inclusive.
        const lower_ok = if (since) |s| !p.as_of_date.lessThan(s) else true;
        const upper_ok = if (until) |u| !u.lessThan(p.as_of_date) else true;
        if (lower_ok and upper_ok) try matched.append(allocator, p);
    }
    return matched.toOwnedSlice(allocator);
}
// ── Metric extraction ────────────────────────────────────────
/// Extract a single top-level metric into a flat `[]MetricPoint` ready
/// for rendering or statistical analysis. Result is caller-owned.
pub fn extractMetric(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    metric: Metric,
) ![]MetricPoint {
    // `const`, not `var`: elements are written through the slice but the
    // binding itself is never reassigned — modern Zig rejects a local
    // `var` that is never mutated.
    const out = try allocator.alloc(MetricPoint, points.len);
    for (points, 0..) |p, i| {
        out[i] = .{
            .date = p.as_of_date,
            .value = switch (metric) {
                .net_worth => p.net_worth,
                .liquid => p.liquid,
                .illiquid => p.illiquid,
            },
        };
    }
    return out;
}
/// Which collection of per-row named values on `TimelinePoint` to project.
/// `.accounts` matches on account name, `.tax_types` on tax-type label.
pub const NamedSeriesSource = enum { accounts, tax_types };
/// Extract a single-metric series for a named row (an account or a
/// tax-type label) from the timeline. Dates without the named row emit
/// `value = 0` rather than being skipped, so the returned slice has
/// `points.len` entries — suitable for stacked displays that need a row
/// per named entity per date.
///
/// Caller owns the returned slice.
pub fn extractNamedSeries(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    source: NamedSeriesSource,
    name: []const u8,
) ![]MetricPoint {
    // `const`, not `var`: the binding is never reassigned (only element
    // stores), and modern Zig rejects a never-mutated local `var`.
    const out = try allocator.alloc(MetricPoint, points.len);
    for (points, 0..) |p, i| {
        const rows = switch (source) {
            .accounts => p.accounts,
            .tax_types => p.tax_types,
        };
        // Linear scan per point; first match wins, absent name -> 0.
        var value: f64 = 0;
        for (rows) |row| {
            if (std.mem.eql(u8, row.name, name)) {
                value = row.value;
                break;
            }
        }
        out[i] = .{ .date = p.as_of_date, .value = value };
    }
    return out;
}
// ── Deltas / statistics ──────────────────────────────────────
/// Summary statistics over a single-metric series.
pub const MetricStats = struct {
    /// Value of the earliest point in the series.
    first: f64,
    /// Value of the latest point in the series.
    last: f64,
    /// Smallest value observed anywhere in the series.
    min: f64,
    /// Largest value observed anywhere in the series.
    max: f64,
    /// `last - first`. Dollars, not percent.
    delta_abs: f64,
    /// `(last - first) / first` — or null when `first == 0` (division
    /// by zero); callers should render "n/a" or similar.
    delta_pct: ?f64,
};
/// Compute min/max/first/last/delta over a MetricPoint slice. Returns
/// null on empty input — every field would be meaningless otherwise.
pub fn computeStats(points: []const MetricPoint) ?MetricStats {
    if (points.len == 0) return null;
    const start = points[0].value;
    const finish = points[points.len - 1].value;
    // Deliberately use `<`/`>` comparisons (not @min/@max) so NaN
    // handling matches the original comparison semantics exactly.
    var lo = start;
    var hi = start;
    for (points) |p| {
        if (p.value < lo) lo = p.value;
        if (p.value > hi) hi = p.value;
    }
    const change = finish - start;
    return .{
        .first = start,
        .last = finish,
        .min = lo,
        .max = hi,
        .delta_abs = change,
        .delta_pct = if (start == 0) null else change / start,
    };
}
// ── Rollup emission ──────────────────────────────────────────
/// A single row in `history/rollup.srf`. Deliberately slim: one record
/// per date, carrying the three totals only. Per-account and per-
/// tax-type detail stays in the per-day files.
///
/// The `kind` discriminator pattern is consistent with the snapshot
/// format. Not strictly required here (the file has only one record
/// type) but future-proof if we ever add other rollup shapes.
pub const RollupRow = struct {
    /// Record discriminator; `buildRollupRecords` always writes "rollup".
    kind: []const u8,
    as_of_date: Date,
    net_worth: f64,
    liquid: f64,
    illiquid: f64,
};
/// Produce a rollup-row slice from a TimelineSeries. Pure function —
/// caller owns the result, ready to hand to `srf.fmtFrom`.
pub fn buildRollupRecords(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]RollupRow {
    // `const`, not `var`: only element stores follow; modern Zig
    // rejects a local `var` that is never mutated.
    const out = try allocator.alloc(RollupRow, points.len);
    for (points, 0..) |p, i| {
        out[i] = .{
            .kind = "rollup",
            .as_of_date = p.as_of_date,
            .net_worth = p.net_worth,
            .liquid = p.liquid,
            .illiquid = p.illiquid,
        };
    }
    return out;
}
// ── Snap-backward for snapshot points ────────────────────────
/// Key-extraction callback handed to `valuation.indexAtOrBefore` (see
/// `pointAtOrBefore`).
fn pointDateOf(p: TimelinePoint) Date {
    return p.as_of_date;
}
/// Return the latest point on or before `target`. Null if `points` is
/// empty or every entry sits strictly after `target`.
///
/// Delegates to the shared `valuation.indexAtOrBefore` kernel — same
/// snap-backward behavior used by candle pricing, so holiday/weekend
/// semantics are identical across the app. No slack cap: snapshot
/// history is dense enough by construction (one entry per trading day)
/// that caps would only hide real gaps.
pub fn pointAtOrBefore(points: []const TimelinePoint, target: Date) ?*const TimelinePoint {
    if (valuation.indexAtOrBefore(TimelinePoint, points, target, pointDateOf)) |idx| {
        return &points[idx];
    }
    return null;
}
// ── Rolling-windows block ────────────────────────────────────
/// One row in the rolling-windows block. `anchor_date` / `start_value` /
/// `delta_*` are null when there isn't enough history to honor the
/// window (e.g. asking for 10-year on a 2-week-old portfolio).
///
/// `end_value` is always populated — it's the latest point in the
/// series, which must exist for the block to render at all.
pub const WindowStat = struct {
    /// The period this row represents. Null for the synthetic "All-time"
    /// row (anchored to the first snapshot rather than to `today - N`).
    period: ?HistoricalPeriod,
    /// Human-facing label ("1 day", "YTD", "All-time").
    label: []const u8,
    /// Short label used when horizontal space is tight ("1D", "YTD").
    short_label: []const u8,
    /// The snapshot date we anchored to. Null when no snapshot exists at
    /// or before the target date — i.e. not enough history.
    anchor_date: ?Date,
    /// The anchor snapshot's metric value. Null when anchor is missing.
    start_value: ?f64,
    /// Always populated — the latest snapshot's metric value.
    end_value: f64,
    /// `end_value - start_value`. Null when start is missing.
    delta_abs: ?f64,
    /// `(end_value - start_value) / start_value`. Null when start is
    /// missing OR when start is exactly zero (division by zero).
    delta_pct: ?f64,
};
/// Rolling-windows block for a single metric. Owns the `rows` slice.
/// Order: 8 relative-to-today periods (from HistoricalPeriod.timeline_windows),
/// then a final synthetic "All-time" row anchored to the first snapshot.
pub const WindowSet = struct {
    rows: []WindowStat,
    /// Allocator that produced `rows`; kept for deinit.
    allocator: std.mem.Allocator,
    /// Frees the rows slice only. Row labels are borrowed from
    /// HistoricalPeriod (presumably static strings — not freed here).
    pub fn deinit(self: WindowSet) void {
        self.allocator.free(self.rows);
    }
};
/// Project the requested top-level metric out of a timeline point.
fn extractValue(p: TimelinePoint, metric: Metric) f64 {
    switch (metric) {
        .net_worth => return p.net_worth,
        .liquid => return p.liquid,
        .illiquid => return p.illiquid,
    }
}
/// Build the rolling-windows block for one metric. `today` is the
/// reference "now" — almost always the last snapshot's as_of_date, but
/// taken as a parameter so tests can pin deterministic scenarios.
///
/// Returns an empty set when `points` is empty.
pub fn computeWindowSet(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    metric: Metric,
    today: Date,
) !WindowSet {
    if (points.len == 0) {
        return .{ .rows = &.{}, .allocator = allocator };
    }
    const windows = HistoricalPeriod.timeline_windows;
    // `const`, not `var`: only element stores follow; modern Zig
    // rejects a local `var` that is never mutated.
    const rows = try allocator.alloc(WindowStat, windows.len + 1);
    errdefer allocator.free(rows);
    const end_value = extractValue(points[points.len - 1], metric);
    for (windows, 0..) |period, i| {
        const target = period.targetDate(today);
        if (pointAtOrBefore(points, target)) |anchor| {
            // Hoist the anchor's metric value — it was previously
            // recomputed three times per row.
            const start_value = extractValue(anchor.*, metric);
            rows[i] = .{
                .period = period,
                .label = period.longLabel(),
                .short_label = period.label(),
                .anchor_date = anchor.as_of_date,
                .start_value = start_value,
                .end_value = end_value,
                .delta_abs = end_value - start_value,
                .delta_pct = if (start_value == 0) null else (end_value - start_value) / start_value,
            };
        } else {
            // Not enough history to honor this window.
            rows[i] = .{
                .period = period,
                .label = period.longLabel(),
                .short_label = period.label(),
                .anchor_date = null,
                .start_value = null,
                .end_value = end_value,
                .delta_abs = null,
                .delta_pct = null,
            };
        }
    }
    // All-time = vs. first snapshot in series. Not a HistoricalPeriod
    // member because it isn't relative to `today` (see valuation.zig
    // doc block).
    const first = points[0];
    const first_value = extractValue(first, metric);
    rows[windows.len] = .{
        .period = null,
        .label = "All-time",
        .short_label = "All",
        .anchor_date = first.as_of_date,
        .start_value = first_value,
        .end_value = end_value,
        .delta_abs = end_value - first_value,
        .delta_pct = if (first_value == 0) null else (end_value - first_value) / first_value,
    };
    return .{ .rows = rows, .allocator = allocator };
}
// ── Per-row day-over-day deltas ──────────────────────────────
/// One row in the "Recent snapshots" table after per-row deltas have
/// been computed. The delta is *relative to the previous row in the
/// same resolution* — i.e. when the table is aggregated to weekly,
/// `d_*` fields hold week-over-week change.
///
/// First row has all `d_*` fields null (no prior row to compare against).
pub const RowDelta = struct {
    date: Date,
    liquid: f64,
    illiquid: f64,
    net_worth: f64,
    // Deltas vs. the previous row at the same resolution; null on the
    // first row (nothing to compare against).
    d_liquid: ?f64,
    d_illiquid: ?f64,
    d_net_worth: ?f64,
};
/// Compute per-row deltas against the previous row. Returns a
/// newly-allocated slice the caller owns. Empty input -> empty output.
pub fn computeRowDeltas(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]RowDelta {
    // `const`, not `var`: only element stores follow; modern Zig
    // rejects a local `var` that is never mutated.
    const out = try allocator.alloc(RowDelta, points.len);
    for (points, 0..) |p, i| {
        // One optional prev lookup replaces three separate `i == 0` checks.
        const prev: ?TimelinePoint = if (i == 0) null else points[i - 1];
        out[i] = .{
            .date = p.as_of_date,
            .liquid = p.liquid,
            .illiquid = p.illiquid,
            .net_worth = p.net_worth,
            .d_liquid = if (prev) |q| p.liquid - q.liquid else null,
            .d_illiquid = if (prev) |q| p.illiquid - q.illiquid else null,
            .d_net_worth = if (prev) |q| p.net_worth - q.net_worth else null,
        };
    }
    return out;
}
// ── Resolution (daily / weekly / monthly) ────────────────────
pub const Resolution = enum {
    daily,
    weekly,
    monthly,

    /// Lowercase display name. The enum tag names already match the
    /// desired strings exactly, so `@tagName` is the whole implementation.
    pub fn label(self: Resolution) []const u8 {
        return @tagName(self);
    }
};
/// Pick a default resolution based on series span.
/// span ≤ 90d → daily
/// span ≤ 730d → weekly
/// else → monthly
///
/// Empty / single-point series always return `daily` (there's nothing
/// to aggregate).
pub fn selectResolution(points: []const TimelinePoint) Resolution {
    if (points.len < 2) return .daily;
    // Series is sorted ascending, so last - first is the full span.
    const span = points[points.len - 1].as_of_date.days - points[0].as_of_date.days;
    return if (span <= 90) .daily else if (span <= 730) .weekly else .monthly;
}
/// Aggregate `points` to the requested resolution. Returns a
/// newly-allocated slice the caller owns.
///
/// `daily` → returns a copy of the input. NOTE: the copy is shallow —
/// each point's inner `accounts`/`tax_types` slices are shared
/// with the input, so don't deinit both sides' points.
/// `weekly` → rolling 7-day buckets walking *backward from latest*, one
/// representative point per bucket (the latest in the bucket,
/// not the oldest — matches brokerage weekly-bar convention).
/// The returned slice is sorted ascending by date.
/// `monthly` → groups by calendar (year, month); picks the latest snapshot
/// in each month. Sorted ascending by date.
///
/// Empty input returns an empty owned slice.
pub fn aggregatePoints(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    resolution: Resolution,
) ![]TimelinePoint {
    if (points.len == 0) return allocator.alloc(TimelinePoint, 0);
    return switch (resolution) {
        // dupe == alloc + memcpy: identical shallow copy of the input.
        .daily => try allocator.dupe(TimelinePoint, points),
        .weekly => try aggregateWeeklyRolling(allocator, points),
        .monthly => try aggregateMonthly(allocator, points),
    };
}
/// Walk backward in 7-day strides from the latest point. The latest
/// point always seeds bucket 0; subsequent buckets cover
/// `(latest - 7i - 6) … (latest - 7i)` inclusive. Each bucket emits
/// its latest-date member. Output is sorted ascending.
fn aggregateWeeklyRolling(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]TimelinePoint {
    var reps: std.ArrayList(TimelinePoint) = .empty;
    errdefer reps.deinit(allocator);
    const newest = points[points.len - 1].as_of_date;
    // Scan newest-first; the first point seen in each 7-day bucket
    // (relative to `newest`) is that bucket's representative.
    var seen_bucket: i32 = -1;
    var idx = points.len;
    while (idx > 0) : (idx -= 1) {
        const pt = points[idx - 1];
        const age = newest.days - pt.as_of_date.days;
        if (age < 0) continue; // defensive: series is sorted, so not expected
        const bucket: i32 = @divFloor(age, 7);
        if (bucket == seen_bucket) continue;
        try reps.append(allocator, pt);
        seen_bucket = bucket;
    }
    // Collected newest-first; flip to ascending before handing off.
    std.mem.reverse(TimelinePoint, reps.items);
    return reps.toOwnedSlice(allocator);
}
/// Group by (year, month); emit the latest-date member of each group.
fn aggregateMonthly(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]TimelinePoint {
    var reps: std.ArrayList(TimelinePoint) = .empty;
    errdefer reps.deinit(allocator);
    // Lookahead formulation: a point is its month-run's representative
    // exactly when it is the last element, or the next element starts a
    // different (year, month). Equivalent to tracking a "current group"
    // and committing on key change.
    for (points, 0..) |p, i| {
        const closes_run = i + 1 == points.len or
            points[i + 1].as_of_date.year() != p.as_of_date.year() or
            points[i + 1].as_of_date.month() != p.as_of_date.month();
        if (closes_run) try reps.append(allocator, p);
    }
    return reps.toOwnedSlice(allocator);
}
// ── Tests ────────────────────────────────────────────────────
//
// Pure compute — every function here can be exercised with fixture
// structs. No IO, no writer, no colors.
const testing = std.testing;
/// Populate `buf` with the three total rows and return a minimal
/// Snapshot fixture for tests. Each test supplies its own `buf` (three
/// `TotalRow`s on the stack) so storage doesn't alias across calls.
fn fixtureSnapshot(
    buf: *[3]snapshot.TotalRow,
    y: i16,
    m: u8,
    d: u8,
    net_worth: f64,
    liquid: f64,
    illiquid: f64,
) snapshot.Snapshot {
    // The three scopes snapshotToPoint looks for, in fixed order.
    buf[0] = .{ .kind = "total", .scope = "net_worth", .value = net_worth };
    buf[1] = .{ .kind = "total", .scope = "liquid", .value = liquid };
    buf[2] = .{ .kind = "total", .scope = "illiquid", .value = illiquid };
    return .{
        .meta = .{
            .kind = "meta",
            .snapshot_version = 1,
            .as_of_date = Date.fromYmd(y, m, d),
            .captured_at = 0,
            .zfin_version = "test",
            .stale_count = 0,
        },
        // Returned Snapshot borrows `buf` — caller's buffer must outlive it.
        .totals = buf,
        .tax_types = &.{},
        .accounts = &.{},
        .lots = &.{},
    };
}
test "snapshotToPoint: extracts the three totals" {
    // Happy path: all three scope rows map onto their fields, and a
    // fixture without breakdowns yields empty accounts/tax_types.
    var buf: [3]snapshot.TotalRow = undefined;
    const snap = fixtureSnapshot(&buf, 2026, 4, 17, 1000, 800, 200);
    const p = try snapshotToPoint(testing.allocator, snap);
    defer p.deinit(testing.allocator);
    try testing.expect(p.as_of_date.eql(Date.fromYmd(2026, 4, 17)));
    try testing.expectEqual(@as(f64, 1000), p.net_worth);
    try testing.expectEqual(@as(f64, 800), p.liquid);
    try testing.expectEqual(@as(f64, 200), p.illiquid);
    try testing.expectEqual(@as(usize, 0), p.accounts.len);
    try testing.expectEqual(@as(usize, 0), p.tax_types.len);
}
test "snapshotToPoint: missing totals default to zero" {
    // Snapshot with empty totals slice — nothing at all to extract.
    // The three totals must fall back to their zero initializers.
    const snap: snapshot.Snapshot = .{
        .meta = .{
            .kind = "meta",
            .snapshot_version = 1,
            .as_of_date = Date.fromYmd(2026, 4, 17),
            .captured_at = 0,
            .zfin_version = "test",
            .stale_count = 0,
        },
        .totals = &.{},
        .tax_types = &.{},
        .accounts = &.{},
        .lots = &.{},
    };
    const p = try snapshotToPoint(testing.allocator, snap);
    defer p.deinit(testing.allocator);
    try testing.expectEqual(@as(f64, 0), p.net_worth);
    try testing.expectEqual(@as(f64, 0), p.liquid);
    try testing.expectEqual(@as(f64, 0), p.illiquid);
}
test "snapshotToPoint: propagates accounts and tax_types" {
    // Breakdown rows should be copied into NamedValue slices, with the
    // tax-type `label` landing in NamedValue's `name` field.
    const accts = [_]snapshot.AccountRow{
        .{ .kind = "account", .name = "Emil Roth", .value = 1000 },
        .{ .kind = "account", .name = "Kelly IRA", .value = 500 },
    };
    const taxes = [_]snapshot.TaxTypeRow{
        .{ .kind = "tax_type", .label = "Taxable", .value = 800 },
    };
    const totals = [_]snapshot.TotalRow{
        .{ .kind = "total", .scope = "net_worth", .value = 1500 },
    };
    const snap: snapshot.Snapshot = .{
        .meta = .{
            .kind = "meta",
            .snapshot_version = 1,
            .as_of_date = Date.fromYmd(2026, 4, 17),
            .captured_at = 0,
            .zfin_version = "test",
            .stale_count = 0,
        },
        // @constCast: Snapshot's fields are mutable slices but this test
        // never writes through them — assumed safe here; confirm if
        // Snapshot's field types ever change.
        .totals = @constCast(&totals),
        .tax_types = @constCast(&taxes),
        .accounts = @constCast(&accts),
        .lots = &.{},
    };
    const p = try snapshotToPoint(testing.allocator, snap);
    defer p.deinit(testing.allocator);
    try testing.expectEqual(@as(usize, 2), p.accounts.len);
    try testing.expectEqualStrings("Emil Roth", p.accounts[0].name);
    try testing.expectEqual(@as(f64, 1000), p.accounts[0].value);
    try testing.expectEqual(@as(usize, 1), p.tax_types.len);
    try testing.expectEqualStrings("Taxable", p.tax_types[0].name);
}
test "buildSeries: empty input" {
    // Zero snapshots -> zero points; deinit on the empty series is a no-op.
    const series = try buildSeries(testing.allocator, &.{});
    defer series.deinit();
    try testing.expectEqual(@as(usize, 0), series.points.len);
}
test "buildSeries: sorts ascending by date" {
    // Input is deliberately out of order (20th, 17th, 18th); buildSeries
    // must emit 17 -> 18 -> 20 with values following their dates.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    var b3: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 20, 3000, 0, 0),
        fixtureSnapshot(&b2, 2026, 4, 17, 1000, 0, 0),
        fixtureSnapshot(&b3, 2026, 4, 18, 2000, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    try testing.expectEqual(@as(usize, 3), series.points.len);
    try testing.expect(series.points[0].as_of_date.eql(Date.fromYmd(2026, 4, 17)));
    try testing.expect(series.points[1].as_of_date.eql(Date.fromYmd(2026, 4, 18)));
    try testing.expect(series.points[2].as_of_date.eql(Date.fromYmd(2026, 4, 20)));
    try testing.expectEqual(@as(f64, 1000), series.points[0].net_worth);
    try testing.expectEqual(@as(f64, 3000), series.points[2].net_worth);
}
test "filterByDate: inclusive bounds" {
    // Five consecutive days; filtering [18th, 20th] must keep exactly
    // the three middle points — both boundary dates included.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    var b3: [3]snapshot.TotalRow = undefined;
    var b4: [3]snapshot.TotalRow = undefined;
    var b5: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1, 0, 0),
        fixtureSnapshot(&b2, 2026, 4, 18, 2, 0, 0),
        fixtureSnapshot(&b3, 2026, 4, 19, 3, 0, 0),
        fixtureSnapshot(&b4, 2026, 4, 20, 4, 0, 0),
        fixtureSnapshot(&b5, 2026, 4, 21, 5, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const kept = try filterByDate(
        testing.allocator,
        series.points,
        Date.fromYmd(2026, 4, 18),
        Date.fromYmd(2026, 4, 20),
    );
    defer testing.allocator.free(kept);
    try testing.expectEqual(@as(usize, 3), kept.len);
    try testing.expectEqual(@as(f64, 2), kept[0].net_worth);
    try testing.expectEqual(@as(f64, 4), kept[2].net_worth);
}
test "filterByDate: null since leaves left end open" {
    // No lower bound: everything up to and including the 18th survives.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    var b3: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1, 0, 0),
        fixtureSnapshot(&b2, 2026, 4, 18, 2, 0, 0),
        fixtureSnapshot(&b3, 2026, 4, 19, 3, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const kept = try filterByDate(testing.allocator, series.points, null, Date.fromYmd(2026, 4, 18));
    defer testing.allocator.free(kept);
    try testing.expectEqual(@as(usize, 2), kept.len);
}
test "filterByDate: null until leaves right end open" {
    // No upper bound: everything from the 18th onward survives.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    var b3: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1, 0, 0),
        fixtureSnapshot(&b2, 2026, 4, 18, 2, 0, 0),
        fixtureSnapshot(&b3, 2026, 4, 19, 3, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const kept = try filterByDate(testing.allocator, series.points, Date.fromYmd(2026, 4, 18), null);
    defer testing.allocator.free(kept);
    try testing.expectEqual(@as(usize, 2), kept.len);
}
test "filterByDate: both bounds null returns everything" {
    // Fully open range: the filter degenerates to a plain copy.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1, 0, 0),
        fixtureSnapshot(&b2, 2026, 4, 18, 2, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const kept = try filterByDate(testing.allocator, series.points, null, null);
    defer testing.allocator.free(kept);
    try testing.expectEqual(@as(usize, 2), kept.len);
}
test "filterByDate: out-of-range bounds return empty" {
    // `since` after every point: nothing matches, result is empty (not an error).
    var b1: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1, 0, 0),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const kept = try filterByDate(testing.allocator, series.points, Date.fromYmd(2030, 1, 1), null);
    defer testing.allocator.free(kept);
    try testing.expectEqual(@as(usize, 0), kept.len);
}
test "extractMetric: net_worth / liquid / illiquid" {
    // Table-driven over all three Metric variants via inline for; each
    // tuple pairs the metric with its expected per-date values.
    var b1: [3]snapshot.TotalRow = undefined;
    var b2: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&b1, 2026, 4, 17, 1000, 800, 200),
        fixtureSnapshot(&b2, 2026, 4, 18, 2000, 1500, 500),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    inline for (.{
        .{ Metric.net_worth, [_]f64{ 1000, 2000 } },
        .{ Metric.liquid, [_]f64{ 800, 1500 } },
        .{ Metric.illiquid, [_]f64{ 200, 500 } },
    }) |tc| {
        const out = try extractMetric(testing.allocator, series.points, tc[0]);
        defer testing.allocator.free(out);
        try testing.expectEqual(@as(usize, 2), out.len);
        try testing.expectEqual(tc[1][0], out[0].value);
        try testing.expectEqual(tc[1][1], out[1].value);
    }
}
test "Metric.label: stable strings" {
    // These strings are rendered verbatim in the UI — pin them.
    try testing.expectEqualStrings("Net Worth", Metric.net_worth.label());
    try testing.expectEqualStrings("Liquid", Metric.liquid.label());
    try testing.expectEqualStrings("Illiquid", Metric.illiquid.label());
}
test "extractNamedSeries accounts: matches + absent days emit 0" {
    // Build three snapshots: day1 has account A; day2 has account B; day3 has both.
    // Extracting "A" should see value on day1, 0 on day2, value on day3.
    var day1_accts = [_]snapshot.AccountRow{.{ .kind = "account", .name = "A", .value = 100 }};
    var day2_accts = [_]snapshot.AccountRow{.{ .kind = "account", .name = "B", .value = 200 }};
    var day3_accts = [_]snapshot.AccountRow{
        .{ .kind = "account", .name = "A", .value = 300 },
        .{ .kind = "account", .name = "B", .value = 400 },
    };
    var t1 = [_]snapshot.TotalRow{.{ .kind = "total", .scope = "net_worth", .value = 100 }};
    var t2 = [_]snapshot.TotalRow{.{ .kind = "total", .scope = "net_worth", .value = 200 }};
    var t3 = [_]snapshot.TotalRow{.{ .kind = "total", .scope = "net_worth", .value = 700 }};
    // Local fixture builder: like fixtureSnapshot but with caller-owned
    // account rows instead of the fixed three-totals buffer.
    const mk = struct {
        fn f(y: i16, m: u8, d: u8, totals: []snapshot.TotalRow, accts: []snapshot.AccountRow) snapshot.Snapshot {
            return .{
                .meta = .{
                    .kind = "meta",
                    .snapshot_version = 1,
                    .as_of_date = Date.fromYmd(y, m, d),
                    .captured_at = 0,
                    .zfin_version = "test",
                    .stale_count = 0,
                },
                .totals = totals,
                .tax_types = &.{},
                .accounts = accts,
                .lots = &.{},
            };
        }
    }.f;
    const snaps = [_]snapshot.Snapshot{
        mk(2026, 4, 17, &t1, &day1_accts),
        mk(2026, 4, 18, &t2, &day2_accts),
        mk(2026, 4, 19, &t3, &day3_accts),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const a_series = try extractNamedSeries(testing.allocator, series.points, .accounts, "A");
    defer testing.allocator.free(a_series);
    try testing.expectEqual(@as(f64, 100), a_series[0].value);
    try testing.expectEqual(@as(f64, 0), a_series[1].value); // absent -> 0
    try testing.expectEqual(@as(f64, 300), a_series[2].value);
}
test "extractNamedSeries tax_types: absent days emit 0" {
    // Day 17 carries a "Taxable" row; day 18 carries none — the second
    // output point must still exist, with value 0.
    var taxes1 = [_]snapshot.TaxTypeRow{.{ .kind = "tax_type", .label = "Taxable", .value = 500 }};
    var taxes2 = [_]snapshot.TaxTypeRow{};
    var tot1 = [_]snapshot.TotalRow{.{ .kind = "total", .scope = "net_worth", .value = 500 }};
    var tot2 = [_]snapshot.TotalRow{.{ .kind = "total", .scope = "net_worth", .value = 0 }};
    // Local fixture builder with caller-owned tax-type rows.
    const mk = struct {
        fn f(d: u8, totals: []snapshot.TotalRow, tt: []snapshot.TaxTypeRow) snapshot.Snapshot {
            return .{
                .meta = .{
                    .kind = "meta",
                    .snapshot_version = 1,
                    .as_of_date = Date.fromYmd(2026, 4, d),
                    .captured_at = 0,
                    .zfin_version = "test",
                    .stale_count = 0,
                },
                .totals = totals,
                .tax_types = tt,
                .accounts = &.{},
                .lots = &.{},
            };
        }
    }.f;
    const snaps = [_]snapshot.Snapshot{
        mk(17, &tot1, &taxes1),
        mk(18, &tot2, &taxes2),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const s = try extractNamedSeries(testing.allocator, series.points, .tax_types, "Taxable");
    defer testing.allocator.free(s);
    try testing.expectEqual(@as(f64, 500), s[0].value);
    try testing.expectEqual(@as(f64, 0), s[1].value);
}
test "computeStats: typical case" {
    // min sits mid-series, max near the end; first and last both differ
    // from the extremes, so every field is exercised independently.
    const series = [_]MetricPoint{
        .{ .date = Date.fromYmd(2026, 4, 17), .value = 1000 },
        .{ .date = Date.fromYmd(2026, 4, 18), .value = 900 },
        .{ .date = Date.fromYmd(2026, 4, 19), .value = 1500 },
        .{ .date = Date.fromYmd(2026, 4, 20), .value = 1100 },
    };
    const stats = computeStats(&series).?;
    try testing.expectEqual(@as(f64, 1000), stats.first);
    try testing.expectEqual(@as(f64, 1100), stats.last);
    try testing.expectEqual(@as(f64, 900), stats.min);
    try testing.expectEqual(@as(f64, 1500), stats.max);
    try testing.expectEqual(@as(f64, 100), stats.delta_abs);
    const pct = stats.delta_pct orelse return error.TestUnexpectedResult;
    try testing.expectApproxEqAbs(@as(f64, 0.10), pct, 1e-9);
}
test "computeStats: empty input returns null" {
    // No points -> no stats; &.{} coerces to an empty []const MetricPoint.
    const stats = computeStats(&.{});
    try testing.expect(stats == null);
}
test "computeStats: single point — all fields equal" {
    // With one point, first/last/min/max collapse to the same value and
    // both deltas are zero (delta_pct is 0, not null, since first != 0).
    const only = [_]MetricPoint{.{ .date = Date.fromYmd(2026, 4, 17), .value = 5000 }};
    const stats = computeStats(&only).?;
    for ([_]f64{ stats.first, stats.last, stats.min, stats.max }) |v| {
        try testing.expectEqual(@as(f64, 5000), v);
    }
    try testing.expectEqual(@as(f64, 0), stats.delta_abs);
    try testing.expectEqual(@as(f64, 0), stats.delta_pct.?);
}
test "computeStats: first-zero yields null delta_pct (no div-by-zero)" {
    // A zero starting value makes a percentage change undefined.
    const points = [_]MetricPoint{
        .{ .date = Date.fromYmd(2026, 4, 17), .value = 0 },
        .{ .date = Date.fromYmd(2026, 4, 18), .value = 1000 },
    };
    const stats = computeStats(&points).?;
    try testing.expectEqual(@as(f64, 1000), stats.delta_abs);
    try testing.expect(stats.delta_pct == null);
}
test "computeStats: negative delta" {
    const points = [_]MetricPoint{
        .{ .date = Date.fromYmd(2026, 4, 17), .value = 1000 },
        .{ .date = Date.fromYmd(2026, 4, 18), .value = 800 },
    };
    const stats = computeStats(&points).?;
    // Dropped 200 from a base of 1000 => -20%.
    try testing.expectEqual(@as(f64, -200), stats.delta_abs);
    try testing.expectApproxEqAbs(@as(f64, -0.20), stats.delta_pct.?, 1e-9);
}
test "buildRollupRecords: one row per point, kind discriminator set" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 800, 200),
        fixtureSnapshot(&buf_b, 2026, 4, 18, 1100, 850, 250),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const rows = try buildRollupRecords(testing.allocator, series.points);
    defer testing.allocator.free(rows);
    try testing.expectEqual(@as(usize, 2), rows.len);
    // Each rollup row carries the "rollup" discriminator plus the three totals.
    const first = rows[0];
    try testing.expectEqualStrings("rollup", first.kind);
    try testing.expect(first.as_of_date.eql(Date.fromYmd(2026, 4, 17)));
    try testing.expectEqual(@as(f64, 1000), first.net_worth);
    try testing.expectEqual(@as(f64, 800), first.liquid);
    try testing.expectEqual(@as(f64, 200), first.illiquid);
    try testing.expectEqual(@as(f64, 1100), rows[1].net_worth);
}
test "buildRollupRecords: empty input produces empty slice" {
    // Empty in, empty out — still a valid (freeable) allocation.
    const none = try buildRollupRecords(testing.allocator, &.{});
    defer testing.allocator.free(none);
    try testing.expectEqual(@as(usize, 0), none.len);
}
// ── pointAtOrBefore ──────────────────────────────────────────
test "pointAtOrBefore: exact / snap-backward / null" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    var buf_c: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 18, 1100, 750, 350),
        fixtureSnapshot(&buf_c, 2026, 4, 21, 1200, 800, 400), // weekend gap before this one
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    // Exact-date hit.
    try testing.expectEqual(@as(f64, 1100), pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 18)).?.net_worth);
    // A query inside the gap snaps back to the last point before it.
    try testing.expectEqual(@as(f64, 1100), pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 19)).?.net_worth);
    // Earlier than every point: nothing to return.
    try testing.expect(pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 1)) == null);
    // Later than every point: the latest point wins.
    try testing.expectEqual(@as(f64, 1200), pointAtOrBefore(series.points, Date.fromYmd(2099, 1, 1)).?.net_worth);
}
// ── computeWindowSet ─────────────────────────────────────────
test "computeWindowSet: empty series produces empty rows" {
    // No history at all -> no window rows (not even an all-time row).
    const ws = try computeWindowSet(testing.allocator, &.{}, .net_worth, Date.fromYmd(2026, 4, 22));
    defer ws.deinit();
    try testing.expectEqual(@as(usize, 0), ws.rows.len);
}
test "computeWindowSet: 8 period rows + 1 all-time row" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 22, 1500, 1000, 500),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
    defer ws.deinit();
    // Every HistoricalPeriod window (8) plus the trailing all-time summary.
    try testing.expectEqual(@as(usize, 9), ws.rows.len);
    // The all-time row sits last, has no period tag, and spans first -> last point.
    const all_time = ws.rows[ws.rows.len - 1];
    try testing.expect(all_time.period == null);
    try testing.expectEqualStrings("All-time", all_time.label);
    try testing.expectEqual(@as(f64, 1000), all_time.start_value.?);
    try testing.expectEqual(@as(f64, 1500), all_time.end_value);
    try testing.expectEqual(@as(f64, 500), all_time.delta_abs.?);
    try testing.expectApproxEqAbs(@as(f64, 0.5), all_time.delta_pct.?, 1e-9);
}
test "computeWindowSet: not-enough-history leaves start_value/delta null" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 21, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 22, 1100, 750, 350),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
    defer ws.deinit();
    // Row 0 is the 1-day window; its anchor 2026-04-21 exists as an exact
    // match, so every field is populated.
    const one_day = ws.rows[0];
    try testing.expect(one_day.period == HistoricalPeriod.@"1D");
    try testing.expect(one_day.anchor_date != null);
    try testing.expect(one_day.delta_abs != null);
    // Row 4 is the 1-year window (order: 1D, 1W, 1M, YTD, 1Y, 3Y, 5Y, 10Y).
    // Two days of history can't reach back a year: no anchor, no deltas.
    const one_year = ws.rows[4];
    try testing.expect(one_year.period == HistoricalPeriod.@"1Y");
    try testing.expect(one_year.anchor_date == null);
    try testing.expect(one_year.start_value == null);
    try testing.expect(one_year.delta_abs == null);
    try testing.expect(one_year.delta_pct == null);
    // The window's end value is always the latest point, history or not.
    try testing.expectEqual(@as(f64, 1100), one_year.end_value);
}
test "computeWindowSet: YTD anchors to Jan 1 (snaps to prior year's last close)" {
    // One snapshot on Dec 31 2025 and one in April 2026: the YTD window on
    // 2026-04-22 targets Jan 1 2026, and pointAtOrBefore snaps that back to
    // the prior year's close.
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2025, 12, 31, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 22, 1500, 1000, 500),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
    defer ws.deinit();
    // YTD sits at index 3 of timeline_windows.
    const ytd = ws.rows[3];
    try testing.expect(ytd.period == HistoricalPeriod.ytd);
    try testing.expect(ytd.anchor_date.?.eql(Date.fromYmd(2025, 12, 31)));
    try testing.expectEqual(@as(f64, 1000), ytd.start_value.?);
    try testing.expectEqual(@as(f64, 500), ytd.delta_abs.?);
}
test "computeWindowSet: liquid metric is independent of net_worth" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 22, 1500, 1100, 400),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const ws = try computeWindowSet(testing.allocator, series.points, .liquid, Date.fromYmd(2026, 4, 22));
    defer ws.deinit();
    // Selecting .liquid must track the liquid totals (700 -> 1100), not net worth.
    const all_time = ws.rows[ws.rows.len - 1];
    try testing.expectEqual(@as(f64, 700), all_time.start_value.?);
    try testing.expectEqual(@as(f64, 1100), all_time.end_value);
    try testing.expectEqual(@as(f64, 400), all_time.delta_abs.?);
}
// ── computeRowDeltas ─────────────────────────────────────────
test "computeRowDeltas: first row has null deltas; others populated" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    var buf_c: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 18, 1100, 750, 350),
        fixtureSnapshot(&buf_c, 2026, 4, 19, 1050, 720, 330),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const rows = try computeRowDeltas(testing.allocator, series.points);
    defer testing.allocator.free(rows);
    try testing.expectEqual(@as(usize, 3), rows.len);
    // Nothing precedes the first row, so every delta is null.
    try testing.expect(rows[0].d_net_worth == null);
    try testing.expect(rows[0].d_liquid == null);
    try testing.expect(rows[0].d_illiquid == null);
    // Day 2 gained 100 overall, split 50/50 across liquid/illiquid.
    try testing.expectEqual(@as(f64, 100), rows[1].d_net_worth.?);
    try testing.expectEqual(@as(f64, 50), rows[1].d_liquid.?);
    try testing.expectEqual(@as(f64, 50), rows[1].d_illiquid.?);
    // Day 3 lost 50 overall.
    try testing.expectEqual(@as(f64, -50), rows[2].d_net_worth.?);
}
test "computeRowDeltas: empty input" {
    // Empty in, empty out — still a freeable allocation.
    const rows = try computeRowDeltas(testing.allocator, &.{});
    defer testing.allocator.free(rows);
    try testing.expectEqual(@as(usize, 0), rows.len);
}
// ── selectResolution / aggregatePoints ───────────────────────
test "selectResolution: thresholds" {
    // Helper: allocate a two-point series spanning `day_span` days.
    // Caller frees. Returns an error union so allocation failure
    // propagates via `try` instead of `catch unreachable`.
    const mk = struct {
        fn f(day_span: i32) ![]TimelinePoint {
            // `const` binding: element writes go through the slice, the
            // binding itself is never reassigned (a `var` here is a
            // "never mutated" compile error).
            const out = try testing.allocator.alloc(TimelinePoint, 2);
            const start = Date.fromYmd(2026, 1, 1);
            out[0] = .{
                .as_of_date = start,
                .net_worth = 0,
                .liquid = 0,
                .illiquid = 0,
                .accounts = &.{},
                .tax_types = &.{},
            };
            out[1] = .{
                .as_of_date = start.addDays(day_span),
                .net_worth = 0,
                .liquid = 0,
                .illiquid = 0,
                .accounts = &.{},
                .tax_types = &.{},
            };
            return out;
        }
    }.f;
    // Thresholds: span <= 90 days -> daily; 91..730 -> weekly; > 730 -> monthly.
    const p90 = try mk(90);
    defer testing.allocator.free(p90);
    try testing.expectEqual(Resolution.daily, selectResolution(p90));
    const p91 = try mk(91);
    defer testing.allocator.free(p91);
    try testing.expectEqual(Resolution.weekly, selectResolution(p91));
    const p730 = try mk(730);
    defer testing.allocator.free(p730);
    try testing.expectEqual(Resolution.weekly, selectResolution(p730));
    const p731 = try mk(731);
    defer testing.allocator.free(p731);
    try testing.expectEqual(Resolution.monthly, selectResolution(p731));
    // Single-point series has zero span: daily.
    try testing.expectEqual(Resolution.daily, selectResolution(p90[0..1]));
}
test "aggregatePoints: daily returns a copy" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 4, 17, 1000, 700, 300),
        fixtureSnapshot(&buf_b, 2026, 4, 18, 1100, 750, 350),
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    // Daily resolution passes every point through unchanged.
    const copied = try aggregatePoints(testing.allocator, series.points, .daily);
    defer testing.allocator.free(copied);
    try testing.expectEqual(@as(usize, 2), copied.len);
    try testing.expectEqual(@as(f64, 1000), copied[0].net_worth);
    try testing.expectEqual(@as(f64, 1100), copied[1].net_worth);
}
test "aggregatePoints: weekly rolling, one pick per 7-day bucket from latest" {
    // 21 consecutive daily points (values 1000..1020) so bucket boundaries
    // are easy to spot: a 21-day span -> three 7-day buckets, one pick each.
    const daily = try testing.allocator.alloc(TimelinePoint, 21);
    defer testing.allocator.free(daily);
    for (daily, 0..) |*pt, idx| {
        pt.* = .{
            .as_of_date = Date.fromYmd(2026, 4, 1).addDays(@intCast(idx)),
            .net_worth = @floatFromInt(1000 + idx),
            .liquid = 0,
            .illiquid = 0,
            .accounts = &.{},
            .tax_types = &.{},
        };
    }
    const out = try aggregatePoints(testing.allocator, daily, .weekly);
    defer testing.allocator.free(out);
    // Buckets are anchored at the latest date (2026-04-21):
    //   days [15..21] -> pick 2026-04-21 (1020)
    //   days [08..14] -> pick 2026-04-14 (1013)
    //   days [01..07] -> pick 2026-04-07 (1006)
    try testing.expectEqual(@as(usize, 3), out.len);
    try testing.expect(out[0].as_of_date.eql(Date.fromYmd(2026, 4, 7)));
    try testing.expectEqual(@as(f64, 1006), out[0].net_worth);
    try testing.expect(out[1].as_of_date.eql(Date.fromYmd(2026, 4, 14)));
    try testing.expect(out[2].as_of_date.eql(Date.fromYmd(2026, 4, 21)));
    try testing.expectEqual(@as(f64, 1020), out[2].net_worth);
}
test "aggregatePoints: monthly picks latest snapshot in each calendar month" {
    var buf_a: [3]snapshot.TotalRow = undefined;
    var buf_b: [3]snapshot.TotalRow = undefined;
    var buf_c: [3]snapshot.TotalRow = undefined;
    var buf_d: [3]snapshot.TotalRow = undefined;
    var buf_e: [3]snapshot.TotalRow = undefined;
    const snaps = [_]snapshot.Snapshot{
        fixtureSnapshot(&buf_a, 2026, 2, 5, 100, 0, 0),
        fixtureSnapshot(&buf_b, 2026, 2, 28, 200, 0, 0), // Feb's last entry
        fixtureSnapshot(&buf_c, 2026, 3, 1, 300, 0, 0),
        fixtureSnapshot(&buf_d, 2026, 3, 31, 400, 0, 0), // Mar's last entry
        fixtureSnapshot(&buf_e, 2026, 4, 10, 500, 0, 0), // sole Apr entry
    };
    const series = try buildSeries(testing.allocator, &snaps);
    defer series.deinit();
    const out = try aggregatePoints(testing.allocator, series.points, .monthly);
    defer testing.allocator.free(out);
    // One survivor per calendar month, in chronological order.
    try testing.expectEqual(@as(usize, 3), out.len);
    try testing.expect(out[0].as_of_date.eql(Date.fromYmd(2026, 2, 28)));
    try testing.expectEqual(@as(f64, 200), out[0].net_worth);
    try testing.expect(out[1].as_of_date.eql(Date.fromYmd(2026, 3, 31)));
    try testing.expectEqual(@as(f64, 400), out[1].net_worth);
    try testing.expect(out[2].as_of_date.eql(Date.fromYmd(2026, 4, 10)));
    try testing.expectEqual(@as(f64, 500), out[2].net_worth);
}
test "aggregatePoints: empty input returns empty slice" {
    // Whatever the resolution, an empty series aggregates to an empty slice.
    for ([_]Resolution{ .daily, .weekly, .monthly }) |res| {
        const out = try aggregatePoints(testing.allocator, &.{}, res);
        defer testing.allocator.free(out);
        try testing.expectEqual(@as(usize, 0), out.len);
    }
}