history view refactor
This commit is contained in:
parent
c89e93c243
commit
41208f3732
9 changed files with 1441 additions and 626 deletions
|
|
@ -29,7 +29,7 @@ repos:
|
|||
- id: test
|
||||
name: Run zig build test
|
||||
entry: zig
|
||||
args: ["build", "coverage", "-Dcoverage-threshold=54"]
|
||||
args: ["build", "coverage", "-Dcoverage-threshold=56"]
|
||||
language: system
|
||||
types: [file]
|
||||
pass_filenames: false
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ zig build # build the zfin binary (output: zig-out/bin/zfin)
|
|||
zig build test # run all tests (single binary, discovers all tests via refAllDeclsRecursive)
|
||||
zig build run -- <args> # build and run CLI
|
||||
zig build docs # generate library documentation
|
||||
zig build coverage -Dcoverage-threshold=54 # run tests with kcov coverage (Linux only)
|
||||
zig build coverage -Dcoverage-threshold=56 # run tests with kcov coverage (Linux only)
|
||||
```
|
||||
|
||||
**Tooling** (managed via `.mise.toml`):
|
||||
|
|
|
|||
|
|
@ -25,6 +25,8 @@
|
|||
const std = @import("std");
|
||||
const Date = @import("../models/date.zig").Date;
|
||||
const snapshot = @import("../models/snapshot.zig");
|
||||
const valuation = @import("valuation.zig");
|
||||
const HistoricalPeriod = valuation.HistoricalPeriod;
|
||||
|
||||
// ── Public types ─────────────────────────────────────────────
|
||||
|
||||
|
|
@ -331,6 +333,320 @@ pub fn buildRollupRecords(
|
|||
return out;
|
||||
}
|
||||
|
||||
// ── Snap-backward for snapshot points ────────────────────────
|
||||
|
||||
/// Date extractor handed to `valuation.indexAtOrBefore` so the shared
/// binary-search kernel can work over `TimelinePoint` slices.
fn pointDateOf(point: TimelinePoint) Date {
    return point.as_of_date;
}
|
||||
|
||||
/// Return the latest point on or before `target`. Null when `points`
/// is empty or every entry is strictly after `target`.
///
/// Delegates to the shared `valuation.indexAtOrBefore` kernel — the same
/// snap-backward primitive used by candle pricing — so weekend/holiday
/// semantics stay identical across the app. There is deliberately no
/// slack cap: snapshot history is dense by construction (one entry per
/// trading day), so a cap would only hide real gaps.
pub fn pointAtOrBefore(points: []const TimelinePoint, target: Date) ?*const TimelinePoint {
    if (valuation.indexAtOrBefore(TimelinePoint, points, target, pointDateOf)) |i| {
        return &points[i];
    }
    return null;
}
|
||||
|
||||
// ── Rolling-windows block ────────────────────────────────────
|
||||
|
||||
/// One row of the rolling-windows block. `anchor_date`, `start_value`
/// and the `delta_*` fields are null when the series is too short to
/// honor the window (e.g. a 10-year window on a 2-week-old portfolio).
///
/// `end_value` is always populated: it is the latest point in the
/// series, which must exist for the block to render at all.
pub const WindowStat = struct {
    /// Which period this row represents; null for the synthetic
    /// "All-time" row, which anchors to the first snapshot rather than
    /// to `today - N`.
    period: ?HistoricalPeriod,
    /// Human-facing label ("1 day", "YTD", "All-time").
    label: []const u8,
    /// Compact label for tight layouts ("1D", "YTD").
    short_label: []const u8,
    /// Date of the snapshot we anchored to; null when no snapshot sits
    /// at or before the target date (not enough history).
    anchor_date: ?Date,
    /// Metric value at the anchor; null when the anchor is missing.
    start_value: ?f64,
    /// Metric value at the latest snapshot — always present.
    end_value: f64,
    /// `end_value - start_value`; null when the start is missing.
    delta_abs: ?f64,
    /// `(end_value - start_value) / start_value`; null when the start
    /// is missing OR exactly zero (avoids division by zero).
    delta_pct: ?f64,
};
|
||||
|
||||
/// Rolling-windows block for one metric. Owns `rows`.
/// Row order: the 8 relative-to-today periods from
/// `HistoricalPeriod.timeline_windows`, then a final synthetic
/// "All-time" row anchored to the first snapshot.
pub const WindowSet = struct {
    rows: []WindowStat,
    allocator: std.mem.Allocator,

    /// Release the owned `rows` slice.
    pub fn deinit(self: WindowSet) void {
        self.allocator.free(self.rows);
    }
};
|
||||
|
||||
/// Pull the requested metric's value out of a timeline point.
fn extractValue(point: TimelinePoint, metric: Metric) f64 {
    switch (metric) {
        .net_worth => return point.net_worth,
        .liquid => return point.liquid,
        .illiquid => return point.illiquid,
    }
}
|
||||
|
||||
/// Build the rolling-windows block for one metric. `today` is the
/// reference "now" — almost always the last snapshot's as_of_date, but
/// taken as a parameter so tests can pin deterministic scenarios.
///
/// Returns an empty set when `points` is empty. Caller releases the
/// result via `WindowSet.deinit`.
pub fn computeWindowSet(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    metric: Metric,
    today: Date,
) !WindowSet {
    if (points.len == 0) {
        return .{ .rows = &.{}, .allocator = allocator };
    }

    const windows = HistoricalPeriod.timeline_windows;
    // `const`, not `var`: element writes go through the slice's pointer
    // and never reassign the binding itself (a `var` here is an
    // "unmutated local" compile error on current Zig).
    const rows = try allocator.alloc(WindowStat, windows.len + 1);
    errdefer allocator.free(rows);

    const end_value = extractValue(points[points.len - 1], metric);

    for (windows, 0..) |period, i| {
        const target = period.targetDate(today);

        // Start from the "not enough history" shape; anchor-dependent
        // fields are filled in only when an anchor exists.
        rows[i] = .{
            .period = period,
            .label = period.longLabel(),
            .short_label = period.label(),
            .anchor_date = null,
            .start_value = null,
            .end_value = end_value,
            .delta_abs = null,
            .delta_pct = null,
        };

        if (pointAtOrBefore(points, target)) |anchor| {
            // Hoisted: the anchor's metric value was previously
            // recomputed three times per row.
            const start_value = extractValue(anchor.*, metric);
            rows[i].anchor_date = anchor.as_of_date;
            rows[i].start_value = start_value;
            rows[i].delta_abs = end_value - start_value;
            rows[i].delta_pct =
                if (start_value == 0) null else (end_value - start_value) / start_value;
        }
    }

    // All-time = vs. first snapshot in the series. Not a
    // HistoricalPeriod member because it isn't relative to `today`
    // (see valuation.zig doc block).
    const first = points[0];
    const first_value = extractValue(first, metric);
    rows[windows.len] = .{
        .period = null,
        .label = "All-time",
        .short_label = "All",
        .anchor_date = first.as_of_date,
        .start_value = first_value,
        .end_value = end_value,
        .delta_abs = end_value - first_value,
        .delta_pct = if (first_value == 0) null else (end_value - first_value) / first_value,
    };

    return .{ .rows = rows, .allocator = allocator };
}
|
||||
|
||||
// ── Per-row day-over-day deltas ──────────────────────────────
|
||||
|
||||
/// One row in the "Recent snapshots" table after per-row deltas have
|
||||
/// been computed. The delta is *relative to the previous row in the
|
||||
/// same resolution* — i.e. when the table is aggregated to weekly,
|
||||
/// `d_*` fields hold week-over-week change.
|
||||
///
|
||||
/// First row has all `d_*` fields null (no prior row to compare against).
|
||||
pub const RowDelta = struct {
|
||||
date: Date,
|
||||
liquid: f64,
|
||||
illiquid: f64,
|
||||
net_worth: f64,
|
||||
d_liquid: ?f64,
|
||||
d_illiquid: ?f64,
|
||||
d_net_worth: ?f64,
|
||||
};
|
||||
|
||||
/// Compute per-row deltas against the previous row. Returns a
/// newly-allocated slice the caller owns. Empty input -> empty output.
pub fn computeRowDeltas(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]RowDelta {
    // `const`, not `var`: writes to `out[i]` mutate the allocation, not
    // the slice binding — `var` would be an "unmutated local" compile
    // error on current Zig.
    const out = try allocator.alloc(RowDelta, points.len);
    for (points, 0..) |p, i| {
        // Previous row in the same resolution, if any.
        const prev: ?TimelinePoint = if (i == 0) null else points[i - 1];
        out[i] = .{
            .date = p.as_of_date,
            .liquid = p.liquid,
            .illiquid = p.illiquid,
            .net_worth = p.net_worth,
            .d_liquid = if (prev) |q| p.liquid - q.liquid else null,
            .d_illiquid = if (prev) |q| p.illiquid - q.illiquid else null,
            .d_net_worth = if (prev) |q| p.net_worth - q.net_worth else null,
        };
    }
    return out;
}
|
||||
|
||||
// ── Resolution (daily / weekly / monthly) ────────────────────
|
||||
|
||||
/// Table/chart aggregation granularity for the history view.
pub const Resolution = enum {
    daily,
    weekly,
    monthly,

    /// Lowercase human-readable name.
    pub fn label(self: Resolution) []const u8 {
        return switch (self) {
            .daily => "daily",
            .weekly => "weekly",
            .monthly => "monthly",
        };
    }
};
|
||||
|
||||
/// Pick a default resolution from the series span:
///   span ≤ 90d  → daily
///   span ≤ 730d → weekly
///   otherwise   → monthly
///
/// Empty and single-point series always yield `daily` — there is
/// nothing to aggregate.
pub fn selectResolution(points: []const TimelinePoint) Resolution {
    if (points.len < 2) return .daily;
    const span_days = points[points.len - 1].as_of_date.days - points[0].as_of_date.days;
    return if (span_days <= 90)
        .daily
    else if (span_days <= 730)
        .weekly
    else
        .monthly;
}
|
||||
|
||||
/// Aggregate `points` to the requested resolution. Returns a
/// newly-allocated slice the caller owns.
///
/// `daily`   → a copy of the input.
/// `weekly`  → rolling 7-day buckets walking *backward from latest*;
///             each bucket contributes its latest member (not the
///             oldest — matches brokerage weekly-bar convention).
///             Result is sorted ascending by date.
/// `monthly` → grouped by calendar (year, month); each month
///             contributes its latest snapshot. Sorted ascending.
///
/// Empty input returns an empty owned slice.
pub fn aggregatePoints(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
    resolution: Resolution,
) ![]TimelinePoint {
    if (points.len == 0) return allocator.alloc(TimelinePoint, 0);

    return switch (resolution) {
        // dupe == alloc + memcpy, but says "copy" in one word.
        .daily => try allocator.dupe(TimelinePoint, points),
        .weekly => try aggregateWeeklyRolling(allocator, points),
        .monthly => try aggregateMonthly(allocator, points),
    };
}
|
||||
|
||||
/// Walk backward in 7-day strides from the latest point. The latest
/// point always seeds bucket 0; bucket i covers
/// `(latest - 7i - 6) … (latest - 7i)` inclusive. Each bucket emits its
/// latest-date member. Output is sorted ascending.
fn aggregateWeeklyRolling(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]TimelinePoint {
    var newest_first: std.ArrayList(TimelinePoint) = .empty;
    errdefer newest_first.deinit(allocator);

    const last_date = points[points.len - 1].as_of_date;

    // Scan newest-first: the first member we meet inside a bucket is
    // automatically the bucket's latest, so keep it and skip the rest.
    var kept_bucket: i32 = -1;
    var idx: usize = points.len;
    while (idx > 0) {
        idx -= 1;
        const point = points[idx];
        const age_days = last_date.days - point.as_of_date.days;
        if (age_days < 0) continue; // defensive — series is sorted ascending
        const bucket: i32 = @divFloor(age_days, 7);
        if (bucket == kept_bucket) continue;
        try newest_first.append(allocator, point);
        kept_bucket = bucket;
    }

    // Collected newest-first; flip to ascending before handing off.
    std.mem.reverse(TimelinePoint, newest_first.items);
    return newest_first.toOwnedSlice(allocator);
}
|
||||
|
||||
/// Group by (year, month); emit the latest-date member of each group.
fn aggregateMonthly(
    allocator: std.mem.Allocator,
    points: []const TimelinePoint,
) ![]TimelinePoint {
    var picked: std.ArrayList(TimelinePoint) = .empty;
    errdefer picked.deinit(allocator);

    // Ascending scan: keep overwriting the pending representative for
    // the current (year, month) key; flush it when the key changes.
    var key_year: i16 = 0;
    var key_month: u8 = 0;
    var pending: ?TimelinePoint = null;

    for (points) |point| {
        const y = point.as_of_date.year();
        const m = point.as_of_date.month();
        if (pending != null and (y != key_year or m != key_month)) {
            // Month rolled over — commit the previous month's latest.
            try picked.append(allocator, pending.?);
        }
        key_year = y;
        key_month = m;
        pending = point;
    }
    // The final month never sees a key change; commit it explicitly.
    if (pending) |p| try picked.append(allocator, p);

    return picked.toOwnedSlice(allocator);
}
|
||||
|
||||
// ── Tests ────────────────────────────────────────────────────
|
||||
//
|
||||
// Pure compute — every function here can be exercised with fixture
|
||||
|
|
@ -753,3 +1069,306 @@ test "buildRollupRecords: empty input produces empty slice" {
|
|||
defer testing.allocator.free(rows);
|
||||
try testing.expectEqual(@as(usize, 0), rows.len);
|
||||
}
|
||||
|
||||
// ── pointAtOrBefore ──────────────────────────────────────────
|
||||
|
||||
test "pointAtOrBefore: exact / snap-backward / null" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
var b3: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 17, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 18, 1100, 750, 350),
|
||||
fixtureSnapshot(&b3, 2026, 4, 21, 1200, 800, 400), // gap over weekend
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
// Exact match
|
||||
try testing.expect(pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 18)).?.net_worth == 1100);
|
||||
// Snap backward over weekend: Saturday -> Friday's snapshot
|
||||
try testing.expect(pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 19)).?.net_worth == 1100);
|
||||
// Before all -> null
|
||||
try testing.expect(pointAtOrBefore(series.points, Date.fromYmd(2026, 4, 1)) == null);
|
||||
// After all -> latest
|
||||
try testing.expect(pointAtOrBefore(series.points, Date.fromYmd(2099, 1, 1)).?.net_worth == 1200);
|
||||
}
|
||||
|
||||
// ── computeWindowSet ─────────────────────────────────────────
|
||||
|
||||
test "computeWindowSet: empty series produces empty rows" {
|
||||
const ws = try computeWindowSet(testing.allocator, &.{}, .net_worth, Date.fromYmd(2026, 4, 22));
|
||||
defer ws.deinit();
|
||||
try testing.expectEqual(@as(usize, 0), ws.rows.len);
|
||||
}
|
||||
|
||||
test "computeWindowSet: 8 period rows + 1 all-time row" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 17, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 22, 1500, 1000, 500),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
|
||||
defer ws.deinit();
|
||||
|
||||
// 8 HistoricalPeriod windows + 1 all-time
|
||||
try testing.expectEqual(@as(usize, 9), ws.rows.len);
|
||||
|
||||
// Last row is all-time (period is null, label matches)
|
||||
const all_time = ws.rows[ws.rows.len - 1];
|
||||
try testing.expect(all_time.period == null);
|
||||
try testing.expectEqualStrings("All-time", all_time.label);
|
||||
try testing.expect(all_time.start_value.? == 1000);
|
||||
try testing.expect(all_time.end_value == 1500);
|
||||
try testing.expect(all_time.delta_abs.? == 500);
|
||||
try testing.expectApproxEqAbs(@as(f64, 0.5), all_time.delta_pct.?, 1e-9);
|
||||
}
|
||||
|
||||
test "computeWindowSet: not-enough-history leaves start_value/delta null" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 21, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 22, 1100, 750, 350),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
|
||||
defer ws.deinit();
|
||||
|
||||
// First row is 1-day: anchor = 2026-04-21 (exact match). Fully populated.
|
||||
try testing.expect(ws.rows[0].period == HistoricalPeriod.@"1D");
|
||||
try testing.expect(ws.rows[0].anchor_date != null);
|
||||
try testing.expect(ws.rows[0].delta_abs != null);
|
||||
|
||||
// 1-year row (index 4 in timeline_windows: 1D, 1W, 1M, YTD, 1Y, 3Y, 5Y, 10Y)
|
||||
// with only 2 days of history: no anchor.
|
||||
try testing.expect(ws.rows[4].period == HistoricalPeriod.@"1Y");
|
||||
try testing.expect(ws.rows[4].anchor_date == null);
|
||||
try testing.expect(ws.rows[4].start_value == null);
|
||||
try testing.expect(ws.rows[4].delta_abs == null);
|
||||
try testing.expect(ws.rows[4].delta_pct == null);
|
||||
// end_value still populated
|
||||
try testing.expect(ws.rows[4].end_value == 1100);
|
||||
}
|
||||
|
||||
test "computeWindowSet: YTD anchors to Jan 1 (snaps to prior year's last close)" {
|
||||
// Dec 31 2025 snapshot + one in April 2026. YTD on 2026-04-22 should
|
||||
// anchor to Dec 31 2025 (pointAtOrBefore snaps Jan 1 2026 back to the
|
||||
// prior-year close).
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2025, 12, 31, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 22, 1500, 1000, 500),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
const ws = try computeWindowSet(testing.allocator, series.points, .net_worth, Date.fromYmd(2026, 4, 22));
|
||||
defer ws.deinit();
|
||||
|
||||
// ytd is index 3 in timeline_windows
|
||||
const ytd = ws.rows[3];
|
||||
try testing.expect(ytd.period == HistoricalPeriod.ytd);
|
||||
try testing.expect(ytd.anchor_date.?.eql(Date.fromYmd(2025, 12, 31)));
|
||||
try testing.expect(ytd.start_value.? == 1000);
|
||||
try testing.expect(ytd.delta_abs.? == 500);
|
||||
}
|
||||
|
||||
test "computeWindowSet: liquid metric is independent of net_worth" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 17, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 22, 1500, 1100, 400),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
const ws = try computeWindowSet(testing.allocator, series.points, .liquid, Date.fromYmd(2026, 4, 22));
|
||||
defer ws.deinit();
|
||||
|
||||
const all_time = ws.rows[ws.rows.len - 1];
|
||||
try testing.expect(all_time.start_value.? == 700);
|
||||
try testing.expect(all_time.end_value == 1100);
|
||||
try testing.expect(all_time.delta_abs.? == 400);
|
||||
}
|
||||
|
||||
// ── computeRowDeltas ─────────────────────────────────────────
|
||||
|
||||
test "computeRowDeltas: first row has null deltas; others populated" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
var b3: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 17, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 18, 1100, 750, 350),
|
||||
fixtureSnapshot(&b3, 2026, 4, 19, 1050, 720, 330),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
|
||||
const rows = try computeRowDeltas(testing.allocator, series.points);
|
||||
defer testing.allocator.free(rows);
|
||||
|
||||
try testing.expectEqual(@as(usize, 3), rows.len);
|
||||
|
||||
// First row: all deltas null
|
||||
try testing.expect(rows[0].d_net_worth == null);
|
||||
try testing.expect(rows[0].d_liquid == null);
|
||||
try testing.expect(rows[0].d_illiquid == null);
|
||||
|
||||
// Second row: +100 net_worth
|
||||
try testing.expect(rows[1].d_net_worth.? == 100);
|
||||
try testing.expect(rows[1].d_liquid.? == 50);
|
||||
try testing.expect(rows[1].d_illiquid.? == 50);
|
||||
|
||||
// Third row: -50 net_worth
|
||||
try testing.expect(rows[2].d_net_worth.? == -50);
|
||||
}
|
||||
|
||||
test "computeRowDeltas: empty input" {
|
||||
const rows = try computeRowDeltas(testing.allocator, &.{});
|
||||
defer testing.allocator.free(rows);
|
||||
try testing.expectEqual(@as(usize, 0), rows.len);
|
||||
}
|
||||
|
||||
// ── selectResolution / aggregatePoints ───────────────────────
|
||||
|
||||
test "selectResolution: thresholds" {
|
||||
const mk = struct {
|
||||
fn f(day_span: i32) []TimelinePoint {
|
||||
var out = std.testing.allocator.alloc(TimelinePoint, 2) catch unreachable;
|
||||
out[0] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 1, 1),
|
||||
.net_worth = 0,
|
||||
.liquid = 0,
|
||||
.illiquid = 0,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
out[1] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 1, 1).addDays(day_span),
|
||||
.net_worth = 0,
|
||||
.liquid = 0,
|
||||
.illiquid = 0,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
return out;
|
||||
}
|
||||
}.f;
|
||||
|
||||
const p90 = mk(90);
|
||||
defer testing.allocator.free(p90);
|
||||
try testing.expectEqual(Resolution.daily, selectResolution(p90));
|
||||
|
||||
const p91 = mk(91);
|
||||
defer testing.allocator.free(p91);
|
||||
try testing.expectEqual(Resolution.weekly, selectResolution(p91));
|
||||
|
||||
const p730 = mk(730);
|
||||
defer testing.allocator.free(p730);
|
||||
try testing.expectEqual(Resolution.weekly, selectResolution(p730));
|
||||
|
||||
const p731 = mk(731);
|
||||
defer testing.allocator.free(p731);
|
||||
try testing.expectEqual(Resolution.monthly, selectResolution(p731));
|
||||
|
||||
// Single-point: daily
|
||||
try testing.expectEqual(Resolution.daily, selectResolution(p90[0..1]));
|
||||
}
|
||||
|
||||
test "aggregatePoints: daily returns a copy" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 4, 17, 1000, 700, 300),
|
||||
fixtureSnapshot(&b2, 2026, 4, 18, 1100, 750, 350),
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
const out = try aggregatePoints(testing.allocator, series.points, .daily);
|
||||
defer testing.allocator.free(out);
|
||||
try testing.expectEqual(@as(usize, 2), out.len);
|
||||
try testing.expect(out[0].net_worth == 1000);
|
||||
try testing.expect(out[1].net_worth == 1100);
|
||||
}
|
||||
|
||||
test "aggregatePoints: weekly rolling, one pick per 7-day bucket from latest" {
|
||||
// Span 21 days: expect 3 buckets, one pick each.
|
||||
// Generate one point per day so we can see bucket boundaries clearly.
|
||||
var points_al: std.ArrayList(TimelinePoint) = .empty;
|
||||
defer points_al.deinit(testing.allocator);
|
||||
var i: i32 = 0;
|
||||
while (i <= 20) : (i += 1) {
|
||||
try points_al.append(testing.allocator, .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 1).addDays(i),
|
||||
.net_worth = @as(f64, @floatFromInt(1000 + i)),
|
||||
.liquid = 0,
|
||||
.illiquid = 0,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
});
|
||||
}
|
||||
const out = try aggregatePoints(testing.allocator, points_al.items, .weekly);
|
||||
defer testing.allocator.free(out);
|
||||
|
||||
// Last date is 2026-04-21. Buckets anchored from that:
|
||||
// bucket 0: days [15..21] -> pick 2026-04-21 (1020)
|
||||
// bucket 1: days [08..14] -> pick 2026-04-14 (1013)
|
||||
// bucket 2: days [01..07] -> pick 2026-04-07 (1006)
|
||||
try testing.expectEqual(@as(usize, 3), out.len);
|
||||
try testing.expect(out[0].as_of_date.eql(Date.fromYmd(2026, 4, 7)));
|
||||
try testing.expect(out[0].net_worth == 1006);
|
||||
try testing.expect(out[1].as_of_date.eql(Date.fromYmd(2026, 4, 14)));
|
||||
try testing.expect(out[2].as_of_date.eql(Date.fromYmd(2026, 4, 21)));
|
||||
try testing.expect(out[2].net_worth == 1020);
|
||||
}
|
||||
|
||||
test "aggregatePoints: monthly picks latest snapshot in each calendar month" {
|
||||
var b1: [3]snapshot.TotalRow = undefined;
|
||||
var b2: [3]snapshot.TotalRow = undefined;
|
||||
var b3: [3]snapshot.TotalRow = undefined;
|
||||
var b4: [3]snapshot.TotalRow = undefined;
|
||||
var b5: [3]snapshot.TotalRow = undefined;
|
||||
const snaps = [_]snapshot.Snapshot{
|
||||
fixtureSnapshot(&b1, 2026, 2, 5, 100, 0, 0),
|
||||
fixtureSnapshot(&b2, 2026, 2, 28, 200, 0, 0), // latest Feb
|
||||
fixtureSnapshot(&b3, 2026, 3, 1, 300, 0, 0),
|
||||
fixtureSnapshot(&b4, 2026, 3, 31, 400, 0, 0), // latest Mar
|
||||
fixtureSnapshot(&b5, 2026, 4, 10, 500, 0, 0), // only Apr entry
|
||||
};
|
||||
const series = try buildSeries(testing.allocator, &snaps);
|
||||
defer series.deinit();
|
||||
const out = try aggregatePoints(testing.allocator, series.points, .monthly);
|
||||
defer testing.allocator.free(out);
|
||||
|
||||
try testing.expectEqual(@as(usize, 3), out.len);
|
||||
try testing.expect(out[0].as_of_date.eql(Date.fromYmd(2026, 2, 28)));
|
||||
try testing.expect(out[0].net_worth == 200);
|
||||
try testing.expect(out[1].as_of_date.eql(Date.fromYmd(2026, 3, 31)));
|
||||
try testing.expect(out[1].net_worth == 400);
|
||||
try testing.expect(out[2].as_of_date.eql(Date.fromYmd(2026, 4, 10)));
|
||||
try testing.expect(out[2].net_worth == 500);
|
||||
}
|
||||
|
||||
test "aggregatePoints: empty input returns empty slice" {
|
||||
const out_d = try aggregatePoints(testing.allocator, &.{}, .daily);
|
||||
defer testing.allocator.free(out_d);
|
||||
try testing.expectEqual(@as(usize, 0), out_d.len);
|
||||
|
||||
const out_w = try aggregatePoints(testing.allocator, &.{}, .weekly);
|
||||
defer testing.allocator.free(out_w);
|
||||
try testing.expectEqual(@as(usize, 0), out_w.len);
|
||||
|
||||
const out_m = try aggregatePoints(testing.allocator, &.{}, .monthly);
|
||||
defer testing.allocator.free(out_m);
|
||||
try testing.expectEqual(@as(usize, 0), out_m.len);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -195,24 +195,53 @@ pub const CandleAtDate = struct {
|
|||
/// Input is expected to be sorted ascending by date (the cache
|
||||
/// guarantees this). O(log n) via binary search.
|
||||
/// Resolve the close of the latest candle dated on or before `target`,
/// flagging the result stale when the match is not the exact date.
pub fn candleCloseOnOrBefore(candles: []const Candle, target: Date) ?CandleAtDate {
    if (candles.len == 0) return null;
    const idx = indexAtOrBefore(Candle, candles, target, candleDateOf) orelse return null;
    const match = candles[idx];
    return .{
        .close = match.close,
        .date = match.date,
        .stale = !match.date.eql(target),
    };
}
|
||||
|
||||
// Date extractor passed to `indexAtOrBefore` for candle slices.
// (The binary-search logic this comment used to describe now lives in
// `indexAtOrBefore` below.)
fn candleDateOf(c: Candle) Date {
    return c.date;
}
|
||||
|
||||
/// Generic "latest index ≤ target" binary search.
|
||||
///
|
||||
/// Returns the largest index `i` such that `dateOf(items[i]) <= target`, or
|
||||
/// null when no such index exists (target is strictly before every entry, or
|
||||
/// the slice is empty). Caller supplies a `dateOf` extractor so this works
|
||||
/// on any slice sorted ascending by date.
|
||||
///
|
||||
/// This is the shared "snap backward" primitive used by candle pricing
|
||||
/// (`findPriceAtDate`, `candleCloseOnOrBefore`) and the portfolio-timeline
|
||||
/// windows (`src/analytics/timeline.zig:pointAtOrBefore`). Every one of
|
||||
/// those callers answers the same question — "what's the latest data point
|
||||
/// on or before this target?" — so a single implementation keeps weekend /
|
||||
/// holiday / gap semantics uniform across the codebase.
|
||||
///
|
||||
/// No slack cap. If a policy cap is needed (e.g. "reject matches more than
|
||||
/// 7 days old"), apply it at the call site against the returned index.
|
||||
pub fn indexAtOrBefore(
|
||||
comptime T: type,
|
||||
items: []const T,
|
||||
target: Date,
|
||||
comptime dateOf: fn (T) Date,
|
||||
) ?usize {
|
||||
if (items.len == 0) return null;
|
||||
|
||||
// Lower-bound on "date > target", then step back.
|
||||
var lo: usize = 0;
|
||||
var hi: usize = candles.len;
|
||||
var hi: usize = items.len;
|
||||
while (lo < hi) {
|
||||
const mid = lo + (hi - lo) / 2;
|
||||
if (candles[mid].date.lessThan(target) or candles[mid].date.eql(target)) {
|
||||
const md = dateOf(items[mid]);
|
||||
if (md.lessThan(target) or md.eql(target)) {
|
||||
lo = mid + 1;
|
||||
} else {
|
||||
hi = mid;
|
||||
}
|
||||
}
|
||||
// lo is the first index with date > target; lo - 1 is the answer.
|
||||
if (lo == 0) return null;
|
||||
const c = candles[lo - 1];
|
||||
return .{ .close = c.close, .date = c.date, .stale = !c.date.eql(target) };
|
||||
return lo - 1;
|
||||
}
|
||||
|
||||
/// Compute portfolio summary given positions and current prices.
|
||||
|
|
@ -324,10 +353,27 @@ pub fn buildFallbackPrices(
|
|||
|
||||
// ── Historical portfolio value ───────────────────────────────
|
||||
|
||||
/// A lookback period for historical portfolio value.
|
||||
/// A lookback period anchored to `today`. Used both for:
|
||||
/// * `computeHistoricalSnapshots` — "current holdings at historical prices"
|
||||
/// (backed by candle cache via `findPriceAtDate`).
|
||||
/// * portfolio-timeline windows — "snapshot-value on date A vs. today's
|
||||
/// snapshot value" (backed by snapshot history via
|
||||
/// `timeline.pointAtOrBefore`).
|
||||
///
|
||||
/// The enum only holds periods that are *relative to today*; "since first
|
||||
/// snapshot" ("all-time") is handled inline by the timeline renderer —
|
||||
/// adding it here would break the "relative to today" invariant.
|
||||
///
|
||||
/// `all` lists the 6 periods used by the portfolio historical block (kept
|
||||
/// stable — `zfin portfolio` and the portfolio tab iterate it). The
|
||||
/// `timeline_windows` array defines the 8 periods shown in the history
|
||||
/// view's rolling-windows block.
|
||||
pub const HistoricalPeriod = enum {
|
||||
@"1D",
|
||||
@"1W",
|
||||
@"1M",
|
||||
@"3M",
|
||||
ytd,
|
||||
@"1Y",
|
||||
@"3Y",
|
||||
@"5Y",
|
||||
|
|
@ -335,8 +381,11 @@ pub const HistoricalPeriod = enum {
|
|||
|
||||
pub fn label(self: HistoricalPeriod) []const u8 {
|
||||
return switch (self) {
|
||||
.@"1D" => "1D",
|
||||
.@"1W" => "1W",
|
||||
.@"1M" => "1M",
|
||||
.@"3M" => "3M",
|
||||
.ytd => "YTD",
|
||||
.@"1Y" => "1Y",
|
||||
.@"3Y" => "3Y",
|
||||
.@"5Y" => "5Y",
|
||||
|
|
@ -344,11 +393,39 @@ pub const HistoricalPeriod = enum {
|
|||
};
|
||||
}
|
||||
|
||||
/// Human-friendly label for the history view's windows block — longer
/// than `label()`, which serves compact table headers.
pub fn longLabel(self: HistoricalPeriod) []const u8 {
    return switch (self) {
        .@"1D" => "1 day",
        .@"1W" => "1 week",
        .@"1M" => "1 month",
        .@"3M" => "3 months",
        .ytd => "YTD",
        .@"1Y" => "1 year",
        .@"3Y" => "3 years",
        .@"5Y" => "5 years",
        .@"10Y" => "10 years",
    };
}
|
||||
|
||||
/// Compute the target date by subtracting this period from `today`.
|
||||
///
|
||||
/// `1D` subtracts one calendar day. Downstream snap-backward logic
|
||||
/// will then pick the latest available data point on or before that
|
||||
/// date — so a Saturday-run view with no Saturday snapshot naturally
|
||||
/// compares today against Friday's close.
|
||||
///
|
||||
/// `ytd` resolves to Jan 1 of today's year. Jan 1 is always a market
|
||||
/// holiday; the snap primitive will fall back to the prior year's
|
||||
/// final trading day, which is exactly the brokerage YTD convention.
|
||||
pub fn targetDate(self: HistoricalPeriod, today: Date) Date {
|
||||
return switch (self) {
|
||||
.@"1D" => today.addDays(-1),
|
||||
.@"1W" => today.addDays(-7),
|
||||
.@"1M" => today.subtractMonths(1),
|
||||
.@"3M" => today.subtractMonths(3),
|
||||
.ytd => Date.fromYmd(today.year(), 1, 1),
|
||||
.@"1Y" => today.subtractYears(1),
|
||||
.@"3Y" => today.subtractYears(3),
|
||||
.@"5Y" => today.subtractYears(5),
|
||||
|
|
@ -356,7 +433,18 @@ pub const HistoricalPeriod = enum {
|
|||
};
|
||||
}
|
||||
|
||||
/// Periods shown in `zfin portfolio`'s historical-value block and the
|
||||
/// portfolio tab. Stable by design — renderers iterate and format by
|
||||
/// index. Do not reorder without updating those callers.
|
||||
pub const all = [_]HistoricalPeriod{ .@"1M", .@"3M", .@"1Y", .@"3Y", .@"5Y", .@"10Y" };
|
||||
|
||||
/// Periods shown in the history view's rolling-windows block. Order
|
||||
/// matches user mental model: "today vs. recent" → "today vs. old".
|
||||
/// `all_time` is rendered as a 9th row by the timeline renderer —
|
||||
/// not listed here because it isn't relative to `today`.
|
||||
pub const timeline_windows = [_]HistoricalPeriod{
|
||||
.@"1D", .@"1W", .@"1M", .ytd, .@"1Y", .@"3Y", .@"5Y", .@"10Y",
|
||||
};
|
||||
};
|
||||
|
||||
/// One snapshot of portfolio value at a historical date.
|
||||
|
|
@ -388,23 +476,9 @@ pub const HistoricalSnapshot = struct {
|
|||
/// For snapshot/backfill usage prefer `candleCloseOnOrBefore` — it has
|
||||
/// no slack cap and reports the matched candle's date + staleness.
|
||||
fn findPriceAtDate(candles: []const Candle, target: Date) ?f64 {
|
||||
if (candles.len == 0) return null;
|
||||
|
||||
// Binary search for the target date
|
||||
var lo: usize = 0;
|
||||
var hi: usize = candles.len;
|
||||
while (lo < hi) {
|
||||
const mid = lo + (hi - lo) / 2;
|
||||
if (candles[mid].date.days <= target.days) {
|
||||
lo = mid + 1;
|
||||
} else {
|
||||
hi = mid;
|
||||
}
|
||||
}
|
||||
// lo points to first candle after target; we want the one at or before
|
||||
if (lo == 0) return null; // all candles are after target
|
||||
const idx = lo - 1;
|
||||
// Allow up to 5 trading days slack (weekends, holidays)
|
||||
const idx = indexAtOrBefore(Candle, candles, target, candleDateOf) orelse return null;
|
||||
// Allow up to 7 calendar days slack (weekends, holidays) between the
|
||||
// matched candle and the target.
|
||||
if (target.days - candles[idx].date.days > 7) return null;
|
||||
return candles[idx].close;
|
||||
}
|
||||
|
|
@ -625,6 +699,69 @@ test "HistoricalPeriod label and targetDate" {
|
|||
try std.testing.expectEqual(@as(u8, 2), one_month.month());
|
||||
}
|
||||
|
||||
test "HistoricalPeriod 1D/1W/ytd targetDate + labels" {
|
||||
const today = Date.fromYmd(2026, 4, 22);
|
||||
|
||||
// 1D = yesterday
|
||||
const d1 = HistoricalPeriod.@"1D".targetDate(today);
|
||||
try std.testing.expect(d1.eql(Date.fromYmd(2026, 4, 21)));
|
||||
|
||||
// 1W = 7 days ago
|
||||
const w1 = HistoricalPeriod.@"1W".targetDate(today);
|
||||
try std.testing.expect(w1.eql(Date.fromYmd(2026, 4, 15)));
|
||||
|
||||
// YTD = Jan 1 of current year (snap-backward in callers pulls back to
|
||||
// prior year's Dec 31 close, matching brokerage YTD convention)
|
||||
const ytd = HistoricalPeriod.ytd.targetDate(today);
|
||||
try std.testing.expect(ytd.eql(Date.fromYmd(2026, 1, 1)));
|
||||
|
||||
// Labels used in compact contexts
|
||||
try std.testing.expectEqualStrings("1D", HistoricalPeriod.@"1D".label());
|
||||
try std.testing.expectEqualStrings("1W", HistoricalPeriod.@"1W".label());
|
||||
try std.testing.expectEqualStrings("YTD", HistoricalPeriod.ytd.label());
|
||||
|
||||
// Long labels used in the history windows block
|
||||
try std.testing.expectEqualStrings("1 day", HistoricalPeriod.@"1D".longLabel());
|
||||
try std.testing.expectEqualStrings("1 week", HistoricalPeriod.@"1W".longLabel());
|
||||
try std.testing.expectEqualStrings("1 month", HistoricalPeriod.@"1M".longLabel());
|
||||
try std.testing.expectEqualStrings("YTD", HistoricalPeriod.ytd.longLabel());
|
||||
try std.testing.expectEqualStrings("10 years", HistoricalPeriod.@"10Y".longLabel());
|
||||
}
|
||||
|
||||
test "HistoricalPeriod.timeline_windows: 8 periods, no all_time" {
|
||||
// `all_time` is intentionally handled inline by the timeline renderer.
|
||||
// This test pins that decision — if a future change tries to add it
|
||||
// here, it will break.
|
||||
try std.testing.expectEqual(@as(usize, 8), HistoricalPeriod.timeline_windows.len);
|
||||
try std.testing.expectEqual(HistoricalPeriod.@"1D", HistoricalPeriod.timeline_windows[0]);
|
||||
try std.testing.expectEqual(HistoricalPeriod.@"10Y", HistoricalPeriod.timeline_windows[7]);
|
||||
}
|
||||
|
||||
test "indexAtOrBefore: exact / before all / after all / empty" {
|
||||
const dates = [_]Date{
|
||||
Date.fromYmd(2026, 4, 17),
|
||||
Date.fromYmd(2026, 4, 18),
|
||||
Date.fromYmd(2026, 4, 21),
|
||||
};
|
||||
const dateOf = struct {
|
||||
fn f(d: Date) Date {
|
||||
return d;
|
||||
}
|
||||
}.f;
|
||||
|
||||
// Exact match -> that index
|
||||
try std.testing.expectEqual(@as(usize, 1), indexAtOrBefore(Date, &dates, Date.fromYmd(2026, 4, 18), dateOf).?);
|
||||
// Between two entries -> earlier index
|
||||
try std.testing.expectEqual(@as(usize, 1), indexAtOrBefore(Date, &dates, Date.fromYmd(2026, 4, 19), dateOf).?);
|
||||
// After all -> last index
|
||||
try std.testing.expectEqual(@as(usize, 2), indexAtOrBefore(Date, &dates, Date.fromYmd(2099, 1, 1), dateOf).?);
|
||||
// Before all -> null
|
||||
try std.testing.expect(indexAtOrBefore(Date, &dates, Date.fromYmd(1999, 1, 1), dateOf) == null);
|
||||
// Empty -> null
|
||||
const empty: []const Date = &.{};
|
||||
try std.testing.expect(indexAtOrBefore(Date, empty, Date.fromYmd(2026, 4, 1), dateOf) == null);
|
||||
}
|
||||
|
||||
test "adjustForNonStockAssets" {
|
||||
const Portfolio = portfolio_mod.Portfolio;
|
||||
const Lot = portfolio_mod.Lot;
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -46,6 +46,15 @@ const usage =
|
|||
\\Portfolio command options:
|
||||
\\ --refresh Force refresh (ignore cache, re-fetch all prices)
|
||||
\\
|
||||
\\History command options (portfolio mode; omit SYMBOL):
|
||||
\\ --since <YYYY-MM-DD> Earliest as_of_date (inclusive)
|
||||
\\ --until <YYYY-MM-DD> Latest as_of_date (inclusive)
|
||||
\\ --metric <name> liquid (default), illiquid, or net_worth
|
||||
\\ --resolution <name> daily, weekly, monthly, or auto (default: auto)
|
||||
\\ auto: daily ≤90d, weekly ≤730d, else monthly
|
||||
\\ --limit <N> Max rows in the recent-snapshots table (default: 40)
|
||||
\\ --rebuild-rollup (Re)write history/rollup.srf and exit
|
||||
\\
|
||||
\\Audit command options:
|
||||
\\ --fidelity <CSV> Fidelity positions CSV export (download from "All accounts" positions tab)
|
||||
\\ --schwab <CSV> Schwab per-account positions CSV export
|
||||
|
|
|
|||
11
src/tui.zig
11
src/tui.zig
|
|
@ -382,6 +382,11 @@ pub const App = struct {
|
|||
// day-to-day. Illiquid barely changes, net_worth is dominated by
|
||||
// liquid anyway, so "show me liquid" is the headline view.
|
||||
history_metric: timeline.Metric = .liquid,
|
||||
/// Forced resolution for the history table + chart. Null means
|
||||
/// "auto" — `timeline.selectResolution` picks daily/weekly/monthly
|
||||
/// based on the span of the loaded series. Cycled via the
|
||||
/// `history_resolution_next` keybind ('t' by default).
|
||||
history_resolution: ?timeline.Resolution = null,
|
||||
|
||||
// Mouse wheel debounce for cursor-based tabs (portfolio, options).
|
||||
// Terminals often send multiple wheel events per physical tick.
|
||||
|
|
@ -1038,6 +1043,12 @@ pub const App = struct {
|
|||
return ctx.consumeAndRedraw();
|
||||
}
|
||||
},
|
||||
.history_resolution_next => {
|
||||
if (self.active_tab == .history) {
|
||||
history_tab.cycleResolution(self);
|
||||
return ctx.consumeAndRedraw();
|
||||
}
|
||||
},
|
||||
.sort_col_next => {
|
||||
if (self.active_tab == .portfolio) {
|
||||
if (self.portfolio_sort_field.next()) |new_field| {
|
||||
|
|
|
|||
|
|
@ -1,17 +1,25 @@
|
|||
//! TUI history tab — portfolio value timeline over time.
|
||||
//!
|
||||
//! Layout (top-to-bottom):
|
||||
//! 1. Rolling-windows block for the focused metric
|
||||
//! (1D / 1W / 1M / YTD / 1Y / 3Y / 5Y / 10Y / All-time)
|
||||
//! 2. Braille timeline chart for the focused metric
|
||||
//! 3. "Recent snapshots" table: Liquid | Illiquid | Net Worth with
|
||||
//! per-row Δ vs. previous row. Newest-first. Row colored by the
|
||||
//! focused-metric delta.
|
||||
//!
|
||||
//! Consumes `src/analytics/timeline.zig` (pure compute) and
|
||||
//! `src/history.zig` (snapshot IO). No analytics live here: this module
|
||||
//! is only responsible for driving data loading on tab activation and
|
||||
//! converting the timeline series into `StyledLine`s for rendering.
|
||||
//!
|
||||
//! Shows:
|
||||
//! - Headline summary (first/last/Δ for net worth / liquid / illiquid)
|
||||
//! - Braille timeline chart for the selected metric
|
||||
//! - Recent snapshots table (last N entries with per-metric deltas)
|
||||
//! Keybinds:
|
||||
//! - `m` cycles chart metric (`history_metric_next`)
|
||||
//! - `t` cycles resolution (`history_resolution_next`)
|
||||
//!
|
||||
//! The chart metric is cycleable via the `history_metric_next` keybind
|
||||
//! (default `m`). The selected metric persists on `App.history_metric`.
|
||||
//! Default metric is `.liquid` — matches the CLI history default and
|
||||
//! is the headline view for day-to-day watching (illiquid barely
|
||||
//! changes, net worth is dominated by liquid anyway).
|
||||
|
||||
const std = @import("std");
|
||||
const vaxis = @import("vaxis");
|
||||
|
|
@ -21,19 +29,18 @@ const theme = @import("theme.zig");
|
|||
const tui = @import("../tui.zig");
|
||||
const history_io = @import("../history.zig");
|
||||
const timeline = @import("../analytics/timeline.zig");
|
||||
const view = @import("../view/history.zig");
|
||||
const App = tui.App;
|
||||
const StyledLine = tui.StyledLine;
|
||||
|
||||
// Show at most this many rows in the bottom table. Older rows still
|
||||
// contribute to the chart and summary, just not to the table.
|
||||
const max_table_rows: usize = 20;
|
||||
// contribute to the chart and windows block, just not to the table.
|
||||
const max_table_rows: usize = 30;
|
||||
|
||||
// ── Data loading ──────────────────────────────────────────────
|
||||
|
||||
pub fn loadData(app: *App) void {
|
||||
app.history_loaded = true;
|
||||
|
||||
// Clear any previous load (refresh path).
|
||||
freeLoaded(app);
|
||||
|
||||
const portfolio_path = app.portfolio_path orelse {
|
||||
|
|
@ -41,11 +48,6 @@ pub fn loadData(app: *App) void {
|
|||
return;
|
||||
};
|
||||
|
||||
// Shared path with the `zfin history` CLI command — derives
|
||||
// history/, loads every snapshot, and builds the timeline series.
|
||||
// If history/ is missing or all files are malformed, we surface a
|
||||
// status message and leave `app.history_timeline` null; the
|
||||
// renderer handles the empty case.
|
||||
app.history_timeline = history_io.loadTimeline(app.allocator, portfolio_path) catch {
|
||||
app.setStatus("Failed to read history/ directory");
|
||||
return;
|
||||
|
|
@ -65,12 +67,32 @@ pub fn freeLoaded(app: *App) void {
|
|||
}
|
||||
}
|
||||
|
||||
/// Cycle the displayed metric: net_worth → liquid → illiquid → net_worth.
|
||||
/// Cycle the displayed metric: liquid → illiquid → net_worth → liquid.
|
||||
///
|
||||
/// Starts at liquid to match the default and the "most-useful-first"
|
||||
/// reading order: markets first, illiquid revaluations second, total
|
||||
/// last.
|
||||
pub fn cycleMetric(app: *App) void {
|
||||
app.history_metric = switch (app.history_metric) {
|
||||
.net_worth => .liquid,
|
||||
.liquid => .illiquid,
|
||||
.illiquid => .net_worth,
|
||||
.net_worth => .liquid,
|
||||
};
|
||||
}
|
||||
|
||||
/// Cycle resolution: auto → daily → weekly → monthly → auto.
|
||||
///
|
||||
/// Null = auto (defers to `timeline.selectResolution`). The cycle runs
|
||||
/// through the explicit choices so the user can force a given resolution
|
||||
/// when the auto pick doesn't match intent.
|
||||
pub fn cycleResolution(app: *App) void {
|
||||
app.history_resolution = switch (app.history_resolution orelse {
|
||||
app.history_resolution = .daily;
|
||||
return;
|
||||
}) {
|
||||
.daily => .weekly,
|
||||
.weekly => .monthly,
|
||||
.monthly => null, // back to auto
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -78,16 +100,17 @@ pub fn cycleMetric(app: *App) void {
|
|||
|
||||
pub fn buildStyledLines(app: *App, arena: std.mem.Allocator) ![]const StyledLine {
|
||||
const series: ?timeline.TimelineSeries = if (app.history_timeline) |tl| tl.series else null;
|
||||
return renderHistoryLines(arena, app.theme, series, app.history_metric);
|
||||
return renderHistoryLines(arena, app.theme, series, app.history_metric, app.history_resolution);
|
||||
}
|
||||
|
||||
/// Pure renderer — no App dependency. Builds the styled lines from a
|
||||
/// timeline series and the currently-selected metric.
|
||||
/// timeline series, a focused metric, and an optional resolution override.
|
||||
pub fn renderHistoryLines(
|
||||
arena: std.mem.Allocator,
|
||||
th: theme.Theme,
|
||||
series_opt: ?timeline.TimelineSeries,
|
||||
metric: timeline.Metric,
|
||||
focus_metric: timeline.Metric,
|
||||
resolution_override: ?timeline.Resolution,
|
||||
) ![]const StyledLine {
|
||||
var lines: std.ArrayList(StyledLine) = .empty;
|
||||
|
||||
|
|
@ -107,31 +130,25 @@ pub fn renderHistoryLines(
|
|||
return lines.toOwnedSlice(arena);
|
||||
}
|
||||
|
||||
// ── Summary block: one line per metric ───────────────────────
|
||||
try appendSummaryLine(arena, &lines, th, "Net Worth", points, .net_worth);
|
||||
try appendSummaryLine(arena, &lines, th, "Liquid ", points, .liquid);
|
||||
try appendSummaryLine(arena, &lines, th, "Illiquid ", points, .illiquid);
|
||||
const metric_label = focus_metric.label();
|
||||
|
||||
// ── Windows block (focused metric only) ──────────────────────
|
||||
try appendWindowsBlock(arena, &lines, th, points, focus_metric, metric_label);
|
||||
try lines.append(arena, .{ .text = "", .style = th.contentStyle() });
|
||||
|
||||
// ── Chart ────────────────────────────────────────────────────
|
||||
const metric_label = switch (metric) {
|
||||
.net_worth => "Net Worth",
|
||||
.liquid => "Liquid",
|
||||
.illiquid => "Illiquid",
|
||||
};
|
||||
const chart_header = try std.fmt.allocPrint(arena, " Chart: {s} (press 'm' to cycle)", .{metric_label});
|
||||
const chart_header = try std.fmt.allocPrint(
|
||||
arena,
|
||||
" Chart: {s} (press 'm' to cycle metric, 't' to cycle resolution)",
|
||||
.{metric_label},
|
||||
);
|
||||
try lines.append(arena, .{ .text = chart_header, .style = th.headerStyle() });
|
||||
try lines.append(arena, .{ .text = "", .style = th.contentStyle() });
|
||||
|
||||
// Render via the existing braille chart: convert MetricPoints into
|
||||
// synthetic candles so we can call renderBrailleToStyledLines.
|
||||
// Chart: synthesize candles from the focused metric's value.
|
||||
const candles = try arena.alloc(zfin.Candle, points.len);
|
||||
for (points, 0..) |p, i| {
|
||||
const value = switch (metric) {
|
||||
.net_worth => p.net_worth,
|
||||
.liquid => p.liquid,
|
||||
.illiquid => p.illiquid,
|
||||
};
|
||||
const value = extractOne(p, focus_metric);
|
||||
candles[i] = .{
|
||||
.date = p.as_of_date,
|
||||
.open = value,
|
||||
|
|
@ -147,81 +164,128 @@ pub fn renderHistoryLines(
|
|||
try lines.append(arena, .{ .text = "", .style = th.contentStyle() });
|
||||
|
||||
// ── Recent snapshots table ───────────────────────────────────
|
||||
try lines.append(arena, .{ .text = " Recent snapshots", .style = th.headerStyle() });
|
||||
const resolution = resolution_override orelse timeline.selectResolution(points);
|
||||
const aggregated = try timeline.aggregatePoints(arena, points, resolution);
|
||||
const deltas = try timeline.computeRowDeltas(arena, aggregated);
|
||||
|
||||
var rlabel_buf: [32]u8 = undefined;
|
||||
const rlabel = view.fmtResolutionLabel(&rlabel_buf, resolution_override, resolution);
|
||||
const table_header = try std.fmt.allocPrint(
|
||||
arena,
|
||||
" Recent snapshots {s}",
|
||||
.{rlabel},
|
||||
);
|
||||
try lines.append(arena, .{ .text = table_header, .style = th.headerStyle() });
|
||||
try lines.append(arena, .{ .text = "", .style = th.contentStyle() });
|
||||
|
||||
// Headers: Date | Illiquid | Liquid | Net Worth (components → total)
|
||||
try lines.append(arena, .{
|
||||
.text = " Date Illiquid Liquid Net Worth",
|
||||
.style = th.mutedStyle(),
|
||||
});
|
||||
// Header widths mirror the CLI exactly. Leading " " indent + 10-char
|
||||
// date + 2 gap + three 28-char composite cells separated by 2 gaps.
|
||||
const header_line = try std.fmt.allocPrint(
|
||||
arena,
|
||||
" {s:>10} {s:>28} {s:>28} {s:>28}",
|
||||
.{ "Date", "Liquid (Δ)", "Illiquid (Δ)", "Net Worth (Δ)" },
|
||||
);
|
||||
try lines.append(arena, .{ .text = header_line, .style = th.mutedStyle() });
|
||||
|
||||
// Show up to max_table_rows most recent. Render newest-first so
|
||||
// the latest snapshot sits directly under the column headers,
|
||||
// matching typical "recent items" lists and saving the eye a scroll.
|
||||
const start = if (points.len > max_table_rows) points.len - max_table_rows else 0;
|
||||
const first = points[0]; // deltas still measured from series start
|
||||
|
||||
const window = points[start..];
|
||||
// Render up to max_table_rows newest-first.
|
||||
const start = if (deltas.len > max_table_rows) deltas.len - max_table_rows else 0;
|
||||
const window = deltas[start..];
|
||||
var i: usize = window.len;
|
||||
while (i > 0) {
|
||||
i -= 1;
|
||||
const text = try fmtTableRow(arena, window[i], first);
|
||||
try lines.append(arena, .{ .text = text, .style = th.contentStyle() });
|
||||
const row = window[i];
|
||||
const text = try fmtTableRow(arena, row);
|
||||
const style = rowStyle(th, row, focus_metric);
|
||||
try lines.append(arena, .{ .text = text, .style = style });
|
||||
}
|
||||
|
||||
return lines.toOwnedSlice(arena);
|
||||
}
|
||||
|
||||
fn appendSummaryLine(
|
||||
/// Render the rolling-windows block into `lines`. Output matches the
|
||||
/// CLI byte-for-byte (modulo ANSI) — same widths, same labels, same
|
||||
/// dashed divider — because both call `view.buildWindowRowCells`.
|
||||
fn appendWindowsBlock(
|
||||
arena: std.mem.Allocator,
|
||||
lines: *std.ArrayList(StyledLine),
|
||||
th: theme.Theme,
|
||||
label: []const u8,
|
||||
points: []const timeline.TimelinePoint,
|
||||
metric: timeline.Metric,
|
||||
metric_label: []const u8,
|
||||
) !void {
|
||||
const first = extractOne(points[0], metric);
|
||||
const last = extractOne(points[points.len - 1], metric);
|
||||
const delta = last - first;
|
||||
const pct = if (first != 0) (delta / first) * 100.0 else 0.0;
|
||||
_ = metric_label; // outer "Portfolio History" + chart header already name the metric
|
||||
|
||||
var first_buf: [24]u8 = undefined;
|
||||
var last_buf: [24]u8 = undefined;
|
||||
var delta_buf: [24]u8 = undefined;
|
||||
const today = points[points.len - 1].as_of_date;
|
||||
const ws = try timeline.computeWindowSet(arena, points, metric, today);
|
||||
// Arena-backed: no deinit needed.
|
||||
|
||||
const first_s = fmt.fmtMoneyAbs(&first_buf, first);
|
||||
const last_s = fmt.fmtMoneyAbs(&last_buf, last);
|
||||
const delta_abs = fmt.fmtMoneyAbs(&delta_buf, @abs(delta));
|
||||
const sign: []const u8 = if (delta < 0) "-" else "+";
|
||||
// Block title — just "Change". Metric is redundant with the chart
|
||||
// header below ("Chart: Liquid") and the outer "Portfolio History".
|
||||
try lines.append(arena, .{ .text = " Change", .style = th.headerStyle() });
|
||||
|
||||
const text = try std.fmt.allocPrint(arena, " {s} first: {s} last: {s} Δ: {s}{s} ({d:.2}%)", .{
|
||||
label, first_s, last_s, sign, delta_abs, pct,
|
||||
// Column header + dashed divider. Widths pinned to view constants
|
||||
// (12 / 18 / 10).
|
||||
const header_line = try std.fmt.allocPrint(
|
||||
arena,
|
||||
" {s:<12} {s:>18} {s:>10}",
|
||||
.{ "", "Δ", "%" },
|
||||
);
|
||||
try lines.append(arena, .{ .text = header_line, .style = th.mutedStyle() });
|
||||
try lines.append(arena, .{
|
||||
.text = " ------------ ------------------ ----------",
|
||||
.style = th.mutedStyle(),
|
||||
});
|
||||
|
||||
const style = if (delta < 0) th.negativeStyle() else if (delta > 0) th.positiveStyle() else th.contentStyle();
|
||||
try lines.append(arena, .{ .text = text, .style = style });
|
||||
for (ws.rows) |row| {
|
||||
var dbuf: [32]u8 = undefined;
|
||||
var pbuf: [16]u8 = undefined;
|
||||
const cells = view.buildWindowRowCells(row, &dbuf, &pbuf);
|
||||
|
||||
const text = try std.fmt.allocPrint(
|
||||
arena,
|
||||
" {s:<12} {s:>18} {s:>10}",
|
||||
.{ cells.label, cells.delta_str, cells.pct_str },
|
||||
);
|
||||
const style: vaxis.Cell.Style = switch (cells.sign) {
|
||||
.positive => th.positiveStyle(),
|
||||
.negative => th.negativeStyle(),
|
||||
.zero, .missing => th.mutedStyle(),
|
||||
};
|
||||
try lines.append(arena, .{ .text = text, .style = style });
|
||||
}
|
||||
}
|
||||
|
||||
fn fmtTableRow(
|
||||
arena: std.mem.Allocator,
|
||||
p: timeline.TimelinePoint,
|
||||
first: timeline.TimelinePoint,
|
||||
) ![]const u8 {
|
||||
/// Build a recent-snapshots table row. Cells align with the header
|
||||
/// because both use `view.fmtValueDeltaCell` with `view.table_cell_width`.
|
||||
fn fmtTableRow(arena: std.mem.Allocator, row: timeline.RowDelta) ![]const u8 {
|
||||
var date_buf: [10]u8 = undefined;
|
||||
var ill_buf: [24]u8 = undefined;
|
||||
var liq_buf: [24]u8 = undefined;
|
||||
var nw_buf: [24]u8 = undefined;
|
||||
var liq_cell_buf: [64]u8 = undefined;
|
||||
var ill_cell_buf: [64]u8 = undefined;
|
||||
var nw_cell_buf: [64]u8 = undefined;
|
||||
|
||||
const date_s = p.as_of_date.format(&date_buf);
|
||||
const ill_s = fmt.fmtMoneyAbs(&ill_buf, p.illiquid);
|
||||
const liq_s = fmt.fmtMoneyAbs(&liq_buf, p.liquid);
|
||||
const nw_s = fmt.fmtMoneyAbs(&nw_buf, p.net_worth);
|
||||
_ = first; // reserved — could render Δ columns here later
|
||||
const date_s = row.date.format(&date_buf);
|
||||
const liq_cell = view.fmtValueDeltaCell(&liq_cell_buf, row.liquid, row.d_liquid, view.table_cell_width);
|
||||
const ill_cell = view.fmtValueDeltaCell(&ill_cell_buf, row.illiquid, row.d_illiquid, view.table_cell_width);
|
||||
const nw_cell = view.fmtValueDeltaCell(&nw_cell_buf, row.net_worth, row.d_net_worth, view.table_cell_width);
|
||||
|
||||
return std.fmt.allocPrint(arena, " {s} {s:>16} {s:>16} {s:>16}", .{
|
||||
date_s, ill_s, liq_s, nw_s,
|
||||
});
|
||||
return std.fmt.allocPrint(
|
||||
arena,
|
||||
" {s:>10} {s} {s} {s}",
|
||||
.{ date_s, liq_cell, ill_cell, nw_cell },
|
||||
);
|
||||
}
|
||||
|
||||
fn rowStyle(th: theme.Theme, row: timeline.RowDelta, metric: timeline.Metric) vaxis.Cell.Style {
|
||||
const d_opt: ?f64 = switch (metric) {
|
||||
.liquid => row.d_liquid,
|
||||
.illiquid => row.d_illiquid,
|
||||
.net_worth => row.d_net_worth,
|
||||
};
|
||||
if (d_opt) |d| {
|
||||
if (d < 0) return th.negativeStyle();
|
||||
if (d > 0) return th.positiveStyle();
|
||||
}
|
||||
return th.mutedStyle();
|
||||
}
|
||||
|
||||
fn extractOne(p: timeline.TimelinePoint, metric: timeline.Metric) f64 {
|
||||
|
|
@ -244,7 +308,7 @@ test "renderHistoryLines: no series shows no-data message" {
|
|||
const a = arena.allocator();
|
||||
const th = theme.default_theme;
|
||||
|
||||
const lines = try renderHistoryLines(a, th, null, .net_worth);
|
||||
const lines = try renderHistoryLines(a, th, null, .liquid, null);
|
||||
var saw_no_data = false;
|
||||
for (lines) |l| {
|
||||
if (std.mem.indexOf(u8, l.text, "No history snapshots") != null) saw_no_data = true;
|
||||
|
|
@ -252,14 +316,12 @@ test "renderHistoryLines: no series shows no-data message" {
|
|||
try testing.expect(saw_no_data);
|
||||
}
|
||||
|
||||
test "renderHistoryLines: renders summary + chart + table" {
|
||||
test "renderHistoryLines: renders windows + chart + table in correct order" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
const th = theme.default_theme;
|
||||
|
||||
// Build a tiny timeline by hand (bypasses buildSeries + its snapshot
|
||||
// input). Two points: day1 and day2.
|
||||
const pts = try a.alloc(timeline.TimelinePoint, 2);
|
||||
pts[0] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 20),
|
||||
|
|
@ -278,29 +340,120 @@ test "renderHistoryLines: renders summary + chart + table" {
|
|||
.tax_types = &.{},
|
||||
};
|
||||
|
||||
const series: timeline.TimelineSeries = .{
|
||||
.points = pts,
|
||||
.allocator = a, // arena, so deinit is safe no-op
|
||||
};
|
||||
const series: timeline.TimelineSeries = .{ .points = pts, .allocator = a };
|
||||
const lines = try renderHistoryLines(a, th, series, .liquid, .daily);
|
||||
|
||||
const lines = try renderHistoryLines(a, th, series, .net_worth);
|
||||
// Expect: header + summary rows + chart header + chart + table header
|
||||
try testing.expect(lines.len > 10);
|
||||
|
||||
var saw_header = false;
|
||||
var saw_net_worth = false;
|
||||
var saw_table = false;
|
||||
for (lines) |l| {
|
||||
if (std.mem.indexOf(u8, l.text, "Portfolio History") != null) saw_header = true;
|
||||
if (std.mem.indexOf(u8, l.text, "Net Worth") != null) saw_net_worth = true;
|
||||
if (std.mem.indexOf(u8, l.text, "Recent snapshots") != null) saw_table = true;
|
||||
// Find the indices of the three major section headers.
|
||||
var windows_idx: ?usize = null;
|
||||
var chart_idx: ?usize = null;
|
||||
var table_idx: ?usize = null;
|
||||
for (lines, 0..) |l, i| {
|
||||
if (std.mem.eql(u8, std.mem.trim(u8, l.text, " "), "Change")) windows_idx = i;
|
||||
if (std.mem.indexOf(u8, l.text, "Chart: Liquid") != null) chart_idx = i;
|
||||
if (std.mem.indexOf(u8, l.text, "Recent snapshots") != null) table_idx = i;
|
||||
}
|
||||
try testing.expect(saw_header);
|
||||
try testing.expect(saw_net_worth);
|
||||
try testing.expect(saw_table);
|
||||
try testing.expect(windows_idx != null);
|
||||
try testing.expect(chart_idx != null);
|
||||
try testing.expect(table_idx != null);
|
||||
// Order: windows → chart → table
|
||||
try testing.expect(windows_idx.? < chart_idx.?);
|
||||
try testing.expect(chart_idx.? < table_idx.?);
|
||||
}
|
||||
|
||||
test "renderHistoryLines: metric switching changes chart label" {
|
||||
test "renderHistoryLines: windows block includes 1 day + All-time" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
const th = theme.default_theme;
|
||||
|
||||
const pts = try a.alloc(timeline.TimelinePoint, 2);
|
||||
pts[0] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 20),
|
||||
.net_worth = 1000,
|
||||
.liquid = 700,
|
||||
.illiquid = 300,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
pts[1] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 21),
|
||||
.net_worth = 1100,
|
||||
.liquid = 800,
|
||||
.illiquid = 300,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
const series: timeline.TimelineSeries = .{ .points = pts, .allocator = a };
|
||||
|
||||
const lines = try renderHistoryLines(a, th, series, .liquid, null);
|
||||
|
||||
var saw_1d = false;
|
||||
var saw_all_time = false;
|
||||
for (lines) |l| {
|
||||
if (std.mem.indexOf(u8, l.text, "1 day") != null) saw_1d = true;
|
||||
if (std.mem.indexOf(u8, l.text, "All-time") != null) saw_all_time = true;
|
||||
}
|
||||
try testing.expect(saw_1d);
|
||||
try testing.expect(saw_all_time);
|
||||
}
|
||||
|
||||
test "renderHistoryLines: table rows emitted newest-first and column order is Liquid → Illiquid → NW" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
const th = theme.default_theme;
|
||||
|
||||
const pts = try a.alloc(timeline.TimelinePoint, 3);
|
||||
pts[0] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 19),
|
||||
.net_worth = 900,
|
||||
.liquid = 600,
|
||||
.illiquid = 300,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
pts[1] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 20),
|
||||
.net_worth = 1000,
|
||||
.liquid = 700,
|
||||
.illiquid = 300,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
pts[2] = .{
|
||||
.as_of_date = Date.fromYmd(2026, 4, 21),
|
||||
.net_worth = 1100,
|
||||
.liquid = 800,
|
||||
.illiquid = 300,
|
||||
.accounts = &.{},
|
||||
.tax_types = &.{},
|
||||
};
|
||||
const series: timeline.TimelineSeries = .{ .points = pts, .allocator = a };
|
||||
|
||||
const lines = try renderHistoryLines(a, th, series, .liquid, .daily);
|
||||
|
||||
// Join all lines to scan row ordering.
|
||||
var joined: std.ArrayList(u8) = .empty;
|
||||
for (lines) |l| {
|
||||
try joined.appendSlice(a, l.text);
|
||||
try joined.append(a, '\n');
|
||||
}
|
||||
const text = joined.items;
|
||||
|
||||
// Header column order: Liquid before Illiquid before Net Worth
|
||||
const h_liq = std.mem.indexOf(u8, text, "Liquid") orelse return error.TestExpectedMatch;
|
||||
const h_ill = std.mem.indexOf(u8, text, "Illiquid") orelse return error.TestExpectedMatch;
|
||||
const h_nw = std.mem.indexOf(u8, text, "Net Worth") orelse return error.TestExpectedMatch;
|
||||
try testing.expect(h_liq < h_ill);
|
||||
try testing.expect(h_ill < h_nw);
|
||||
|
||||
// Newest-first: 2026-04-21 appears before 2026-04-19 in the text
|
||||
const d_new = std.mem.lastIndexOf(u8, text, "2026-04-21") orelse return error.TestExpectedMatch;
|
||||
const d_old = std.mem.lastIndexOf(u8, text, "2026-04-19") orelse return error.TestExpectedMatch;
|
||||
try testing.expect(d_new < d_old);
|
||||
}
|
||||
|
||||
test "renderHistoryLines: metric cycling changes chart label and windows header" {
|
||||
var arena = std.heap.ArenaAllocator.init(testing.allocator);
|
||||
defer arena.deinit();
|
||||
const a = arena.allocator();
|
||||
|
|
@ -317,14 +470,7 @@ test "renderHistoryLines: metric switching changes chart label" {
|
|||
};
|
||||
const series: timeline.TimelineSeries = .{ .points = pts, .allocator = a };
|
||||
|
||||
const lines_liquid = try renderHistoryLines(a, th, series, .liquid);
|
||||
var saw_liquid_chart = false;
|
||||
for (lines_liquid) |l| {
|
||||
if (std.mem.indexOf(u8, l.text, "Chart: Liquid") != null) saw_liquid_chart = true;
|
||||
}
|
||||
try testing.expect(saw_liquid_chart);
|
||||
|
||||
const lines_ill = try renderHistoryLines(a, th, series, .illiquid);
|
||||
const lines_ill = try renderHistoryLines(a, th, series, .illiquid, null);
|
||||
var saw_ill_chart = false;
|
||||
for (lines_ill) |l| {
|
||||
if (std.mem.indexOf(u8, l.text, "Chart: Illiquid") != null) saw_ill_chart = true;
|
||||
|
|
@ -332,27 +478,26 @@ test "renderHistoryLines: metric switching changes chart label" {
|
|||
try testing.expect(saw_ill_chart);
|
||||
}
|
||||
|
||||
test "cycleMetric: walks net_worth → liquid → illiquid → net_worth" {
|
||||
// Test the pure logic without needing an App — replicate inline.
|
||||
var m: timeline.Metric = .net_worth;
|
||||
test "cycleMetric: liquid → illiquid → net_worth → liquid" {
|
||||
var m: timeline.Metric = .liquid;
|
||||
m = switch (m) {
|
||||
.net_worth => .liquid,
|
||||
.liquid => .illiquid,
|
||||
.illiquid => .net_worth,
|
||||
};
|
||||
try testing.expectEqual(timeline.Metric.liquid, m);
|
||||
m = switch (m) {
|
||||
.net_worth => .liquid,
|
||||
.liquid => .illiquid,
|
||||
.illiquid => .net_worth,
|
||||
};
|
||||
try testing.expectEqual(timeline.Metric.illiquid, m);
|
||||
m = switch (m) {
|
||||
.net_worth => .liquid,
|
||||
.liquid => .illiquid,
|
||||
.illiquid => .net_worth,
|
||||
.net_worth => .liquid,
|
||||
};
|
||||
try testing.expectEqual(timeline.Metric.net_worth, m);
|
||||
m = switch (m) {
|
||||
.liquid => .illiquid,
|
||||
.illiquid => .net_worth,
|
||||
.net_worth => .liquid,
|
||||
};
|
||||
try testing.expectEqual(timeline.Metric.liquid, m);
|
||||
}
|
||||
|
||||
// Keep refAllDeclsRecursive happy
|
||||
|
|
|
|||
|
|
@ -41,6 +41,7 @@ pub const Action = enum {
|
|||
chart_timeframe_next,
|
||||
chart_timeframe_prev,
|
||||
history_metric_next,
|
||||
history_resolution_next,
|
||||
sort_col_next,
|
||||
sort_col_prev,
|
||||
sort_reverse,
|
||||
|
|
@ -126,6 +127,7 @@ const default_bindings = [_]Binding{
|
|||
.{ .action = .chart_timeframe_next, .key = .{ .codepoint = ']' } },
|
||||
.{ .action = .chart_timeframe_prev, .key = .{ .codepoint = '[' } },
|
||||
.{ .action = .history_metric_next, .key = .{ .codepoint = 'm' } },
|
||||
.{ .action = .history_resolution_next, .key = .{ .codepoint = 't' } },
|
||||
.{ .action = .sort_col_next, .key = .{ .codepoint = '>' } },
|
||||
.{ .action = .sort_col_prev, .key = .{ .codepoint = '<' } },
|
||||
.{ .action = .sort_reverse, .key = .{ .codepoint = 'o' } },
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue