diff --git a/build.zig.zon b/build.zig.zon index 88dfbc9..738b33b 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -13,8 +13,8 @@ .hash = "z2d-0.10.0-j5P_Hu-6FgBsZNgwphIqh17jDnj8_yPtD8yzjO6PpHRQ", }, .srf = .{ - .url = "git+https://git.lerch.org/lobo/srf.git#95036e83e26bb885641c62aaf1e26dbfbb147ea9", - .hash = "srf-0.0.0-qZj575xZAQB4wzO6J8wf0hBFTZMDjCfFFCtHx6BCQifK", + .url = "git+https://git.lerch.org/lobo/srf.git#8e12b7396afc1bcbc4e2a3f19d8725a82b71b27e", + .hash = "srf-0.0.0-qZj573V9AQBJTR8ehcnA6KW_wb6cdkJZtFZGq87b8dAJ", }, }, .paths = .{ diff --git a/src/analytics/analysis.zig b/src/analytics/analysis.zig index b191cfe..4b6bfd7 100644 --- a/src/analytics/analysis.zig +++ b/src/analytics/analysis.zig @@ -78,7 +78,7 @@ pub fn parseAccountsFile(allocator: std.mem.Allocator, data: []const u8) !Accoun const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; defer parsed.deinit(); - for (parsed.records.items) |record| { + for (parsed.records) |record| { const entry = record.to(AccountTaxEntry) catch continue; try entries.append(allocator, .{ .account = try allocator.dupe(u8, entry.account), diff --git a/src/cache/store.zig b/src/cache/store.zig index c78c4b2..338b9f7 100644 --- a/src/cache/store.zig +++ b/src/cache/store.zig @@ -129,18 +129,19 @@ pub const Store = struct { /// - Negative cache entries (# fetch_failed) are always fresh. /// - Data with `#!expires=` is fresh if the SRF library says so. /// - Data without expiry metadata is considered stale (triggers re-fetch). + /// Uses the SRF iterator to read only the header directives without parsing any records. 
pub fn isFreshData(data: []const u8, allocator: std.mem.Allocator) bool { // Negative cache entry -- always fresh if (std.mem.indexOf(u8, data, "# fetch_failed")) |_| return true; var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{}) catch return false; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{}) catch return false; + defer it.deinit(); // No expiry directive → stale (legacy file, trigger re-fetch + rewrite) - if (parsed.expires == null) return false; + if (it.expires == null) return false; - return parsed.isFresh(); + return it.isFresh(); } /// Clear all cached data for a symbol. @@ -232,11 +233,11 @@ pub const Store = struct { errdefer candles.deinit(allocator); var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); - for (parsed.records.items) |record| { - const candle = record.to(Candle) catch continue; + while (try it.next()) |fields| { + const candle = fields.to(Candle) catch continue; try candles.append(allocator, candle); } @@ -262,13 +263,14 @@ pub const Store = struct { } /// Deserialize candle metadata from SRF data. + /// Uses the SRF iterator to read only the first record without parsing the entire file. 
pub fn deserializeCandleMeta(allocator: std.mem.Allocator, data: []const u8) !CandleMeta { var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); - if (parsed.records.items.len == 0) return error.InvalidData; - return parsed.records.items[0].to(CandleMeta) catch error.InvalidData; + const fields = (try it.next()) orelse return error.InvalidData; + return fields.to(CandleMeta) catch error.InvalidData; } /// Inline fetch metadata embedded as the first record in non-candle SRF files. @@ -278,14 +280,15 @@ pub const Store = struct { }; /// Read the `fetched_at` timestamp from the first record of an SRF file. + /// Uses the SRF iterator to read only the first record without parsing the entire file. /// Returns null if the file has no FetchMeta record or cannot be parsed. 
pub fn readFetchedAt(allocator: std.mem.Allocator, data: []const u8) ?i64 { var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return null; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return null; + defer it.deinit(); - if (parsed.records.items.len == 0) return null; - const meta = parsed.records.items[0].to(FetchMeta) catch return null; + const fields = (it.next() catch return null) orelse return null; + const meta = fields.to(FetchMeta) catch return null; return meta.fetched_at; } @@ -323,12 +326,12 @@ pub const Store = struct { } var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); - for (parsed.records.items) |record| { - var div = record.to(Dividend) catch continue; - // Dupe owned strings before parsed.deinit() frees the backing buffer + while (try it.next()) |fields| { + var div = fields.to(Dividend) catch continue; + // Dupe owned strings before iterator.deinit() frees the backing buffer if (div.currency) |c| { div.currency = allocator.dupe(u8, c) catch null; } @@ -344,11 +347,11 @@ pub const Store = struct { errdefer splits.deinit(allocator); var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); - for (parsed.records.items) |record| { - const split = record.to(Split) catch continue; + while (try it.next()) |fields| { + const split = fields.to(Split) catch continue; try splits.append(allocator, split); } @@ -373,11 +376,11 @@ pub const Store = 
struct { errdefer events.deinit(allocator); var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); - for (parsed.records.items) |record| { - var ev = record.to(EarningsEvent) catch continue; + while (try it.next()) |fields| { + var ev = fields.to(EarningsEvent) catch continue; // Recompute surprise from actual/estimate if (ev.actual != null and ev.estimate != null) { ev.surprise = ev.actual.? - ev.estimate.?; @@ -444,8 +447,8 @@ pub const Store = struct { /// Deserialize ETF profile from SRF data. pub fn deserializeEtfProfile(allocator: std.mem.Allocator, data: []const u8) !EtfProfile { var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); var profile = EtfProfile{ .symbol = "" }; var sectors: std.ArrayList(SectorWeight) = .empty; @@ -453,8 +456,8 @@ pub const Store = struct { var holdings: std.ArrayList(Holding) = .empty; errdefer holdings.deinit(allocator); - for (parsed.records.items) |record| { - const etf_rec = record.to(EtfRecord) catch continue; + while (try it.next()) |fields| { + const etf_rec = fields.to(EtfRecord) catch continue; switch (etf_rec) { .meta => |m| { profile.expense_ratio = m.expense_ratio; @@ -583,10 +586,12 @@ pub const Store = struct { } /// Deserialize options chains from SRF data. + /// Chain headers appear before their contracts in the SRF file, so a single + /// pass can assign contracts to the correct chain as they are encountered. 
pub fn deserializeOptions(allocator: std.mem.Allocator, data: []const u8) ![]OptionsChain { var reader = std.Io.Reader.fixed(data); - const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; - defer parsed.deinit(); + var it = srf.iterator(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; + defer it.deinit(); var chains: std.ArrayList(OptionsChain) = .empty; errdefer { @@ -601,26 +606,7 @@ pub const Store = struct { var exp_map = std.AutoHashMap(i32, usize).init(allocator); defer exp_map.deinit(); - // First pass: collect chain headers - for (parsed.records.items) |record| { - const opt_rec = record.to(OptionsRecord) catch continue; - switch (opt_rec) { - .chain => |ch| { - const idx = chains.items.len; - try chains.append(allocator, .{ - .underlying_symbol = try allocator.dupe(u8, ch.symbol), - .underlying_price = ch.price, - .expiration = ch.expiration, - .calls = &.{}, - .puts = &.{}, - }); - try exp_map.put(ch.expiration.days, idx); - }, - else => {}, - } - } - - // Second pass: collect contracts + // Accumulate contracts per chain var calls_map = std.AutoHashMap(usize, std.ArrayList(OptionContract)).init(allocator); defer { var iter = calls_map.valueIterator(); @@ -634,9 +620,21 @@ pub const Store = struct { puts_map.deinit(); } - for (parsed.records.items) |record| { - const opt_rec = record.to(OptionsRecord) catch continue; + // Single pass: chain headers and contracts arrive in order + while (try it.next()) |fields| { + const opt_rec = fields.to(OptionsRecord) catch continue; switch (opt_rec) { + .chain => |ch| { + const idx = chains.items.len; + try chains.append(allocator, .{ + .underlying_symbol = try allocator.dupe(u8, ch.symbol), + .underlying_price = ch.price, + .expiration = ch.expiration, + .calls = &.{}, + .puts = &.{}, + }); + try exp_map.put(ch.expiration.days, idx); + }, .call => |cf| { if (exp_map.get(cf.expiration.days)) |idx| { const entry = try 
calls_map.getOrPut(idx); @@ -651,7 +649,6 @@ pub const Store = struct { try entry.value_ptr.append(allocator, fieldsToContract(cf, .put)); } }, - .chain => {}, } } @@ -712,7 +709,7 @@ pub fn deserializePortfolio(allocator: std.mem.Allocator, data: []const u8) !Por const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; defer parsed.deinit(); - for (parsed.records.items) |record| { + for (parsed.records) |record| { var lot = Lot{ .symbol = "", .shares = 0, diff --git a/src/models/classification.zig b/src/models/classification.zig index fbc1cbb..7b46fa9 100644 --- a/src/models/classification.zig +++ b/src/models/classification.zig @@ -60,7 +60,7 @@ pub fn parseClassificationFile(allocator: std.mem.Allocator, data: []const u8) ! const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData; defer parsed.deinit(); - for (parsed.records.items) |record| { + for (parsed.records) |record| { const entry = record.to(ClassificationEntry) catch continue; try entries.append(allocator, .{ .symbol = try allocator.dupe(u8, entry.symbol), diff --git a/src/tui/keybinds.zig b/src/tui/keybinds.zig index c81d4cc..73a005c 100644 --- a/src/tui/keybinds.zig +++ b/src/tui/keybinds.zig @@ -321,7 +321,7 @@ pub fn loadFromData(allocator: std.mem.Allocator, data: []const u8) ?KeyMap { var bindings = std.ArrayList(Binding).empty; - for (parsed.records.items) |record| { + for (parsed.records) |record| { var action: ?Action = null; var key: ?KeyCombo = null; diff --git a/src/tui/theme.zig b/src/tui/theme.zig index c577ef7..ff2be6c 100644 --- a/src/tui/theme.zig +++ b/src/tui/theme.zig @@ -256,7 +256,7 @@ pub fn loadFromData(data: []const u8) ?Theme { var theme = default_theme; - for (parsed.records.items) |record| { + for (parsed.records) |record| { for (record.fields) |field| { if (field.value) |v| { const str = switch (v) {