//! SRF-backed on-disk cache: per-symbol, per-data-type files plus
//! serialization helpers for candles, dividends, splits, earnings,
//! ETF profiles, options chains, and portfolios.
const std = @import("std");
|
|
const srf = @import("srf");
|
|
const Date = @import("../models/date.zig").Date;
|
|
const Candle = @import("../models/candle.zig").Candle;
|
|
const Dividend = @import("../models/dividend.zig").Dividend;
|
|
const DividendType = @import("../models/dividend.zig").DividendType;
|
|
const Split = @import("../models/split.zig").Split;
|
|
const EarningsEvent = @import("../models/earnings.zig").EarningsEvent;
|
|
const ReportTime = @import("../models/earnings.zig").ReportTime;
|
|
const EtfProfile = @import("../models/etf_profile.zig").EtfProfile;
|
|
const Holding = @import("../models/etf_profile.zig").Holding;
|
|
const SectorWeight = @import("../models/etf_profile.zig").SectorWeight;
|
|
const Lot = @import("../models/portfolio.zig").Lot;
|
|
const Portfolio = @import("../models/portfolio.zig").Portfolio;
|
|
const OptionsChain = @import("../models/option.zig").OptionsChain;
|
|
const OptionContract = @import("../models/option.zig").OptionContract;
|
|
const ContractType = @import("../models/option.zig").ContractType;
|
|
|
|
/// Time-to-live durations (in seconds) controlling cache expiry per data type.
pub const Ttl = struct {
    const hour: i64 = 3600;
    const day: i64 = 24 * hour;

    /// Historical candles older than 1 day never expire (-1 = infinite).
    pub const candles_historical: i64 = -1;
    /// The latest day's candle refreshes every 24h.
    pub const candles_latest: i64 = day;
    /// Dividend data refreshes weekly.
    pub const dividends: i64 = 7 * day;
    /// Split data refreshes weekly.
    pub const splits: i64 = 7 * day;
    /// Options chains refresh hourly.
    pub const options: i64 = hour;
    /// Earnings refresh daily.
    pub const earnings: i64 = day;
    /// ETF profiles refresh monthly.
    pub const etf_profile: i64 = 30 * day;
};
|
|
|
|
/// The kinds of data cached per symbol, each stored in its own SRF file.
pub const DataType = enum {
    candles_daily,
    dividends,
    splits,
    options,
    earnings,
    etf_profile,
    meta,

    /// File name used for this data type inside a symbol's cache directory.
    pub fn fileName(self: DataType) []const u8 {
        return switch (self) {
            // "<tag>.srf" -- the concatenation is done at comptime for each
            // enum tag by the inline prong, yielding the same strings as an
            // explicit per-tag switch.
            inline else => |tag| @tagName(tag) ++ ".srf",
        };
    }
};
|
|
|
|
/// Persistent SRF-backed cache with per-symbol, per-data-type files.
///
/// Layout:
///   {cache_dir}/{SYMBOL}/candles_daily.srf
///   {cache_dir}/{SYMBOL}/dividends.srf
///   {cache_dir}/{SYMBOL}/meta.srf
///   ...
pub const Store = struct {
    // Root directory of the cache. Not owned by the Store; the caller keeps
    // it alive for the Store's lifetime.
    cache_dir: []const u8,
    // Allocator used for path construction and file reads.
    allocator: std.mem.Allocator,

    /// Build a Store. No I/O happens here; directories are created lazily.
    pub fn init(allocator: std.mem.Allocator, cache_dir: []const u8) Store {
        return .{
            .cache_dir = cache_dir,
            .allocator = allocator,
        };
    }
|
|
|
|
/// Create the cache directory for a symbol if it does not already exist.
pub fn ensureSymbolDir(self: *Store, symbol: []const u8) !void {
    const dir_path = try self.symbolPath(symbol, "");
    defer self.allocator.free(dir_path);
    std.fs.cwd().makePath(dir_path) catch |err| {
        // An already-existing directory is fine; anything else is a failure.
        if (err != error.PathAlreadyExists) return err;
    };
}
|
|
|
|
/// Read raw SRF file contents for a symbol and data type.
/// Returns null when no cache file exists. Caller frees the returned bytes.
pub fn readRaw(self: *Store, symbol: []const u8, data_type: DataType) !?[]const u8 {
    const max_bytes = 50 * 1024 * 1024; // cap reads of pathological cache files
    const file_path = try self.symbolPath(symbol, data_type.fileName());
    defer self.allocator.free(file_path);

    const bytes = std.fs.cwd().readFileAlloc(self.allocator, file_path, max_bytes) catch |err| {
        if (err == error.FileNotFound) return null;
        return err;
    };
    return bytes;
}
|
|
|
|
/// Write raw SRF data for a symbol and data type, creating the symbol's
/// directory first and truncating any existing file.
pub fn writeRaw(self: *Store, symbol: []const u8, data_type: DataType, data: []const u8) !void {
    try self.ensureSymbolDir(symbol);

    const file_path = try self.symbolPath(symbol, data_type.fileName());
    defer self.allocator.free(file_path);

    const file = try std.fs.cwd().createFile(file_path, .{});
    defer file.close();
    try file.writeAll(data);
}
|
|
|
|
/// Check whether raw SRF data is still fresh via its `#!expires=` directive.
/// - Negative cache entries (`# fetch_failed`) are always fresh.
/// - Data carrying `#!expires=` defers to the SRF library's freshness check.
/// - Data with no expiry metadata is treated as stale (triggers re-fetch).
pub fn isFreshData(data: []const u8, allocator: std.mem.Allocator) bool {
    // A negative entry never expires until explicitly invalidated.
    if (std.mem.indexOf(u8, data, "# fetch_failed") != null) return true;

    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{}) catch return false;
    defer parsed.deinit();

    // Legacy files without an expiry directive are stale by definition,
    // which forces a re-fetch and rewrite with proper metadata.
    return if (parsed.expires == null) false else parsed.isFresh();
}
|
|
|
|
/// Modification time (unix seconds) of a cached data file.
/// Returns null if the file does not exist or cannot be stat'ed.
pub fn getMtime(self: *Store, symbol: []const u8, data_type: DataType) ?i64 {
    const file_path = self.symbolPath(symbol, data_type.fileName()) catch return null;
    defer self.allocator.free(file_path);

    const file = std.fs.cwd().openFile(file_path, .{}) catch return null;
    defer file.close();

    const info = file.stat() catch return null;
    // stat.mtime is in nanoseconds; convert down to whole seconds.
    const seconds = @divFloor(info.mtime, std.time.ns_per_s);
    return @intCast(seconds);
}
|
|
|
|
/// Remove every cached file for a symbol (best-effort).
pub fn clearSymbol(self: *Store, symbol: []const u8) !void {
    const dir_path = try self.symbolPath(symbol, "");
    defer self.allocator.free(dir_path);
    // Deletion failures are deliberately ignored; the cache can always be
    // regenerated from upstream data.
    std.fs.cwd().deleteTree(dir_path) catch {};
}
|
|
|
|
/// Content of a negative cache entry (fetch failed, don't retry until --refresh).
pub const negative_cache_content = "#!srfv1\n# fetch_failed\n";

/// Write a negative cache entry for a symbol + data type.
/// This records that a fetch was attempted and failed, preventing repeated
/// network requests for symbols that will never resolve.
/// Cleared by --refresh (which calls clearData/invalidate).
/// Best-effort: write errors are swallowed, so a failed write simply means
/// the next run retries the fetch.
pub fn writeNegative(self: *Store, symbol: []const u8, data_type: DataType) void {
    self.writeRaw(symbol, data_type, negative_cache_content) catch {};
}
|
|
|
|
/// Check if a cached data file is a negative entry (fetch_failed marker).
/// Negative entries are always considered "fresh" -- they never expire.
/// The file must be exactly `negative_cache_content`: a longer file that
/// merely starts with the marker is real data, not a negative entry.
pub fn isNegative(self: *Store, symbol: []const u8, data_type: DataType) bool {
    const path = self.symbolPath(symbol, data_type.fileName()) catch return false;
    defer self.allocator.free(path);

    const file = std.fs.cwd().openFile(path, .{}) catch return false;
    defer file.close();

    // Read one byte past the marker length so a file that is a strict
    // superset of the marker yields n > marker len and is rejected.
    // (The previous buffer was exactly marker-sized, so any file with the
    // marker as a prefix was misclassified as negative.)
    var buf: [negative_cache_content.len + 1]u8 = undefined;
    const n = file.readAll(&buf) catch return false;
    return n == negative_cache_content.len and
        std.mem.eql(u8, buf[0..n], negative_cache_content);
}
|
|
|
|
/// Delete a single cached data file for a symbol (best-effort).
pub fn clearData(self: *Store, symbol: []const u8, data_type: DataType) void {
    if (self.symbolPath(symbol, data_type.fileName())) |file_path| {
        defer self.allocator.free(file_path);
        std.fs.cwd().deleteFile(file_path) catch {};
    } else |_| {
        // Path allocation failed; nothing to delete.
    }
}
|
|
|
|
/// Read the close price from the last candle record without parsing the
/// entire file. Seeks near the end, reads up to 256 bytes, and extracts
/// the number following `close:num:` on the last complete line.
/// Returns null if the file doesn't exist or has no candle data.
pub fn readLastClose(self: *Store, symbol: []const u8) ?f64 {
    const path = self.symbolPath(symbol, DataType.candles_daily.fileName()) catch return null;
    defer self.allocator.free(path);

    const file = std.fs.cwd().openFile(path, .{}) catch return null;
    defer file.close();

    const info = file.stat() catch return null;
    if (info.size < 20) return null; // too small to hold candle data

    // One candle line is ~100 bytes; a 256-byte tail gives ample margin.
    const tail_len: u64 = @min(256, info.size);
    file.seekTo(info.size - tail_len) catch return null;

    var tail_buf: [256]u8 = undefined;
    const n = file.readAll(tail_buf[0..@intCast(tail_len)]) catch return null;

    // Isolate the last complete line: strip trailing newlines, then take
    // everything after the previous newline (or the whole chunk).
    const body = std.mem.trimRight(u8, tail_buf[0..n], "\n");
    if (body.len == 0) return null;
    const last_line = if (std.mem.lastIndexOfScalar(u8, body, '\n')) |nl|
        body[nl + 1 ..]
    else
        body;

    // Extract the value after `close:num:`, terminated by ',' or end of line.
    const marker = "close:num:";
    const at = std.mem.indexOf(u8, last_line, marker) orelse return null;
    const rest = last_line[at + marker.len ..];
    const end = std.mem.indexOfScalar(u8, rest, ',') orelse rest.len;
    return std.fmt.parseFloat(f64, rest[0..end]) catch null;
}
|
|
|
|
/// Remove the entire cache directory tree (best-effort).
pub fn clearAll(self: *Store) !void {
    // Failures are swallowed: the cache is disposable and regenerable.
    std.fs.cwd().deleteTree(self.cache_dir) catch {};
}
|
|
|
|
// -- Serialization helpers --
|
|
|
|
/// Serialize candles to SRF compact format. Caller owns the returned slice.
pub fn serializeCandles(allocator: std.mem.Allocator, candles: []const Candle, options: srf.FormatOptions) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(Candle, allocator, candles, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize candles from SRF data. Caller owns the returned slice.
/// Records that fail to map onto Candle are skipped rather than failing
/// the whole load.
pub fn deserializeCandles(allocator: std.mem.Allocator, data: []const u8) ![]Candle {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    var out: std.ArrayList(Candle) = .empty;
    errdefer out.deinit(allocator);
    for (parsed.records.items) |rec| {
        const candle = rec.to(Candle) catch continue; // skip malformed records
        try out.append(allocator, candle);
    }
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Serialize dividends to SRF compact format. Caller owns the returned slice.
pub fn serializeDividends(allocator: std.mem.Allocator, dividends: []const Dividend, options: srf.FormatOptions) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(Dividend, allocator, dividends, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Serialize splits to SRF compact format. Caller owns the returned slice.
pub fn serializeSplits(allocator: std.mem.Allocator, splits: []const Split, options: srf.FormatOptions) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(Split, allocator, splits, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize dividends from SRF data. Caller owns the returned slice
/// (free via Dividend.freeSlice or per-item deinit). Malformed records
/// are skipped.
pub fn deserializeDividends(allocator: std.mem.Allocator, data: []const u8) ![]Dividend {
    var dividends: std.ArrayList(Dividend) = .empty;
    errdefer {
        for (dividends.items) |d| d.deinit(allocator);
        dividends.deinit(allocator);
    }

    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    for (parsed.records.items) |record| {
        var div = record.to(Dividend) catch continue;
        // Dupe owned strings before parsed.deinit() frees the backing buffer.
        // Propagate OOM instead of silently dropping the currency (the old
        // `catch null` swallowed allocation failure).
        if (div.currency) |c| {
            div.currency = try allocator.dupe(u8, c);
        }
        // If append fails, free this record's dupe -- it is not yet owned by
        // the list, so the outer errdefer would have leaked it.
        errdefer if (div.currency) |c| allocator.free(c);
        try dividends.append(allocator, div);
    }

    return dividends.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize splits from SRF data. Caller owns the returned slice.
/// Records that fail to map onto Split are skipped.
pub fn deserializeSplits(allocator: std.mem.Allocator, data: []const u8) ![]Split {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    var out: std.ArrayList(Split) = .empty;
    errdefer out.deinit(allocator);
    for (parsed.records.items) |rec| {
        const split = rec.to(Split) catch continue; // skip malformed records
        try out.append(allocator, split);
    }
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Serialize earnings events to SRF compact format. Caller owns the slice.
pub fn serializeEarnings(allocator: std.mem.Allocator, events: []const EarningsEvent, options: srf.FormatOptions) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(EarningsEvent, allocator, events, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize earnings events from SRF data. Caller owns the slice.
/// Surprise fields are recomputed from actual/estimate rather than
/// trusted from the file; malformed records are skipped.
pub fn deserializeEarnings(allocator: std.mem.Allocator, data: []const u8) ![]EarningsEvent {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    var events: std.ArrayList(EarningsEvent) = .empty;
    errdefer events.deinit(allocator);

    for (parsed.records.items) |record| {
        var ev = record.to(EarningsEvent) catch continue;
        // Recompute surprise from actual/estimate when both are present.
        if (ev.actual) |actual| {
            if (ev.estimate) |estimate| {
                ev.surprise = actual - estimate;
                // Percentage is undefined for a zero estimate.
                if (estimate != 0) {
                    ev.surprise_percent = ((actual - estimate) / @abs(estimate)) * 100.0;
                }
            }
        }
        try events.append(allocator, ev);
    }
    return events.toOwnedSlice(allocator);
}
|
|
|
|
/// SRF record types for ETF profile serialization.
/// Summary/metadata fields for the ETF as a whole; written as the first
/// record of an etf_profile.srf file.
const EtfMeta = struct {
    expense_ratio: ?f64 = null,
    net_assets: ?f64 = null,
    dividend_yield: ?f64 = null,
    portfolio_turnover: ?f64 = null,
    total_holdings: ?u32 = null,
    inception_date: ?Date = null,
    leveraged: bool = false,
};

/// Tagged union of the record kinds in an ETF profile file: one `meta`
/// record followed by any number of `sector` and `holding` records.
/// `srf_tag_field` names the SRF field that carries the tag.
const EtfRecord = union(enum) {
    pub const srf_tag_field = "type";
    meta: EtfMeta,
    sector: SectorWeight,
    holding: Holding,
};
|
|
|
|
/// Serialize an ETF profile to SRF compact format: one meta record, then
/// one record per sector weight and per holding. Caller owns the slice.
pub fn serializeEtfProfile(allocator: std.mem.Allocator, profile: EtfProfile, options: srf.FormatOptions) ![]const u8 {
    var records: std.ArrayList(EtfRecord) = .empty;
    defer records.deinit(allocator);

    const meta: EtfMeta = .{
        .expense_ratio = profile.expense_ratio,
        .net_assets = profile.net_assets,
        .dividend_yield = profile.dividend_yield,
        .portfolio_turnover = profile.portfolio_turnover,
        .total_holdings = profile.total_holdings,
        .inception_date = profile.inception_date,
        .leveraged = profile.leveraged,
    };
    try records.append(allocator, .{ .meta = meta });

    if (profile.sectors) |sectors| {
        for (sectors) |s| try records.append(allocator, .{ .sector = s });
    }
    if (profile.holdings) |holdings| {
        for (holdings) |h| try records.append(allocator, .{ .holding = h });
    }

    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(EtfRecord, allocator, records.items, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize an ETF profile from SRF data.
/// Sector names and holding symbol/name strings are duped into `allocator`;
/// the caller owns the returned profile and everything hanging off it.
pub fn deserializeEtfProfile(allocator: std.mem.Allocator, data: []const u8) !EtfProfile {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    var profile = EtfProfile{ .symbol = "" };

    var sectors: std.ArrayList(SectorWeight) = .empty;
    errdefer {
        // Free the duped strings too, not just the list storage (the
        // original errdefer leaked every duped name on an error path).
        for (sectors.items) |s| allocator.free(s.name);
        sectors.deinit(allocator);
    }
    var holdings: std.ArrayList(Holding) = .empty;
    errdefer {
        for (holdings.items) |h| {
            if (h.symbol) |s| allocator.free(s);
            allocator.free(h.name);
        }
        holdings.deinit(allocator);
    }

    for (parsed.records.items) |record| {
        const etf_rec = record.to(EtfRecord) catch continue; // skip malformed
        switch (etf_rec) {
            .meta => |m| {
                profile.expense_ratio = m.expense_ratio;
                profile.net_assets = m.net_assets;
                profile.dividend_yield = m.dividend_yield;
                profile.portfolio_turnover = m.portfolio_turnover;
                profile.total_holdings = m.total_holdings;
                profile.inception_date = m.inception_date;
                profile.leveraged = m.leveraged;
            },
            .sector => |s| {
                // Dupe before parsed.deinit() frees the backing buffer.
                const name = try allocator.dupe(u8, s.name);
                errdefer allocator.free(name);
                try sectors.append(allocator, .{ .name = name, .weight = s.weight });
            },
            .holding => |h| {
                const sym = if (h.symbol) |s| try allocator.dupe(u8, s) else null;
                // If the name dupe below fails, free the symbol dupe -- it is
                // not yet owned by the list (leaked in the original).
                errdefer if (sym) |s| allocator.free(s);
                const name = try allocator.dupe(u8, h.name);
                errdefer allocator.free(name);
                try holdings.append(allocator, .{ .symbol = sym, .name = name, .weight = h.weight });
            },
        }
    }

    // Empty lists never allocated, so leaving them un-deinit'ed is safe.
    if (sectors.items.len > 0) {
        profile.sectors = try sectors.toOwnedSlice(allocator);
    }
    if (holdings.items.len > 0) {
        profile.holdings = try holdings.toOwnedSlice(allocator);
    }

    return profile;
}
|
|
|
|
/// SRF record types for options chain serialization.
/// Header record introducing one expiration's chain.
const ChainHeader = struct {
    expiration: Date,
    symbol: []const u8,
    price: ?f64 = null,
};

/// Flattened per-contract fields. `expiration` links a contract back to
/// its ChainHeader during deserialization.
const ContractFields = struct {
    expiration: Date,
    strike: f64,
    bid: ?f64 = null,
    ask: ?f64 = null,
    last: ?f64 = null,
    volume: ?u64 = null,
    oi: ?u64 = null,
    iv: ?f64 = null,
    delta: ?f64 = null,
    gamma: ?f64 = null,
    theta: ?f64 = null,
    vega: ?f64 = null,
};

/// Tagged union of options-file record kinds; `srf_tag_field` names the
/// SRF field that carries the tag.
const OptionsRecord = union(enum) {
    pub const srf_tag_field = "type";
    chain: ChainHeader,
    call: ContractFields,
    put: ContractFields,
};
|
|
|
|
/// Flatten an OptionContract into ContractFields for serialization,
/// stamping in the owning chain's expiration date.
fn contractToFields(contract: OptionContract, expiration: Date) ContractFields {
    return .{
        .expiration = expiration,
        .strike = contract.strike,
        .bid = contract.bid,
        .ask = contract.ask,
        .last = contract.last_price,
        .volume = contract.volume,
        .oi = contract.open_interest,
        .iv = contract.implied_volatility,
        .delta = contract.delta,
        .gamma = contract.gamma,
        .theta = contract.theta,
        .vega = contract.vega,
    };
}
|
|
|
|
/// Rebuild an OptionContract from flattened ContractFields, tagging it
/// with the given contract type (call or put).
fn fieldsToContract(fields: ContractFields, contract_type: ContractType) OptionContract {
    return .{
        .contract_type = contract_type,
        .expiration = fields.expiration,
        .strike = fields.strike,
        .bid = fields.bid,
        .ask = fields.ask,
        .last_price = fields.last,
        .volume = fields.volume,
        .open_interest = fields.oi,
        .implied_volatility = fields.iv,
        .delta = fields.delta,
        .gamma = fields.gamma,
        .theta = fields.theta,
        .vega = fields.vega,
    };
}
|
|
|
|
/// Serialize options chains to SRF compact format: for each chain, one
/// header record followed by its call and put contracts. Caller owns the
/// returned slice.
pub fn serializeOptions(allocator: std.mem.Allocator, chains: []const OptionsChain, options: srf.FormatOptions) ![]const u8 {
    var records: std.ArrayList(OptionsRecord) = .empty;
    defer records.deinit(allocator);

    for (chains) |chain| {
        const header: ChainHeader = .{
            .expiration = chain.expiration,
            .symbol = chain.underlying_symbol,
            .price = chain.underlying_price,
        };
        try records.append(allocator, .{ .chain = header });
        for (chain.calls) |c| {
            try records.append(allocator, .{ .call = contractToFields(c, chain.expiration) });
        }
        for (chain.puts) |p| {
            try records.append(allocator, .{ .put = contractToFields(p, chain.expiration) });
        }
    }

    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(OptionsRecord, allocator, records.items, options)});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Deserialize options chains from SRF data.
/// Two passes over the records: first collect chain headers (keyed by
/// expiration day count), then attach call/put contracts to their chains.
/// Caller owns the returned slice and everything inside it.
pub fn deserializeOptions(allocator: std.mem.Allocator, data: []const u8) ![]OptionsChain {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    var chains: std.ArrayList(OptionsChain) = .empty;
    errdefer {
        for (chains.items) |*ch| {
            // Also free the duped underlying symbol -- the original errdefer
            // freed only calls/puts and leaked it on every error path.
            allocator.free(ch.underlying_symbol);
            allocator.free(ch.calls);
            allocator.free(ch.puts);
        }
        chains.deinit(allocator);
    }

    // Map expiration date (day count) -> index into `chains`.
    var exp_map = std.AutoHashMap(i32, usize).init(allocator);
    defer exp_map.deinit();

    // First pass: collect chain headers.
    for (parsed.records.items) |record| {
        const opt_rec = record.to(OptionsRecord) catch continue;
        switch (opt_rec) {
            .chain => |ch| {
                const idx = chains.items.len;
                // Register the index before allocating the symbol so a put
                // failure leaks nothing; a stale map entry is harmless since
                // we are erroring out anyway.
                try exp_map.put(ch.expiration.days, idx);
                const sym = try allocator.dupe(u8, ch.symbol);
                // If append fails, free the dupe here -- it is not yet owned
                // by `chains`, so the outer errdefer will not see it.
                errdefer allocator.free(sym);
                try chains.append(allocator, .{
                    .underlying_symbol = sym,
                    .underlying_price = ch.price,
                    .expiration = ch.expiration,
                    .calls = &.{},
                    .puts = &.{},
                });
            },
            else => {},
        }
    }

    // Second pass: group contracts per chain index.
    var calls_map = std.AutoHashMap(usize, std.ArrayList(OptionContract)).init(allocator);
    defer {
        var iter = calls_map.valueIterator();
        while (iter.next()) |v| v.deinit(allocator);
        calls_map.deinit();
    }
    var puts_map = std.AutoHashMap(usize, std.ArrayList(OptionContract)).init(allocator);
    defer {
        var iter = puts_map.valueIterator();
        while (iter.next()) |v| v.deinit(allocator);
        puts_map.deinit();
    }

    for (parsed.records.items) |record| {
        const opt_rec = record.to(OptionsRecord) catch continue;
        switch (opt_rec) {
            .call => |cf| if (exp_map.get(cf.expiration.days)) |idx| {
                const entry = try calls_map.getOrPut(idx);
                if (!entry.found_existing) entry.value_ptr.* = .empty;
                try entry.value_ptr.append(allocator, fieldsToContract(cf, .call));
            },
            .put => |cf| if (exp_map.get(cf.expiration.days)) |idx| {
                const entry = try puts_map.getOrPut(idx);
                if (!entry.found_existing) entry.value_ptr.* = .empty;
                try entry.value_ptr.append(allocator, fieldsToContract(cf, .put));
            },
            .chain => {},
        }
    }

    // Hand each chain its contract slices (toOwnedSlice empties the map
    // lists, so the map defers above remain safe no-ops for them).
    for (chains.items, 0..) |*chain, idx| {
        if (calls_map.getPtr(idx)) |list| chain.calls = try list.toOwnedSlice(allocator);
        if (puts_map.getPtr(idx)) |list| chain.puts = try list.toOwnedSlice(allocator);
    }

    return chains.toOwnedSlice(allocator);
}
|
|
|
|
/// Join {cache_dir}/{symbol}[/{file_name}] into an allocated path.
/// An empty file_name yields the symbol's directory path. Caller frees.
fn symbolPath(self: *Store, symbol: []const u8, file_name: []const u8) ![]const u8 {
    return if (file_name.len == 0)
        std.fs.path.join(self.allocator, &.{ self.cache_dir, symbol })
    else
        std.fs.path.join(self.allocator, &.{ self.cache_dir, symbol, file_name });
}
|
|
|
|
/// Extract the numeric payload from an SRF value; non-numeric values
/// default to 0.
fn numVal(v: srf.Value) f64 {
    return if (v == .number) v.number else 0;
}
|
|
};
|
|
|
|
// NOTE(review): this named error set appears unused within this file -- the
// deserializers return `error.InvalidData` through inferred error sets.
// Confirm no external reference before removing.
const InvalidData = error{InvalidData};
|
|
|
|
/// Serialize a portfolio (list of lots) to SRF format. Caller frees the
/// returned slice.
pub fn serializePortfolio(allocator: std.mem.Allocator, lots: []const Lot) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    try out.writer(allocator).print("{f}", .{srf.fmtFrom(Lot, allocator, lots, .{})});
    return out.toOwnedSlice(allocator);
}
|
|
|
|
/// Extract the string payload from an SRF value, or null for any other kind.
fn strVal(v: srf.Value) ?[]const u8 {
    return switch (v) {
        .string => |s| s,
        else => null,
    };
}

/// Deserialize a portfolio from SRF data. Caller owns the returned Portfolio.
/// Per-lot strings (symbol/note/account/ticker) are duped into `allocator`.
/// Records without a symbol are skipped unless they are cash lots, which get
/// a synthesized "CASH" symbol. Malformed field values are ignored.
pub fn deserializePortfolio(allocator: std.mem.Allocator, data: []const u8) !Portfolio {
    const LotType = @import("../models/portfolio.zig").LotType;
    var lots: std.ArrayList(Lot) = .empty;
    errdefer {
        for (lots.items) |lot| {
            allocator.free(lot.symbol);
            if (lot.note) |n| allocator.free(n);
            if (lot.account) |a| allocator.free(a);
            if (lot.ticker) |t| allocator.free(t);
        }
        lots.deinit(allocator);
    }

    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();

    for (parsed.records.items) |record| {
        var lot = Lot{
            .symbol = "",
            .shares = 0,
            .open_date = Date.epoch,
            .open_price = 0,
        };
        // Non-owned string views into the parse buffer; duped below before
        // parsed.deinit() frees the backing memory.
        var sym_raw: ?[]const u8 = null;
        var note_raw: ?[]const u8 = null;
        var account_raw: ?[]const u8 = null;
        var sec_type_raw: ?[]const u8 = null;
        var ticker_raw: ?[]const u8 = null;

        for (record.fields) |field| {
            const value = field.value orelse continue;
            const key = field.key;
            if (std.mem.eql(u8, key, "symbol")) {
                sym_raw = strVal(value);
            } else if (std.mem.eql(u8, key, "shares")) {
                lot.shares = Store.numVal(value);
            } else if (std.mem.eql(u8, key, "open_date")) {
                if (strVal(value)) |s| {
                    // Keep the default date on a bad value rather than
                    // failing the whole lot.
                    if (Date.parse(s)) |d| lot.open_date = d else |_| {}
                }
            } else if (std.mem.eql(u8, key, "open_price")) {
                lot.open_price = Store.numVal(value);
            } else if (std.mem.eql(u8, key, "close_date")) {
                if (strVal(value)) |s| lot.close_date = Date.parse(s) catch null;
            } else if (std.mem.eql(u8, key, "close_price")) {
                lot.close_price = Store.numVal(value);
            } else if (std.mem.eql(u8, key, "note")) {
                note_raw = strVal(value);
            } else if (std.mem.eql(u8, key, "account")) {
                account_raw = strVal(value);
            } else if (std.mem.eql(u8, key, "security_type")) {
                sec_type_raw = strVal(value);
            } else if (std.mem.eql(u8, key, "maturity_date")) {
                if (strVal(value)) |s| lot.maturity_date = Date.parse(s) catch null;
            } else if (std.mem.eql(u8, key, "rate")) {
                // Non-positive rates are treated as absent.
                const r = Store.numVal(value);
                if (r > 0) lot.rate = r;
            } else if (std.mem.eql(u8, key, "drip")) {
                switch (value) {
                    .string => |s| lot.drip = std.mem.eql(u8, s, "true") or std.mem.eql(u8, s, "1"),
                    .number => |n| lot.drip = n > 0,
                    .boolean => |b| lot.drip = b,
                    else => {},
                }
            } else if (std.mem.eql(u8, key, "ticker")) {
                ticker_raw = strVal(value);
            } else if (std.mem.eql(u8, key, "price")) {
                const p = Store.numVal(value);
                if (p > 0) lot.price = p;
            } else if (std.mem.eql(u8, key, "price_date")) {
                if (strVal(value)) |s| lot.price_date = Date.parse(s) catch null;
            }
        }

        if (sec_type_raw) |st| lot.security_type = LotType.fromString(st);

        // Cash lots don't require a symbol -- synthesize a placeholder.
        if (lot.security_type == .cash) {
            lot.symbol = try allocator.dupe(u8, sym_raw orelse "CASH");
        } else if (sym_raw) |s| {
            lot.symbol = try allocator.dupe(u8, s);
        } else continue; // non-cash lot without a symbol is unusable

        // The errdefers below fix a leak in the original: if a later dupe
        // or the append failed, earlier dupes for this not-yet-appended lot
        // were never freed (the list errdefer only covers appended lots).
        errdefer allocator.free(lot.symbol);
        if (note_raw) |n| lot.note = try allocator.dupe(u8, n);
        errdefer if (lot.note) |n| allocator.free(n);
        if (account_raw) |a| lot.account = try allocator.dupe(u8, a);
        errdefer if (lot.account) |a| allocator.free(a);
        if (ticker_raw) |t| lot.ticker = try allocator.dupe(u8, t);
        errdefer if (lot.ticker) |t| allocator.free(t);

        try lots.append(allocator, lot);
    }

    return .{
        .lots = try lots.toOwnedSlice(allocator),
        .allocator = allocator,
    };
}
|
|
|
|
// Round-trip: serialize two dividends (one regular with pay date and
// frequency, one special with only required fields) and verify every field
// survives deserialization, including the optional ones staying null.
test "dividend serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    const divs = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2024, 3, 15), .amount = 0.8325, .pay_date = Date.fromYmd(2024, 3, 28), .frequency = 4, .type = .regular },
        .{ .ex_date = Date.fromYmd(2024, 6, 14), .amount = 0.9148, .type = .special },
    };

    const data = try Store.serializeDividends(allocator, &divs, .{});
    defer allocator.free(data);

    const parsed = try Store.deserializeDividends(allocator, data);
    defer Dividend.freeSlice(allocator, parsed);

    try std.testing.expectEqual(@as(usize, 2), parsed.len);

    try std.testing.expect(parsed[0].ex_date.eql(Date.fromYmd(2024, 3, 15)));
    try std.testing.expectApproxEqAbs(@as(f64, 0.8325), parsed[0].amount, 0.0001);
    try std.testing.expect(parsed[0].pay_date != null);
    try std.testing.expect(parsed[0].pay_date.?.eql(Date.fromYmd(2024, 3, 28)));
    try std.testing.expectEqual(@as(?u8, 4), parsed[0].frequency);
    try std.testing.expectEqual(DividendType.regular, parsed[0].type);

    try std.testing.expect(parsed[1].ex_date.eql(Date.fromYmd(2024, 6, 14)));
    try std.testing.expectApproxEqAbs(@as(f64, 0.9148), parsed[1].amount, 0.0001);
    try std.testing.expect(parsed[1].pay_date == null);
    try std.testing.expectEqual(DividendType.special, parsed[1].type);
}

// Round-trip: splits have no heap-allocated fields, so the deserialized
// slice is freed directly with allocator.free.
test "split serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    const splits = [_]Split{
        .{ .date = Date.fromYmd(2020, 8, 31), .numerator = 4, .denominator = 1 },
        .{ .date = Date.fromYmd(2014, 6, 9), .numerator = 7, .denominator = 1 },
    };

    const data = try Store.serializeSplits(allocator, &splits, .{});
    defer allocator.free(data);

    const parsed = try Store.deserializeSplits(allocator, data);
    defer allocator.free(parsed);

    try std.testing.expectEqual(@as(usize, 2), parsed.len);

    try std.testing.expect(parsed[0].date.eql(Date.fromYmd(2020, 8, 31)));
    try std.testing.expectApproxEqAbs(@as(f64, 4), parsed[0].numerator, 0.001);
    try std.testing.expectApproxEqAbs(@as(f64, 1), parsed[0].denominator, 0.001);

    try std.testing.expect(parsed[1].date.eql(Date.fromYmd(2014, 6, 9)));
    try std.testing.expectApproxEqAbs(@as(f64, 7), parsed[1].numerator, 0.001);
}

// Round-trip: covers open lots, a closed lot (close_date/close_price), and
// the isOpen() distinction after deserialization.
test "portfolio serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    const lots = [_]Lot{
        .{ .symbol = "AMZN", .shares = 10, .open_date = Date.fromYmd(2022, 3, 15), .open_price = 150.25 },
        .{ .symbol = "AMZN", .shares = 5, .open_date = Date.fromYmd(2023, 6, 1), .open_price = 125.00, .close_date = Date.fromYmd(2024, 1, 15), .close_price = 185.50 },
        .{ .symbol = "VTI", .shares = 100, .open_date = Date.fromYmd(2022, 1, 10), .open_price = 220.00 },
    };

    const data = try serializePortfolio(allocator, &lots);
    defer allocator.free(data);

    var portfolio = try deserializePortfolio(allocator, data);
    defer portfolio.deinit();

    try std.testing.expectEqual(@as(usize, 3), portfolio.lots.len);

    try std.testing.expectEqualStrings("AMZN", portfolio.lots[0].symbol);
    try std.testing.expectApproxEqAbs(@as(f64, 10), portfolio.lots[0].shares, 0.01);
    try std.testing.expect(portfolio.lots[0].isOpen());

    try std.testing.expectEqualStrings("AMZN", portfolio.lots[1].symbol);
    try std.testing.expectApproxEqAbs(@as(f64, 5), portfolio.lots[1].shares, 0.01);
    try std.testing.expect(!portfolio.lots[1].isOpen());
    try std.testing.expect(portfolio.lots[1].close_date.?.eql(Date.fromYmd(2024, 1, 15)));
    try std.testing.expectApproxEqAbs(@as(f64, 185.50), portfolio.lots[1].close_price.?, 0.01);

    try std.testing.expectEqualStrings("VTI", portfolio.lots[2].symbol);
}
|