ai generated, functional, no code review yet

This commit is contained in:
Emil Lerch 2026-02-25 14:10:27 -08:00
parent 10195bfaa6
commit 18827be200
Signed by: lobo
GPG key ID: A7B62D657EF764F8
35 changed files with 9616 additions and 0 deletions

4
.gitignore vendored Normal file
View file

@ -0,0 +1,4 @@
.zig-cache/
zig-out/
.env
*.srf

3
.mise.toml Normal file
View file

@ -0,0 +1,3 @@
[tools]
zig = "0.15.2"
zls = "0.15.1"

21
LICENSE Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Emil Lerch
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

345
README.md Normal file
View file

@ -0,0 +1,345 @@
# zfin
A financial data library, CLI, and terminal UI written in Zig. Tracks portfolios, analyzes trailing returns, displays options chains, earnings history, and more -- all from the terminal.
## Quick start
```bash
# Set at least one API key (see "API keys" below)
export TWELVEDATA_API_KEY=your_key
# Build
zig build
# CLI usage
zig build run -- perf VTI # trailing returns
zig build run -- quote AAPL # real-time quote
zig build run -- options AAPL # options chains
zig build run -- earnings MSFT # earnings history
# Interactive TUI
zig build run -- i # auto-loads portfolio.srf from cwd
zig build run -- i -p portfolio.srf -w watchlist.srf
zig build run -- i -s AAPL # start with a symbol, no portfolio
```
Requires Zig 0.15.2 or later.
## Data providers
zfin aggregates data from multiple free-tier APIs. Each provider is used for the data it does best, and aggressive caching keeps usage well within free-tier limits.
### Provider summary
| Data type | Provider | Auth | Free-tier limit | Cache TTL |
|---|---|---|---|---|
| Daily candles (OHLCV) | TwelveData | `TWELVEDATA_API_KEY` | 8 req/min, 800/day | 24 hours |
| Real-time quotes | TwelveData | `TWELVEDATA_API_KEY` | 8 req/min, 800/day | Never cached |
| Dividends | Polygon | `POLYGON_API_KEY` | 5 req/min | 7 days |
| Splits | Polygon | `POLYGON_API_KEY` | 5 req/min | 7 days |
| Options chains | CBOE | None required | ~30 req/min (self-imposed) | 1 hour |
| Earnings | Finnhub | `FINNHUB_API_KEY` | 60 req/min | 24 hours |
| ETF profiles | Alpha Vantage | `ALPHAVANTAGE_API_KEY` | 25 req/day | 30 days |
### TwelveData
**Used for:** daily candles and real-time quotes.
- Endpoint: `https://api.twelvedata.com/time_series` and `/quote`
- Free tier: 8 API credits per minute, 800 per day. Each symbol in a request costs 1 credit (batch requests do NOT reduce credit cost).
- Candles are fetched with a 10-year + 60-day lookback window for trailing return calculations.
- Returns split-adjusted but NOT dividend-adjusted prices. Total returns are computed separately using Polygon dividend data.
- Quotes are never cached (always a live fetch, ~15 min delay on free tier).
### Polygon
**Used for:** dividend history and stock splits.
- Endpoints: `https://api.polygon.io/v3/reference/dividends` and `/v3/reference/splits`
- Free tier: 5 requests per minute, unlimited daily. Full historical dividend/split data.
- Dividend endpoint uses cursor-based pagination (automatically followed).
- Provides dividend type classification (regular, special, supplemental).
### CBOE
**Used for:** options chains.
- Endpoint: `https://cdn.cboe.com/api/global/delayed_quotes/options/{SYMBOL}.json`
- No API key required. Data is 15-minute delayed during market hours.
- Returns all expirations with full chains including greeks (delta, gamma, theta, vega), bid/ask, volume, open interest, and implied volatility.
- OCC option symbols are parsed to extract expiration, strike, and contract type.
### Finnhub
**Used for:** earnings calendar (historical and upcoming).
- Endpoint: `https://finnhub.io/api/v1/calendar/earnings`
- Free tier: 60 requests per minute.
- Fetches 5 years back and 1 year forward from today.
- Note: Finnhub requires TLS 1.2. Since Zig's HTTP client only supports TLS 1.3, requests to Finnhub automatically fall back to system `curl`.
### Alpha Vantage
**Used for:** ETF profiles (expense ratio, holdings, sector weights).
- Endpoint: `https://www.alphavantage.co/query?function=ETF_PROFILE`
- Free tier: 25 requests per day. Used sparingly -- ETF profiles rarely change.
## API keys
Set keys as environment variables or in a `.env` file (searched in the executable's parent directory, then cwd):
```bash
TWELVEDATA_API_KEY=your_key # Required for candles and quotes
POLYGON_API_KEY=your_key # Required for dividends/splits (total returns)
FINNHUB_API_KEY=your_key # Required for earnings data
ALPHAVANTAGE_API_KEY=your_key # Required for ETF profiles
```
The cache directory defaults to `~/.cache/zfin` and can be overridden with `ZFIN_CACHE_DIR`.
Not all keys are required. Without a key, the corresponding data simply won't be available:
| Key | Without it |
|---|---|
| `TWELVEDATA_API_KEY` | No candles, quotes, or trailing returns |
| `POLYGON_API_KEY` | No dividends -- trailing returns show price-only (no total return) |
| `FINNHUB_API_KEY` | No earnings data (tab disabled) |
| `ALPHAVANTAGE_API_KEY` | No ETF profiles |
CBOE options require no API key.
## Caching strategy
Every data fetch follows the same pattern:
1. Check local cache (`~/.cache/zfin/{SYMBOL}/{data_type}.srf`)
2. If cached file exists and is within TTL -- deserialize and return (no network)
3. Otherwise -- fetch from provider -- serialize to cache -- return
Cache files use [SRF](https://github.com/lobo/srf) (Simple Record Format), a line-oriented key-value format. Freshness is determined by file modification time vs. the TTL for that data type.
| Data type | TTL | Rationale |
|---|---|---|
| Daily candles | 24 hours | Only changes once per trading day |
| Dividends | 7 days | Declared well in advance |
| Splits | 7 days | Rare corporate events |
| Options | 1 hour | Prices change continuously during market hours |
| Earnings | 24 hours | Quarterly events, estimates update periodically |
| ETF profiles | 30 days | Holdings/weights change slowly |
| Quotes | Never cached | Intended for live price checks |
Manual refresh (`r` / `F5` in TUI) invalidates the cache for the current tab's data before re-fetching.
### Rate limiting
Each provider has a client-side token-bucket rate limiter that prevents exceeding free-tier limits:
| Provider | Rate limit |
|---|---|
| TwelveData | 8/minute |
| Polygon | 5/minute |
| Finnhub | 60/minute |
| CBOE | 30/minute |
| Alpha Vantage | 25/day |
The limiter blocks until a token is available, spreading bursts of requests automatically rather than failing with 429 errors.
## CLI commands
```
zfin <command> [args]
Commands:
perf <SYMBOL> Trailing returns (1yr/3yr/5yr/10yr, price + total)
quote <SYMBOL> Real-time quote
history <SYMBOL> Last 30 days price history
divs <SYMBOL> Dividend history with TTM yield
splits <SYMBOL> Split history
options <SYMBOL> Options chains (all expirations)
earnings <SYMBOL> Earnings history and upcoming events
etf <SYMBOL> ETF profile (expense ratio, holdings, sectors)
portfolio <FILE> Portfolio analysis from .srf file
cache stats Show cached symbols
cache clear Delete all cached data
interactive, i Launch interactive TUI
help Show usage
```
### Interactive TUI flags
```
zfin i [options]
-p, --portfolio <FILE> Portfolio file (.srf format)
-w, --watchlist <FILE> Watchlist file (default: watchlist.srf if present)
-s, --symbol <SYMBOL> Start with a specific symbol
--default-keys Print default keybindings config to stdout
--default-theme Print default theme config to stdout
```
If no portfolio or symbol is specified and `portfolio.srf` exists in the current directory, it is loaded automatically.
## Interactive TUI
The TUI has five tabs: Portfolio, Quote, Performance, Options, and Earnings.
### Tabs
**Portfolio** -- navigable list of positions with market value, gain/loss, weight, and purchase date. Multi-lot positions can be expanded to show individual lots with per-lot gain/loss, capital gains indicator (ST/LT), and account name.
**Quote** -- current price, OHLCV, daily change, and a 60-day ASCII chart with recent history table.
**Performance** -- trailing returns using two methodologies (as-of-date and month-end), matching Morningstar's "Trailing Returns" and "Performance" pages respectively. Shows price-only and total return (with dividend reinvestment) when Polygon data is available. Also shows risk metrics (volatility, Sharpe ratio, max drawdown).
**Options** -- all expirations in a navigable list. Expand any expiration to see calls and puts inline. Calls and puts sections are independently collapsible. Near-the-money filter limits strikes shown (default +/- 8, adjustable with Ctrl+1-9). ITM strikes are marked with `|`. Monthly expirations display in normal color, weeklies are dimmed.
**Earnings** -- historical and upcoming earnings events with EPS estimate/actual, surprise amount and percentage. Future events are dimmed. Tab is disabled for ETFs.
### Keybindings
All keybindings are configurable via `~/.config/zfin/keys.srf`. Generate the default config:
```bash
zfin i --default-keys > ~/.config/zfin/keys.srf
```
Default keybindings:
| Key | Action |
|---|---|
| `q`, `Ctrl+c` | Quit |
| `r`, `F5` | Refresh current tab (invalidates cache) |
| `h`, Left | Previous tab |
| `l`, Right, Tab | Next tab |
| `1`-`5` | Jump to tab |
| `j`, Down | Select next row |
| `k`, Up | Select previous row |
| `Enter` | Expand/collapse (positions, expirations, calls/puts) |
| `s` | Select symbol from portfolio for other tabs |
| `/` | Enter symbol search |
| `e` | Edit portfolio/watchlist in `$EDITOR` |
| `c` | Toggle all calls collapsed/expanded (options tab) |
| `p` | Toggle all puts collapsed/expanded (options tab) |
| `Ctrl+1`-`Ctrl+9` | Set options near-the-money filter to +/- N strikes |
| `g` | Scroll to top |
| `G` | Scroll to bottom |
| `Ctrl+d` | Half-page down |
| `Ctrl+u` | Half-page up |
| `PageDown` | Page down |
| `PageUp` | Page up |
| `?` | Help screen |
Mouse: scroll wheel navigates, left-click selects rows and switches tabs, double-click expands/collapses.
### Theme
The TUI uses a dark theme inspired by Monokai/opencode. Customize via `~/.config/zfin/theme.srf`:
```bash
zfin i --default-theme > ~/.config/zfin/theme.srf
```
Colors are specified as `#rrggbb` hex values. The theme uses RGB colors (not terminal color indices) to work correctly with transparent terminal backgrounds.
## Portfolio format
Portfolios are SRF files with one lot per line:
```
#!srfv1
symbol::VTI,shares:num:100,open_date::2024-01-15,open_price:num:220.50
symbol::AAPL,shares:num:50,open_date::2024-03-01,open_price:num:170.00
symbol::AAPL,shares:num:25,open_date::2023-06-15,open_price:num:155.00,account::Roth IRA
symbol::AMZN,shares:num:10,open_date::2022-03-15,open_price:num:150.25,close_date::2024-01-15,close_price:num:185.50
```
### Lot fields
| Field | Type | Required | Description |
|---|---|---|---|
| `symbol` | string | Yes | Ticker symbol |
| `shares` | number | Yes | Number of shares |
| `open_date` | string | Yes | Purchase date (YYYY-MM-DD) |
| `open_price` | number | Yes | Purchase price per share |
| `close_date` | string | No | Sale date (null = open lot) |
| `close_price` | number | No | Sale price per share |
| `note` | string | No | Tag or note |
| `account` | string | No | Account name (e.g. "Roth IRA", "Brokerage") |
Open lots (no `close_date`) contribute to positions. Closed lots (with `close_date` and `close_price`) show realized P&L. The `account` field is displayed in the lot detail view when a position is expanded.
### Watchlist format
A watchlist is an SRF file with just symbol fields:
```
#!srfv1
symbol::NVDA
symbol::TSLA
symbol::GOOG
```
Watchlist symbols appear at the bottom of the portfolio tab. They show the latest cached price but no position data. Press Enter or double-click to jump to the Quote tab for that symbol.
## Architecture
```
src/
root.zig Library root, exports all public types
config.zig Configuration from env vars / .env files
service.zig DataService: cache-check -> fetch -> cache -> return
models/
candle.zig OHLCV price bars
date.zig Date type with arithmetic, snapping, formatting
dividend.zig Dividend records with type classification
split.zig Stock splits
option.zig Option contracts and chains
earnings.zig Earnings events with surprise calculation
etf_profile.zig ETF profiles with holdings and sectors
portfolio.zig Lots, positions, and portfolio aggregation
quote.zig Real-time quote data
ticker_info.zig Security metadata
providers/
provider.zig Type-erased provider interface (vtable)
twelvedata.zig TwelveData: candles, quotes
polygon.zig Polygon: dividends, splits
finnhub.zig Finnhub: earnings
cboe.zig CBOE: options chains (no API key)
alphavantage.zig Alpha Vantage: ETF profiles
analytics/
performance.zig Trailing returns (as-of-date + month-end)
risk.zig Volatility, Sharpe, drawdown, portfolio summary
cache/
store.zig SRF file cache with TTL freshness checks
net/
http.zig HTTP client with retries and TLS 1.2 fallback
rate_limiter.zig Token-bucket rate limiter
cli/
main.zig CLI entry point and all commands
tui/
main.zig Interactive TUI application
keybinds.zig Configurable keybinding system
theme.zig Configurable color theme
```
### Dependencies
| Dependency | Source | Purpose |
|---|---|---|
| [SRF](https://github.com/lobo/srf) | Local (`../../srf`) | Cache file format and portfolio/watchlist parsing |
| [libvaxis](https://github.com/rockorager/libvaxis) | Git (v0.5.1) | Terminal UI rendering |
## Building
```bash
zig build # build the zfin binary
zig build test # run all tests
zig build run -- <args> # build and run
```
The compiled binary is at `zig-out/bin/zfin`.
## License
MIT

102
build.zig Normal file
View file

@ -0,0 +1,102 @@
const std = @import("std");
/// Build graph for zfin:
///   - "zfin" library module (public API, src/root.zig)
///   - "tui" module (src/tui/main.zig, depends on zfin + vaxis)
///   - unified `zfin` executable (CLI entry point importing both)
///   - `zig build run`, `zig build test`, and `zig build docs` steps
pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});
    // External dependencies
    // NOTE(review): "srf" resolves via a local relative path and "vaxis" via
    // git (see build.zig.zon) -- confirm the sibling checkout is documented.
    const srf_dep = b.dependency("srf", .{
        .target = target,
        .optimize = optimize,
    });
    const vaxis_dep = b.dependency("vaxis", .{
        .target = target,
        .optimize = optimize,
    });
    // Library module -- the public API for consumers of zfin
    // NOTE(review): no `.optimize` is set on this module (unlike the exe's
    // root module below) -- confirm intentional.
    const mod = b.addModule("zfin", .{
        .root_source_file = b.path("src/root.zig"),
        .target = target,
        .imports = &.{
            .{ .name = "srf", .module = srf_dep.module("srf") },
        },
    });
    // TUI module (imported by the unified binary)
    const tui_mod = b.addModule("tui", .{
        .root_source_file = b.path("src/tui/main.zig"),
        .target = target,
        .imports = &.{
            .{ .name = "zfin", .module = mod },
            .{ .name = "srf", .module = srf_dep.module("srf") },
            .{ .name = "vaxis", .module = vaxis_dep.module("vaxis") },
        },
    });
    // Unified executable (CLI + TUI in one binary)
    const exe = b.addExecutable(.{
        .name = "zfin",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/cli/main.zig"),
            .target = target,
            .optimize = optimize,
            .imports = &.{
                .{ .name = "zfin", .module = mod },
                .{ .name = "srf", .module = srf_dep.module("srf") },
                .{ .name = "tui", .module = tui_mod },
            },
        }),
    });
    b.installArtifact(exe);
    // Run step: `zig build run -- <args>`
    const run_step = b.step("run", "Run the zfin CLI");
    const run_cmd = b.addRunArtifact(exe);
    run_step.dependOn(&run_cmd.step);
    // Install first so the binary on disk matches what is run.
    run_cmd.step.dependOn(b.getInstallStep());
    if (b.args) |args| {
        run_cmd.addArgs(args);
    }
    // Tests: library tests, exe tests, and a separate TUI test module
    // (the TUI module is rebuilt here rather than reusing tui_mod so the
    // test build can carry its own optimize mode).
    const test_step = b.step("test", "Run all tests");
    const mod_tests = b.addTest(.{ .root_module = mod });
    test_step.dependOn(&b.addRunArtifact(mod_tests).step);
    const exe_tests = b.addTest(.{ .root_module = exe.root_module });
    test_step.dependOn(&b.addRunArtifact(exe_tests).step);
    const tui_tests = b.addTest(.{ .root_module = b.createModule(.{
        .root_source_file = b.path("src/tui/main.zig"),
        .target = target,
        .optimize = optimize,
        .imports = &.{
            .{ .name = "zfin", .module = mod },
            .{ .name = "srf", .module = srf_dep.module("srf") },
            .{ .name = "vaxis", .module = vaxis_dep.module("vaxis") },
        },
    }) });
    test_step.dependOn(&b.addRunArtifact(tui_tests).step);
    // Docs: build the library as an artifact solely to harvest its
    // emitted autodocs into zig-out/docs.
    const lib = b.addLibrary(.{
        .name = "zfin",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/root.zig"),
            .target = target,
            .optimize = optimize,
            .imports = &.{
                .{ .name = "srf", .module = srf_dep.module("srf") },
            },
        }),
    });
    const docs_step = b.step("docs", "Generate documentation");
    docs_step.dependOn(&b.addInstallDirectory(.{
        .source_dir = lib.getEmittedDocs(),
        .install_dir = .prefix,
        .install_subdir = "docs",
    }).step);
}

20
build.zig.zon Normal file
View file

@ -0,0 +1,20 @@
.{
    // Package manifest for the Zig package manager.
    .name = .zfin,
    .version = "0.0.0",
    // Generated package fingerprint; must stay stable across releases.
    .fingerprint = 0x77a9b4c7d676e027,
    .minimum_zig_version = "0.15.2",
    .dependencies = .{
        // SRF (cache/portfolio file format) -- resolved from a sibling
        // checkout on disk, not fetched remotely.
        .srf = .{
            .path = "../../srf",
        },
        // libvaxis terminal UI library, pinned to an exact commit + hash.
        .vaxis = .{
            .url = "git+https://github.com/rockorager/libvaxis.git#67bbc1ee072aa390838c66caf4ed47edee282dc4",
            .hash = "vaxis-0.5.1-BWNV_IxJCQC5OGNaXQfNnqgn9_Vku0PMgey-dplubcQK",
        },
    },
    // Files included when this package is consumed as a dependency.
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "src",
    },
}

View file

@ -0,0 +1,501 @@
const std = @import("std");
const Date = @import("../models/date.zig").Date;
const Candle = @import("../models/candle.zig").Candle;
const Dividend = @import("../models/dividend.zig").Dividend;
/// Performance calculation results, Morningstar-style.
/// Produced by the totalReturn* family below. The `from`/`to` dates are the
/// actual candle dates used after snapping, not the requested window edges.
pub const PerformanceResult = struct {
    /// Total return over the period (e.g., 0.25 = 25%)
    total_return: f64,
    /// Annualized return (for periods > 1 year)
    /// null when the measured span is under one year.
    annualized_return: ?f64,
    /// Start date used
    from: Date,
    /// End date used
    to: Date,
};
/// Compute total return from adjusted close prices.
/// Candles must be sorted by date ascending.
/// `from` snaps forward (first trading day on/after), `to` snaps backward.
/// Returns null when either endpoint cannot be matched within the snap
/// tolerance, or the start price is zero.
pub fn totalReturnFromAdjClose(candles: []const Candle, from: Date, to: Date) ?PerformanceResult {
    return totalReturnFromAdjCloseSnap(candles, from, to, .forward);
}
/// Same as totalReturnFromAdjClose but both dates snap backward
/// (last trading day on or before). Used for month-end methodology where
/// both from and to represent month-end reference dates, matching the
/// "last business day of the month" convention.
fn totalReturnFromAdjCloseBackward(candles: []const Candle, from: Date, to: Date) ?PerformanceResult {
    return totalReturnFromAdjCloseSnap(candles, from, to, .backward);
}
/// Shared implementation for the adjusted-close return variants.
/// `start_dir` controls how the start date snaps to a trading day; the end
/// date always snaps backward. Returns null if either endpoint cannot be
/// snapped within tolerance or the start price is zero.
fn totalReturnFromAdjCloseSnap(candles: []const Candle, from: Date, to: Date, start_dir: SearchDirection) ?PerformanceResult {
    const first = findNearestCandle(candles, from, start_dir) orelse return null;
    const last = findNearestCandle(candles, to, .backward) orelse return null;
    // Guard against division by zero on bad data.
    if (first.adj_close == 0) return null;
    const total = (last.adj_close / first.adj_close) - 1.0;
    const span_years = Date.yearsBetween(first.date, last.date);
    // Annualize only for spans of a year or more; shorter spans report
    // the raw total return alone.
    const annualized: ?f64 = if (span_years >= 1.0)
        std.math.pow(f64, 1.0 + total, 1.0 / span_years) - 1.0
    else
        null;
    return .{
        .total_return = total,
        .annualized_return = annualized,
        .from = first.date,
        .to = last.date,
    };
}
/// Compute total return with manual dividend reinvestment.
/// Uses raw close prices and dividend records independently.
/// Candles and dividends must be sorted by date ascending.
/// `from` snaps forward, `to` snaps backward.
/// Returns null when either endpoint cannot be snapped within tolerance
/// or the start close price is zero.
pub fn totalReturnWithDividends(
    candles: []const Candle,
    dividends: []const Dividend,
    from: Date,
    to: Date,
) ?PerformanceResult {
    return totalReturnWithDividendsSnap(candles, dividends, from, to, .forward);
}
/// Same as totalReturnWithDividends but both dates snap backward.
/// Used for the month-end methodology (last business day of the month).
fn totalReturnWithDividendsBackward(
    candles: []const Candle,
    dividends: []const Dividend,
    from: Date,
    to: Date,
) ?PerformanceResult {
    return totalReturnWithDividendsSnap(candles, dividends, from, to, .backward);
}
/// Shared implementation for the dividend-reinvestment return variants.
/// Simulates holding one share from the start candle and buying fractional
/// shares with each dividend at the close on (or just before) its ex-date.
/// Requires candles and dividends sorted ascending; the early `break`
/// relies on dividend ordering.
fn totalReturnWithDividendsSnap(
    candles: []const Candle,
    dividends: []const Dividend,
    from: Date,
    to: Date,
    start_dir: SearchDirection,
) ?PerformanceResult {
    const first = findNearestCandle(candles, from, start_dir) orelse return null;
    const last = findNearestCandle(candles, to, .backward) orelse return null;
    if (first.close == 0) return null;
    var share_count: f64 = 1.0;
    for (dividends) |payout| {
        // Skip dividends before the window; stop once past it.
        if (payout.ex_date.lessThan(first.date)) continue;
        if (last.date.lessThan(payout.ex_date)) break;
        // Reinvest at the close on or just before the ex-date; if no
        // candle is near enough, skip this dividend rather than fail.
        const px = findNearestCandle(candles, payout.ex_date, .backward) orelse continue;
        if (px.close > 0) {
            share_count += (payout.amount * share_count) / px.close;
        }
    }
    const ending_value = share_count * last.close;
    const total = (ending_value / first.close) - 1.0;
    const span_years = Date.yearsBetween(first.date, last.date);
    return .{
        .total_return = total,
        .annualized_return = if (span_years >= 1.0)
            std.math.pow(f64, 1.0 + total, 1.0 / span_years) - 1.0
        else
            null,
        .from = first.date,
        .to = last.date,
    };
}
/// Convenience: compute 1yr, 3yr, 5yr, 10yr trailing returns from adjusted close.
/// Uses the last available date as the endpoint.
/// Each field is null when the corresponding window cannot be computed
/// (insufficient history or snap failure).
pub const TrailingReturns = struct {
    one_year: ?PerformanceResult = null,
    three_year: ?PerformanceResult = null,
    five_year: ?PerformanceResult = null,
    ten_year: ?PerformanceResult = null,
};
/// Trailing returns from exact calendar date N years ago to latest candle date.
/// Start dates snap forward to the next trading day (e.g., weekend -> Monday).
/// Returns an all-null struct when no candles are available.
pub fn trailingReturns(candles: []const Candle) TrailingReturns {
    if (candles.len == 0) return .{};
    const latest = candles[candles.len - 1].date;
    var out = TrailingReturns{};
    out.one_year = totalReturnFromAdjClose(candles, latest.subtractYears(1), latest);
    out.three_year = totalReturnFromAdjClose(candles, latest.subtractYears(3), latest);
    out.five_year = totalReturnFromAdjClose(candles, latest.subtractYears(5), latest);
    out.ten_year = totalReturnFromAdjClose(candles, latest.subtractYears(10), latest);
    return out;
}
/// Same as trailingReturns but with dividend reinvestment.
/// Returns an all-null struct when no candles are available.
pub fn trailingReturnsWithDividends(
    candles: []const Candle,
    dividends: []const Dividend,
) TrailingReturns {
    if (candles.len == 0) return .{};
    const latest = candles[candles.len - 1].date;
    var out = TrailingReturns{};
    out.one_year = totalReturnWithDividends(candles, dividends, latest.subtractYears(1), latest);
    out.three_year = totalReturnWithDividends(candles, dividends, latest.subtractYears(3), latest);
    out.five_year = totalReturnWithDividends(candles, dividends, latest.subtractYears(5), latest);
    out.ten_year = totalReturnWithDividends(candles, dividends, latest.subtractYears(10), latest);
    return out;
}
/// Morningstar-style trailing returns using month-end reference dates.
/// End date = last calendar day of prior month. Start date = that month-end
/// minus N years. Both dates snap backward to the last trading day on or
/// before, matching Morningstar's "last business day of the month" convention.
pub fn trailingReturnsMonthEnd(candles: []const Candle, today: Date) TrailingReturns {
    if (candles.len == 0) return .{};
    // End reference = last day of the prior month (snaps backward to last trading day)
    const ref = today.lastDayOfPriorMonth();
    var out = TrailingReturns{};
    out.one_year = totalReturnFromAdjCloseBackward(candles, ref.subtractYears(1), ref);
    out.three_year = totalReturnFromAdjCloseBackward(candles, ref.subtractYears(3), ref);
    out.five_year = totalReturnFromAdjCloseBackward(candles, ref.subtractYears(5), ref);
    out.ten_year = totalReturnFromAdjCloseBackward(candles, ref.subtractYears(10), ref);
    return out;
}
/// Same as trailingReturnsMonthEnd but with dividend reinvestment.
/// Returns an all-null struct when no candles are available.
pub fn trailingReturnsMonthEndWithDividends(
    candles: []const Candle,
    dividends: []const Dividend,
    today: Date,
) TrailingReturns {
    if (candles.len == 0) return .{};
    const ref = today.lastDayOfPriorMonth();
    var out = TrailingReturns{};
    out.one_year = totalReturnWithDividendsBackward(candles, dividends, ref.subtractYears(1), ref);
    out.three_year = totalReturnWithDividendsBackward(candles, dividends, ref.subtractYears(3), ref);
    out.five_year = totalReturnWithDividendsBackward(candles, dividends, ref.subtractYears(5), ref);
    out.ten_year = totalReturnWithDividendsBackward(candles, dividends, ref.subtractYears(10), ref);
    return out;
}
/// Which way to snap when the requested date has no candle (non-trading day).
const SearchDirection = enum { forward, backward };
/// Maximum calendar days a snapped candle can be from the target date.
/// Covers weekends + holidays (e.g., Christmas week). Beyond this, the data
/// is likely missing and the result would be misleading.
const max_snap_days: i32 = 10;
/// Locate the candle nearest `target` in the given direction, or null when
/// no candle exists within `max_snap_days` of it. `candles` must be sorted
/// by date ascending.
fn findNearestCandle(candles: []const Candle, target: Date, direction: SearchDirection) ?Candle {
    if (candles.len == 0) return null;
    // Lower bound: smallest index whose date is >= target.
    var left: usize = 0;
    var right: usize = candles.len;
    while (left < right) {
        const probe = left + (right - left) / 2;
        if (candles[probe].date.lessThan(target)) {
            left = probe + 1;
        } else {
            right = probe;
        }
    }
    const picked: Candle = switch (direction) {
        // First candle on or after target.
        .forward => if (left < candles.len) candles[left] else return null,
        // Last candle on or before target (exact hit counts).
        .backward => if (left < candles.len and candles[left].date.eql(target))
            candles[left]
        else if (left > 0)
            candles[left - 1]
        else
            return null,
    };
    // Reject snaps farther than the tolerance in either direction.
    const offset = picked.date.days - target.days;
    if (offset > max_snap_days or offset < -max_snap_days) return null;
    return picked;
}
/// Format a return value as a percentage string (e.g., "12.34%").
/// Yields "??%" if the buffer is too small for the formatted value.
pub fn formatReturn(buf: []u8, value: f64) []const u8 {
    const pct = value * 100.0;
    return std.fmt.bufPrint(buf, "{d:.2}%", .{pct}) catch "??%";
}
test "total return simple" {
    // Flat fixture: price walks 100 -> 110 -> 120 over one year.
    const series = [_]Candle{
        .{ .date = Date.fromYmd(2024, 1, 2), .open = 100, .high = 101, .low = 99, .close = 100, .adj_close = 100, .volume = 1000 },
        .{ .date = Date.fromYmd(2024, 6, 28), .open = 110, .high = 111, .low = 109, .close = 110, .adj_close = 110, .volume = 1000 },
        .{ .date = Date.fromYmd(2024, 12, 31), .open = 120, .high = 121, .low = 119, .close = 120, .adj_close = 120, .volume = 1000 },
    };
    const res = totalReturnFromAdjClose(&series, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res != null);
    // 120/100 - 1 = 0.20
    try std.testing.expectApproxEqAbs(@as(f64, 0.20), res.?.total_return, 0.001);
}
test "total return with dividends -- single dividend" {
    // Stock at $100, pays $2 dividend, price stays $100.
    // Without reinvestment: 0% return.
    // With reinvestment: $2/$100 = 0.02 extra shares -> 1.02 * $100 / $100 - 1 = 2%
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 100),
        makeCandle(Date.fromYmd(2024, 3, 15), 100),
        makeCandle(Date.fromYmd(2024, 12, 31), 100),
    };
    const payouts = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2024, 3, 15), .amount = 2.0 },
    };
    const res = totalReturnWithDividends(&series, &payouts, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.02), res.?.total_return, 0.0001);
}
test "total return with dividends -- quarterly dividends" {
    // Stock at $100 all year, pays $1 quarterly. Each $1 reinvested at $100
    // buys 0.01 shares, so share count compounds by 1.01 per quarter:
    // 1.01^4 = 1.04060401 -> total return 4.06%.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 100),
        makeCandle(Date.fromYmd(2024, 3, 15), 100),
        makeCandle(Date.fromYmd(2024, 6, 14), 100),
        makeCandle(Date.fromYmd(2024, 9, 13), 100),
        makeCandle(Date.fromYmd(2024, 12, 13), 100),
        makeCandle(Date.fromYmd(2024, 12, 31), 100),
    };
    const payouts = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2024, 3, 15), .amount = 1.0 },
        .{ .ex_date = Date.fromYmd(2024, 6, 14), .amount = 1.0 },
        .{ .ex_date = Date.fromYmd(2024, 9, 13), .amount = 1.0 },
        .{ .ex_date = Date.fromYmd(2024, 12, 13), .amount = 1.0 },
    };
    const res = totalReturnWithDividends(&series, &payouts, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res != null);
    // (1.01)^4 - 1 = 0.04060401
    try std.testing.expectApproxEqAbs(@as(f64, 0.04060401), res.?.total_return, 0.0001);
}
test "total return with dividends -- price growth plus dividends" {
    // Start $100, end $120 (20% price return), one $3 dividend mid-year
    // at $110. Shares: 1 + 3/110; final value: (1 + 3/110) * 120;
    // total return: that value / 100 - 1 ~= 23.27%.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 100),
        makeCandle(Date.fromYmd(2024, 6, 14), 110),
        makeCandle(Date.fromYmd(2024, 12, 31), 120),
    };
    const payouts = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2024, 6, 14), .amount = 3.0 },
    };
    const res = totalReturnWithDividends(&series, &payouts, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res != null);
    const expected = (1.0 + 3.0 / 110.0) * 120.0 / 100.0 - 1.0; // 0.23272727...
    try std.testing.expectApproxEqAbs(expected, res.?.total_return, 0.0001);
}
test "annualized return -- 3 year period" {
    // 3 years: $100 -> $150. Total return = 50%.
    // Annualized = (1.50)^(1/3) - 1 ~= 14.47%
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2021, 1, 4), 100),
        makeCandle(Date.fromYmd(2024, 1, 2), 150),
    };
    const res = totalReturnFromAdjClose(&series, Date.fromYmd(2021, 1, 1), Date.fromYmd(2024, 1, 3));
    try std.testing.expect(res != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.50), res.?.total_return, 0.001);
    // Recompute the expectation with the same yearsBetween the code uses,
    // so the assertion tracks the day-count convention exactly.
    const span_years = Date.yearsBetween(Date.fromYmd(2021, 1, 4), Date.fromYmd(2024, 1, 2));
    const expected_ann = std.math.pow(f64, 1.50, 1.0 / span_years) - 1.0;
    try std.testing.expectApproxEqAbs(expected_ann, res.?.annualized_return.?, 0.0001);
}
test "findNearestCandle -- exact match" {
    // When the target date has a candle, both directions pick it.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 100),
        makeCandle(Date.fromYmd(2024, 1, 3), 101),
        makeCandle(Date.fromYmd(2024, 1, 4), 102),
    };
    const hit_fwd = findNearestCandle(&series, Date.fromYmd(2024, 1, 3), .forward).?;
    try std.testing.expect(hit_fwd.date.eql(Date.fromYmd(2024, 1, 3)));
    const hit_bwd = findNearestCandle(&series, Date.fromYmd(2024, 1, 3), .backward).?;
    try std.testing.expect(hit_bwd.date.eql(Date.fromYmd(2024, 1, 3)));
}
test "findNearestCandle -- weekend snap" {
    // Jan 4 2025 is Saturday, Jan 5 is Sunday; only Fri/Mon have candles.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2025, 1, 3), 100), // Friday
        makeCandle(Date.fromYmd(2025, 1, 6), 101), // Monday
    };
    // Forward from Saturday lands on Monday.
    const next = findNearestCandle(&series, Date.fromYmd(2025, 1, 4), .forward).?;
    try std.testing.expect(next.date.eql(Date.fromYmd(2025, 1, 6)));
    // Backward from Saturday lands on Friday.
    const prev = findNearestCandle(&series, Date.fromYmd(2025, 1, 4), .backward).?;
    try std.testing.expect(prev.date.eql(Date.fromYmd(2025, 1, 3)));
}
test "month-end trailing returns -- date windowing" {
    // Verify month-end logic uses correct reference dates.
    // "Today" = 2026-02-15, prior month end = 2026-01-31,
    // so the 1yr window is 2025-01-31 to 2026-01-31.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2025, 1, 31), 100), // Jan 31 2025 is Friday
        makeCandle(Date.fromYmd(2025, 7, 1), 110),
        makeCandle(Date.fromYmd(2026, 1, 30), 120), // Jan 31 is Sat, trading day is 30th
        makeCandle(Date.fromYmd(2026, 2, 14), 125),
    };
    const ret = trailingReturnsMonthEnd(&series, Date.fromYmd(2026, 2, 15));
    // End reference Jan 31 2026 snaps backward to Jan 30; start Jan 31 2025
    // matches exactly. Return = 120/100 - 1 = 20%.
    try std.testing.expect(ret.one_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.20), ret.one_year.?.total_return, 0.001);
}
test "month-end trailing returns -- weekend start snaps backward" {
    // When the start month-end falls on a weekend, it should snap BACKWARD
    // to the last trading day (Friday), not forward to Monday.
    // This matches Morningstar's "last business day of the month" convention.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2016, 1, 29), 100), // Friday (last biz day of Jan 2016)
        makeCandle(Date.fromYmd(2016, 2, 1), 95), // Monday (NOT what we want)
        makeCandle(Date.fromYmd(2026, 1, 30), 240), // End: Friday (last biz day of Jan 2026)
    };
    // Jan 31 2016 is Sunday -> snaps back to Jan 29 (Friday).
    // Jan 31 2026 is Saturday -> snaps back to Jan 30 (Friday).
    // Return = 240/100 - 1 = 140%.
    const result = trailingReturnsMonthEnd(&series, Date.fromYmd(2026, 2, 15));
    try std.testing.expect(result.ten_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 1.40), result.ten_year.?.total_return, 0.001);
    // The chosen start must be the Friday, not the following Monday.
    try std.testing.expect(result.ten_year.?.from.eql(Date.fromYmd(2016, 1, 29)));
}
test "dividends outside window are excluded" {
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 100),
        makeCandle(Date.fromYmd(2024, 6, 14), 100),
        makeCandle(Date.fromYmd(2024, 12, 31), 100),
    };
    // Only the middle payout falls inside the 2024 window.
    const payouts = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2023, 12, 15), .amount = 5.0 }, // before window
        .{ .ex_date = Date.fromYmd(2024, 6, 14), .amount = 2.0 }, // inside
        .{ .ex_date = Date.fromYmd(2025, 3, 15), .amount = 5.0 }, // after window
    };
    const window_start = Date.fromYmd(2024, 1, 1);
    const window_end = Date.fromYmd(2025, 1, 1);
    const res = totalReturnWithDividends(&series, &payouts, window_start, window_end);
    try std.testing.expect(res != null);
    // Flat price, so return is purely the $2 dividend: 2/100 = 2%.
    try std.testing.expectApproxEqAbs(@as(f64, 0.02), res.?.total_return, 0.0001);
}
test "zero price candle returns null" {
    // A zero start price makes the return undefined; expect null.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2024, 1, 2), 0),
        makeCandle(Date.fromYmd(2024, 12, 31), 100),
    };
    const res = totalReturnFromAdjClose(&series, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res == null);
}
test "empty candles returns null" {
    // No data at all -> no computable return.
    const series = [_]Candle{};
    const res = totalReturnFromAdjClose(&series, Date.fromYmd(2024, 1, 1), Date.fromYmd(2025, 1, 1));
    try std.testing.expect(res == null);
}
// Test helper: a flat candle where every price field equals `price`.
fn makeCandle(date: Date, price: f64) Candle {
    return .{
        .date = date,
        .open = price,
        .high = price,
        .low = price,
        .close = price,
        .adj_close = price,
        .volume = 1000,
    };
}
// Morningstar reference data, captured 2026-02-24.
//
// AMZN Trailing Returns (as-of-date, from morningstar.com/stocks/xnas/amzn/trailing-returns):
// Day end 2026-02-24: 1yr=-1.95% 3yr=30.66% 5yr=5.71% 10yr=22.37%
// AMZN has no dividends, so price return = total return.
//
// VTI Trailing Returns (as-of-date, from morningstar.com/etfs/arcx/vti/trailing-returns):
// Day end 2026-02-24: 1yr=16.62% 3yr=21.01% 5yr=12.03% 10yr=15.10% (price)
//
// VTI Performance (month-end, from morningstar.com/etfs/arcx/vti/performance):
// Month-end Jan 31: 10yr total=15.10% 3yr total=20.20% (NAV ~20.24%)
test "as-of-date trailing returns -- AMZN vs Morningstar" {
    // Real AMZN split-adjusted closing prices from Twelve Data.
    // AMZN pays no dividends, so adj_close == close.
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2016, 2, 24), 27.702), // 10yr start
        makeCandle(Date.fromYmd(2021, 2, 24), 157.9765), // 5yr start
        makeCandle(Date.fromYmd(2023, 2, 24), 93.50), // 3yr start
        makeCandle(Date.fromYmd(2025, 2, 24), 212.71), // 1yr start
        makeCandle(Date.fromYmd(2026, 2, 24), 208.56), // end (latest close)
    };
    const result = trailingReturns(&series);
    // 1yr total: 208.56 / 212.71 - 1 = -1.95%.
    try std.testing.expect(result.one_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, -0.0195), result.one_year.?.total_return, 0.001);
    // Annualized figures; Morningstar reference values per the note above.
    try std.testing.expect(result.three_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.3066), result.three_year.?.annualized_return.?, 0.002);
    try std.testing.expect(result.five_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.0571), result.five_year.?.annualized_return.?, 0.002);
    try std.testing.expect(result.ten_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.2237), result.ten_year.?.annualized_return.?, 0.002);
}
test "as-of-date vs month-end -- different results from same data" {
    // Demonstrates that as-of-date and month-end give different results
    // when the latest close differs significantly from the month-end close.
    //
    // "Today" = 2026-02-25, month-end = Jan 31 2026.
    // As-of end = Feb 24 (latest candle); month-end end = Jan 30 (snap from Jan 31 Sat).
    const series = [_]Candle{
        makeCandle(Date.fromYmd(2025, 1, 31), 100), // month-end 1yr start (Friday)
        makeCandle(Date.fromYmd(2025, 2, 24), 100), // as-of 1yr start
        makeCandle(Date.fromYmd(2025, 7, 1), 110),
        makeCandle(Date.fromYmd(2026, 1, 30), 115), // month-end end (Friday, Jan 31 is Sat)
        makeCandle(Date.fromYmd(2026, 2, 24), 120), // as-of end (latest)
    };
    // As-of-date: end=Feb 24 ($120), start=Feb 24 prior year ($100) -> 20%.
    const as_of = trailingReturns(&series);
    try std.testing.expect(as_of.one_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.20), as_of.one_year.?.total_return, 0.001);
    // Month-end: end=Jan 30 ($115), start=Jan 31 ($100) -> 15%.
    const month_end = trailingReturnsMonthEnd(&series, Date.fromYmd(2026, 2, 25));
    try std.testing.expect(month_end.one_year != null);
    try std.testing.expectApproxEqAbs(@as(f64, 0.15), month_end.one_year.?.total_return, 0.001);
}

218
src/analytics/risk.zig Normal file
View file

@ -0,0 +1,218 @@
const std = @import("std");
const Candle = @import("../models/candle.zig").Candle;
const Date = @import("../models/date.zig").Date;
/// Daily return series statistics, as produced by `computeRisk`.
pub const RiskMetrics = struct {
/// Annualized standard deviation of returns
volatility: f64,
/// Sharpe ratio (assuming risk-free rate of ~4.5% -- current T-bill)
sharpe: f64,
/// Maximum drawdown as a positive decimal (e.g., 0.30 = 30% drawdown)
max_drawdown: f64,
/// Start date of max drawdown period (null when the series never drew down)
drawdown_start: ?Date = null,
/// Trough date of max drawdown (null when the series never drew down)
drawdown_trough: ?Date = null,
/// Number of daily returns used
sample_size: usize,
};
// Constants for the Sharpe calculation and annualization.
// NOTE(review): 4.5% is a point-in-time T-bill proxy -- revisit periodically.
const risk_free_annual = 0.045; // ~4.5% annualized, current T-bill proxy
const trading_days_per_year: f64 = 252.0;
/// Compute risk metrics from a series of daily candles.
/// Candles must be sorted by date ascending.
///
/// Returns null when fewer than ~1 month of candles is supplied, or when
/// no consecutive pair of candles has positive prices (all-bad data).
pub fn computeRisk(candles: []const Candle) ?RiskMetrics {
    if (candles.len < 21) return null; // need at least ~1 month

    // Accumulators for simple (arithmetic) daily returns.
    // Pairs with non-positive prices are skipped AND excluded from the
    // count, so the mean/variance are taken over valid observations only.
    // (Previously skipped pairs still inflated the divisor, biasing both
    // statistics and over-reporting sample_size.)
    var valid: usize = 0;
    var sum: f64 = 0;
    var sum_sq: f64 = 0;

    // Drawdown tracking: running peak and the date it was set.
    var peak: f64 = candles[0].close;
    var max_dd: f64 = 0;
    var dd_start: ?Date = null;
    var dd_trough: ?Date = null;
    var current_dd_start: Date = candles[0].date;

    for (1..candles.len) |i| {
        const prev = candles[i - 1].close;
        const curr = candles[i].close;
        // Skip bad/missing data points entirely.
        if (prev <= 0 or curr <= 0) continue;
        const ret = (curr / prev) - 1.0;
        valid += 1;
        sum += ret;
        sum_sq += ret * ret;
        // A new all-time high resets the candidate drawdown start.
        if (curr > peak) {
            peak = curr;
            current_dd_start = candles[i].date;
        }
        const dd = (peak - curr) / peak;
        if (dd > max_dd) {
            max_dd = dd;
            dd_start = current_dd_start;
            dd_trough = candles[i].date;
        }
    }
    if (valid == 0) return null;

    // Population mean/variance over the valid daily returns.
    const n_f: f64 = @floatFromInt(valid);
    const mean = sum / n_f;
    const variance = (sum_sq / n_f) - (mean * mean);
    const daily_vol = @sqrt(@max(variance, 0)); // clamp tiny negative fp error
    const annual_vol = daily_vol * @sqrt(trading_days_per_year);
    // Simple (linear) annualization of the mean daily return.
    const annual_return = mean * trading_days_per_year;
    const sharpe = if (annual_vol > 0) (annual_return - risk_free_annual) / annual_vol else 0;
    return .{
        .volatility = annual_vol,
        .sharpe = sharpe,
        .max_drawdown = max_dd,
        .drawdown_start = dd_start,
        .drawdown_trough = dd_trough,
        .sample_size = valid,
    };
}
/// Portfolio-level metrics computed from weighted position data.
/// Owns `allocations`; release with `deinit` using the same allocator
/// that produced the summary.
pub const PortfolioSummary = struct {
/// Total market value of open positions
total_value: f64,
/// Total cost basis of open positions
total_cost: f64,
/// Total unrealized P&L
unrealized_pnl: f64,
/// Total unrealized return (decimal)
unrealized_return: f64,
/// Total realized P&L from closed lots
realized_pnl: f64,
/// Per-symbol breakdown
allocations: []Allocation,
/// Free the allocation slice.
pub fn deinit(self: *PortfolioSummary, allocator: std.mem.Allocator) void {
allocator.free(self.allocations);
}
};
/// Per-symbol snapshot within a portfolio summary.
/// NOTE(review): `symbol` is not duplicated by portfolioSummary -- it
/// borrows from the source Position; confirm the positions outlive the
/// summary.
pub const Allocation = struct {
symbol: []const u8,
shares: f64,
/// Average cost per share
avg_cost: f64,
current_price: f64,
/// shares * current_price
market_value: f64,
cost_basis: f64,
weight: f64, // fraction of total portfolio
unrealized_pnl: f64,
unrealized_return: f64,
};
/// Compute portfolio summary given positions and current prices.
/// `prices` maps symbol -> current price.
///
/// Positions without a quoted price are excluded from market value, cost
/// basis, and the allocation table (they cannot be valued), but their
/// realized P&L still counts. Closed positions (shares <= 0) likewise
/// contribute realized P&L only.
pub fn portfolioSummary(
    allocator: std.mem.Allocator,
    positions: []const @import("../models/portfolio.zig").Position,
    prices: std.StringHashMap(f64),
) !PortfolioSummary {
    var allocs = std.ArrayList(Allocation).empty;
    errdefer allocs.deinit(allocator);
    var total_value: f64 = 0;
    var total_cost: f64 = 0;
    var total_realized: f64 = 0;
    for (positions) |pos| {
        // Realized P&L is independent of current pricing and of whether
        // the position is still open, so tally it before any skips.
        // (Previously, closed or unpriced positions were dropped from the
        // realized total, contradicting "realized P&L from closed lots".)
        total_realized += pos.realized_pnl;
        if (pos.shares <= 0) continue;
        const price = prices.get(pos.symbol) orelse continue;
        const mv = pos.shares * price;
        total_value += mv;
        total_cost += pos.total_cost;
        try allocs.append(allocator, .{
            .symbol = pos.symbol,
            .shares = pos.shares,
            .avg_cost = pos.avg_cost,
            .current_price = price,
            .market_value = mv,
            .cost_basis = pos.total_cost,
            .weight = 0, // filled below once total_value is known
            .unrealized_pnl = mv - pos.total_cost,
            .unrealized_return = if (pos.total_cost > 0) (mv / pos.total_cost) - 1.0 else 0,
        });
    }
    // Fill weights now that the portfolio total is known.
    if (total_value > 0) {
        for (allocs.items) |*a| {
            a.weight = a.market_value / total_value;
        }
    }
    return .{
        .total_value = total_value,
        .total_cost = total_cost,
        .unrealized_pnl = total_value - total_cost,
        .unrealized_return = if (total_cost > 0) (total_value / total_cost) - 1.0 else 0,
        .realized_pnl = total_realized,
        .allocations = try allocs.toOwnedSlice(allocator),
    };
}
test "risk metrics basic" {
    // Price series: $100 climbing $1/day for 60 days.
    var series: [60]Candle = undefined;
    var day: usize = 0;
    while (day < 60) : (day += 1) {
        const px: f64 = 100.0 + @as(f64, @floatFromInt(day));
        series[day] = .{
            .date = Date.fromYmd(2024, 1, 2).addDays(@intCast(day)),
            .open = px,
            .high = px,
            .low = px,
            .close = px,
            .adj_close = px,
            .volume = 1000,
        };
    }
    const maybe_metrics = computeRisk(&series);
    try std.testing.expect(maybe_metrics != null);
    const m = maybe_metrics.?;
    // Strictly rising prices -> no drawdown at all.
    try std.testing.expectApproxEqAbs(@as(f64, 0), m.max_drawdown, 0.001);
    // Positive drift -> positive Sharpe and nonzero volatility.
    try std.testing.expect(m.sharpe > 0);
    try std.testing.expect(m.volatility > 0);
    try std.testing.expectEqual(@as(usize, 59), m.sample_size);
}
test "max drawdown" {
    // Jan 2024 trading days paired with prices: peak 120 on Jan 4,
    // trough 90 on Jan 8, then a recovery past the old peak.
    const days = [_]u8{ 2, 3, 4, 5, 8, 9, 10, 11, 12, 15, 16, 17, 18, 19, 22, 23, 24, 25, 26, 29, 30 };
    const prices = [_]f64{ 100, 110, 120, 100, 90, 95, 100, 105, 110, 115, 118, 120, 122, 125, 128, 130, 132, 135, 137, 140, 142 };
    var series: [days.len]Candle = undefined;
    for (days, prices, 0..) |d, p, idx| {
        series[idx] = makeCandle(Date.fromYmd(2024, 1, d), p);
    }
    const metrics = computeRisk(&series);
    try std.testing.expect(metrics != null);
    // Max drawdown: (120 - 90) / 120 = 0.25
    try std.testing.expectApproxEqAbs(@as(f64, 0.25), metrics.?.max_drawdown, 0.001);
    try std.testing.expect(metrics.?.drawdown_trough.?.eql(Date.fromYmd(2024, 1, 8)));
}
// Test helper: a flat candle where every price field equals `price`.
fn makeCandle(date: Date, price: f64) Candle {
    return .{
        .date = date,
        .open = price,
        .high = price,
        .low = price,
        .close = price,
        .adj_close = price,
        .volume = 1000,
    };
}

995
src/cache/store.zig vendored Normal file
View file

@ -0,0 +1,995 @@
const std = @import("std");
const srf = @import("srf");
const Date = @import("../models/date.zig").Date;
const Candle = @import("../models/candle.zig").Candle;
const Dividend = @import("../models/dividend.zig").Dividend;
const DividendType = @import("../models/dividend.zig").DividendType;
const Split = @import("../models/split.zig").Split;
const EarningsEvent = @import("../models/earnings.zig").EarningsEvent;
const ReportTime = @import("../models/earnings.zig").ReportTime;
const EtfProfile = @import("../models/etf_profile.zig").EtfProfile;
const Holding = @import("../models/etf_profile.zig").Holding;
const SectorWeight = @import("../models/etf_profile.zig").SectorWeight;
const Lot = @import("../models/portfolio.zig").Lot;
const Portfolio = @import("../models/portfolio.zig").Portfolio;
const OptionsChain = @import("../models/option.zig").OptionsChain;
const OptionContract = @import("../models/option.zig").OptionContract;
/// TTL durations in seconds for cache expiry.
/// Negative values mean "never expires" (see `Store.isFresh`).
pub const Ttl = struct {
/// Historical candles older than 1 day never expire
pub const candles_historical: i64 = -1; // infinite
/// Latest day's candle refreshes every 24h
pub const candles_latest: i64 = 24 * 3600;
/// Dividend data refreshes weekly
pub const dividends: i64 = 7 * 24 * 3600;
/// Split data refreshes weekly
pub const splits: i64 = 7 * 24 * 3600;
/// Options chains refresh hourly
pub const options: i64 = 3600;
/// Earnings refresh daily
pub const earnings: i64 = 24 * 3600;
/// ETF profiles refresh monthly
pub const etf_profile: i64 = 30 * 24 * 3600;
};
/// Cache data categories; each maps to one SRF file per symbol.
pub const DataType = enum {
candles_daily,
dividends,
splits,
options,
earnings,
etf_profile,
meta,
/// File name for this data type inside a symbol's cache directory.
/// Keep in sync with the on-disk layout documented on `Store`.
pub fn fileName(self: DataType) []const u8 {
return switch (self) {
.candles_daily => "candles_daily.srf",
.dividends => "dividends.srf",
.splits => "splits.srf",
.options => "options.srf",
.earnings => "earnings.srf",
.etf_profile => "etf_profile.srf",
.meta => "meta.srf",
};
}
};
/// Persistent SRF-backed cache with per-symbol, per-data-type files.
///
/// Layout:
/// {cache_dir}/{SYMBOL}/candles_daily.srf
/// {cache_dir}/{SYMBOL}/dividends.srf
/// {cache_dir}/{SYMBOL}/meta.srf
/// ...
pub const Store = struct {
cache_dir: []const u8,
allocator: std.mem.Allocator,
/// Create a store rooted at `cache_dir`. Neither argument is copied;
/// the caller keeps ownership of `cache_dir`.
pub fn init(allocator: std.mem.Allocator, cache_dir: []const u8) Store {
    return .{ .cache_dir = cache_dir, .allocator = allocator };
}
/// Ensure the cache directory for a symbol exists, creating
/// intermediate directories as needed.
pub fn ensureSymbolDir(self: *Store, symbol: []const u8) !void {
const path = try self.symbolPath(symbol, "");
defer self.allocator.free(path);
std.fs.cwd().makePath(path) catch |err| switch (err) {
// An already-existing tree is success, not an error.
error.PathAlreadyExists => {},
else => return err,
};
}
/// Read raw SRF file contents for a symbol and data type.
/// Returns null if the file does not exist; other I/O errors propagate.
/// Caller owns the returned buffer (capped at 50 MiB).
pub fn readRaw(self: *Store, symbol: []const u8, data_type: DataType) !?[]const u8 {
    const file_path = try self.symbolPath(symbol, data_type.fileName());
    defer self.allocator.free(file_path);
    const contents = std.fs.cwd().readFileAlloc(self.allocator, file_path, 50 * 1024 * 1024) catch |err| {
        if (err == error.FileNotFound) return null;
        return err;
    };
    return contents;
}
/// Write raw SRF data for a symbol and data type, creating the
/// symbol directory first. Any existing file is truncated.
pub fn writeRaw(self: *Store, symbol: []const u8, data_type: DataType, data: []const u8) !void {
    try self.ensureSymbolDir(symbol);
    const file_path = try self.symbolPath(symbol, data_type.fileName());
    defer self.allocator.free(file_path);
    const out = try std.fs.cwd().createFile(file_path, .{});
    defer out.close();
    try out.writeAll(data);
}
/// Check if a cached data file exists and is within its TTL.
/// A negative TTL means "never expires" -- only existence is checked.
pub fn isFresh(self: *Store, symbol: []const u8, data_type: DataType, ttl_seconds: i64) !bool {
    const file_path = try self.symbolPath(symbol, data_type.fileName());
    defer self.allocator.free(file_path);
    if (ttl_seconds < 0) {
        // Infinite TTL: existence alone makes it fresh.
        std.fs.cwd().access(file_path, .{}) catch return false;
        return true;
    }
    const file = std.fs.cwd().openFile(file_path, .{}) catch return false;
    defer file.close();
    const stat = file.stat() catch return false;
    // stat.mtime is nanoseconds; compare ages in whole seconds.
    const mtime_s: i64 = @intCast(@divFloor(stat.mtime, std.time.ns_per_s));
    return (std.time.timestamp() - mtime_s) < ttl_seconds;
}
/// Get the modification time (unix seconds) of a cached data file.
/// Returns null if the file does not exist or cannot be stat'd.
pub fn getMtime(self: *Store, symbol: []const u8, data_type: DataType) ?i64 {
    const file_path = self.symbolPath(symbol, data_type.fileName()) catch return null;
    defer self.allocator.free(file_path);
    const file = std.fs.cwd().openFile(file_path, .{}) catch return null;
    defer file.close();
    const info = file.stat() catch return null;
    // info.mtime is nanoseconds; report whole seconds.
    const seconds = @divFloor(info.mtime, std.time.ns_per_s);
    return @intCast(seconds);
}
/// Clear all cached data for a symbol by removing its directory tree.
/// Deletion failures are ignored (best-effort cleanup).
pub fn clearSymbol(self: *Store, symbol: []const u8) !void {
    const dir_path = try self.symbolPath(symbol, "");
    defer self.allocator.free(dir_path);
    std.fs.cwd().deleteTree(dir_path) catch {};
}
/// Clear one cached data type for a symbol (best-effort; errors ignored).
pub fn clearData(self: *Store, symbol: []const u8, data_type: DataType) void {
    const file_path = self.symbolPath(symbol, data_type.fileName()) catch return;
    defer self.allocator.free(file_path);
    std.fs.cwd().deleteFile(file_path) catch {};
}
/// Remove the entire cache directory tree (best-effort; errors ignored).
pub fn clearAll(self: *Store) !void {
    std.fs.cwd().deleteTree(self.cache_dir) catch {};
}
// -- Serialization helpers --
/// Serialize candles to SRF compact format, one record per candle.
/// Caller owns the returned buffer.
pub fn serializeCandles(allocator: std.mem.Allocator, candles: []const Candle) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const writer = buf.writer(allocator);
// Version header expected by srf.parse.
try writer.writeAll("#!srfv1\n");
for (candles) |c| {
var date_buf: [10]u8 = undefined;
const date_str = c.date.format(&date_buf);
// Field names/order must match deserializeCandles exactly.
try writer.print(
"date::{s},open:num:{d},high:num:{d},low:num:{d},close:num:{d},adj_close:num:{d},volume:num:{d}\n",
.{ date_str, c.open, c.high, c.low, c.close, c.adj_close, c.volume },
);
}
return buf.toOwnedSlice(allocator);
}
/// Deserialize candles from SRF data (the serializeCandles format).
/// Unknown fields are ignored; unparsable dates skip that field only,
/// leaving the zero-initialized default. Caller owns the returned slice.
pub fn deserializeCandles(allocator: std.mem.Allocator, data: []const u8) ![]Candle {
var candles: std.ArrayList(Candle) = .empty;
errdefer candles.deinit(allocator);
var reader = std.Io.Reader.fixed(data);
// alloc_strings=false: parsed strings reference `data`, which outlives
// this function's use of them (dates are parsed before parsed.deinit()).
const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
defer parsed.deinit();
for (parsed.records.items) |record| {
// Start from a zeroed candle; fields below overwrite what's present.
var candle = Candle{
.date = Date.epoch,
.open = 0,
.high = 0,
.low = 0,
.close = 0,
.adj_close = 0,
.volume = 0,
};
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "date")) {
if (field.value) |v| {
const str = switch (v) {
.string => |s| s,
else => continue,
};
candle.date = Date.parse(str) catch continue;
}
} else if (std.mem.eql(u8, field.key, "open")) {
if (field.value) |v| candle.open = numVal(v);
} else if (std.mem.eql(u8, field.key, "high")) {
if (field.value) |v| candle.high = numVal(v);
} else if (std.mem.eql(u8, field.key, "low")) {
if (field.value) |v| candle.low = numVal(v);
} else if (std.mem.eql(u8, field.key, "close")) {
if (field.value) |v| candle.close = numVal(v);
} else if (std.mem.eql(u8, field.key, "adj_close")) {
if (field.value) |v| candle.adj_close = numVal(v);
} else if (std.mem.eql(u8, field.key, "volume")) {
// NOTE(review): @intFromFloat panics on negative/NaN volume
// values -- assumes cache files are well-formed; confirm.
if (field.value) |v| candle.volume = @intFromFloat(numVal(v));
}
}
try candles.append(allocator, candle);
}
return candles.toOwnedSlice(allocator);
}
/// Serialize dividends to SRF compact format, one record per dividend.
/// Optional fields (pay_date, frequency) are emitted only when present.
/// Caller owns the returned buffer.
pub fn serializeDividends(allocator: std.mem.Allocator, dividends: []const Dividend) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const writer = buf.writer(allocator);
try writer.writeAll("#!srfv1\n");
for (dividends) |d| {
var ex_buf: [10]u8 = undefined;
const ex_str = d.ex_date.format(&ex_buf);
try writer.print("ex_date::{s},amount:num:{d}", .{ ex_str, d.amount });
if (d.pay_date) |pd| {
var pay_buf: [10]u8 = undefined;
try writer.print(",pay_date::{s}", .{pd.format(&pay_buf)});
}
if (d.frequency) |f| {
try writer.print(",frequency:num:{d}", .{f});
}
// Distribution type stored as the enum tag name (see parseDividendTypeTag).
try writer.print(",type::{s}\n", .{@tagName(d.distribution_type)});
}
return buf.toOwnedSlice(allocator);
}
/// Serialize splits to SRF compact format, one record per split.
/// Caller owns the returned buffer.
pub fn serializeSplits(allocator: std.mem.Allocator, splits: []const Split) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const writer = buf.writer(allocator);
try writer.writeAll("#!srfv1\n");
for (splits) |s| {
var date_buf: [10]u8 = undefined;
const date_str = s.date.format(&date_buf);
// Ratio stored as numerator/denominator to avoid float rounding.
try writer.print("date::{s},numerator:num:{d},denominator:num:{d}\n", .{
date_str, s.numerator, s.denominator,
});
}
return buf.toOwnedSlice(allocator);
}
/// Deserialize dividends from SRF data (the serializeDividends format).
/// Missing/invalid optional fields are left null; out-of-range frequency
/// values (outside 1..255) are dropped. Caller owns the returned slice.
pub fn deserializeDividends(allocator: std.mem.Allocator, data: []const u8) ![]Dividend {
var dividends: std.ArrayList(Dividend) = .empty;
errdefer dividends.deinit(allocator);
var reader = std.Io.Reader.fixed(data);
const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
defer parsed.deinit();
for (parsed.records.items) |record| {
// Required fields default to zero values and are overwritten below.
var div = Dividend{
.ex_date = Date.epoch,
.amount = 0,
};
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "ex_date")) {
if (field.value) |v| {
const str = switch (v) {
.string => |s| s,
else => continue,
};
div.ex_date = Date.parse(str) catch continue;
}
} else if (std.mem.eql(u8, field.key, "amount")) {
if (field.value) |v| div.amount = numVal(v);
} else if (std.mem.eql(u8, field.key, "pay_date")) {
if (field.value) |v| {
const str = switch (v) {
.string => |s| s,
else => continue,
};
// Unparsable pay_date degrades to null rather than failing.
div.pay_date = Date.parse(str) catch null;
}
} else if (std.mem.eql(u8, field.key, "frequency")) {
if (field.value) |v| {
const n = numVal(v);
// Guard the range before the int conversion.
if (n > 0 and n <= 255) div.frequency = @intFromFloat(n);
}
} else if (std.mem.eql(u8, field.key, "type")) {
if (field.value) |v| {
const str = switch (v) {
.string => |s| s,
else => continue,
};
div.distribution_type = parseDividendTypeTag(str);
}
}
}
try dividends.append(allocator, div);
}
return dividends.toOwnedSlice(allocator);
}
/// Deserialize splits from SRF data (the serializeSplits format).
/// Records missing numeric fields keep zero defaults. Caller owns the
/// returned slice.
pub fn deserializeSplits(allocator: std.mem.Allocator, data: []const u8) ![]Split {
var splits: std.ArrayList(Split) = .empty;
errdefer splits.deinit(allocator);
var reader = std.Io.Reader.fixed(data);
const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
defer parsed.deinit();
for (parsed.records.items) |record| {
var split = Split{
.date = Date.epoch,
.numerator = 0,
.denominator = 0,
};
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "date")) {
if (field.value) |v| {
const str = switch (v) {
.string => |s| s,
else => continue,
};
split.date = Date.parse(str) catch continue;
}
} else if (std.mem.eql(u8, field.key, "numerator")) {
if (field.value) |v| split.numerator = numVal(v);
} else if (std.mem.eql(u8, field.key, "denominator")) {
if (field.value) |v| split.denominator = numVal(v);
}
}
try splits.append(allocator, split);
}
return splits.toOwnedSlice(allocator);
}
/// Serialize earnings events to SRF compact format.
/// Only present optional fields are written; surprise/surprise_percent
/// are intentionally NOT stored -- they are recomputed on load from
/// actual/estimate (see deserializeEarnings). Caller owns the buffer.
pub fn serializeEarnings(allocator: std.mem.Allocator, events: []const EarningsEvent) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const writer = buf.writer(allocator);
try writer.writeAll("#!srfv1\n");
for (events) |e| {
var date_buf: [10]u8 = undefined;
const date_str = e.date.format(&date_buf);
try writer.print("date::{s}", .{date_str});
if (e.estimate) |est| try writer.print(",estimate:num:{d}", .{est});
if (e.actual) |act| try writer.print(",actual:num:{d}", .{act});
if (e.quarter) |q| try writer.print(",quarter:num:{d}", .{q});
if (e.fiscal_year) |fy| try writer.print(",fiscal_year:num:{d}", .{fy});
if (e.revenue_actual) |ra| try writer.print(",revenue_actual:num:{d}", .{ra});
if (e.revenue_estimate) |re| try writer.print(",revenue_estimate:num:{d}", .{re});
try writer.print(",report_time::{s}\n", .{@tagName(e.report_time)});
}
return buf.toOwnedSlice(allocator);
}
/// Deserialize earnings events from SRF data (serializeEarnings format).
/// `symbol` is not stored in the file and comes back empty -- the caller
/// is expected to know which symbol's file it read. Surprise metrics are
/// recomputed from actual/estimate. Caller owns the returned slice.
pub fn deserializeEarnings(allocator: std.mem.Allocator, data: []const u8) ![]EarningsEvent {
var events: std.ArrayList(EarningsEvent) = .empty;
errdefer events.deinit(allocator);
var reader = std.Io.Reader.fixed(data);
const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
defer parsed.deinit();
for (parsed.records.items) |record| {
var ev = EarningsEvent{
.symbol = "",
.date = Date.epoch,
};
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "date")) {
if (field.value) |v| {
const str = switch (v) { .string => |s| s, else => continue };
ev.date = Date.parse(str) catch continue;
}
} else if (std.mem.eql(u8, field.key, "estimate")) {
if (field.value) |v| ev.estimate = numVal(v);
} else if (std.mem.eql(u8, field.key, "actual")) {
if (field.value) |v| ev.actual = numVal(v);
} else if (std.mem.eql(u8, field.key, "quarter")) {
if (field.value) |v| {
const n = numVal(v);
// Only Q1-Q4 are meaningful; anything else is dropped.
if (n >= 1 and n <= 4) ev.quarter = @intFromFloat(n);
}
} else if (std.mem.eql(u8, field.key, "fiscal_year")) {
if (field.value) |v| {
const n = numVal(v);
// Sanity window guards the int conversion against junk.
if (n > 1900 and n < 2200) ev.fiscal_year = @intFromFloat(n);
}
} else if (std.mem.eql(u8, field.key, "revenue_actual")) {
if (field.value) |v| ev.revenue_actual = numVal(v);
} else if (std.mem.eql(u8, field.key, "revenue_estimate")) {
if (field.value) |v| ev.revenue_estimate = numVal(v);
} else if (std.mem.eql(u8, field.key, "report_time")) {
if (field.value) |v| {
const str = switch (v) { .string => |s| s, else => continue };
ev.report_time = parseReportTimeTag(str);
}
}
}
// Recompute surprise from actual/estimate (not persisted on disk).
if (ev.actual != null and ev.estimate != null) {
ev.surprise = ev.actual.? - ev.estimate.?;
if (ev.estimate.? != 0) {
// Percent relative to |estimate| so the sign follows the miss/beat.
ev.surprise_percent = (ev.surprise.? / @abs(ev.estimate.?)) * 100.0;
}
}
try events.append(allocator, ev);
}
return events.toOwnedSlice(allocator);
}
/// Serialize ETF profile to SRF compact format.
/// Uses multiple record types distinguished by a leading `type::` field:
/// one meta record, then one record per sector and per holding.
/// NOTE(review): sector/holding names are written verbatim -- a name
/// containing the SRF delimiters (',' or ':') would corrupt the record;
/// confirm upstream data never contains them or add escaping.
pub fn serializeEtfProfile(allocator: std.mem.Allocator, profile: EtfProfile) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const writer = buf.writer(allocator);
try writer.writeAll("#!srfv1\n");
// Meta record: scalar profile fields, optionals emitted only if set.
try writer.writeAll("type::meta");
if (profile.expense_ratio) |er| try writer.print(",expense_ratio:num:{d}", .{er});
if (profile.net_assets) |na| try writer.print(",net_assets:num:{d}", .{na});
if (profile.dividend_yield) |dy| try writer.print(",dividend_yield:num:{d}", .{dy});
if (profile.portfolio_turnover) |pt| try writer.print(",portfolio_turnover:num:{d}", .{pt});
if (profile.total_holdings) |th| try writer.print(",total_holdings:num:{d}", .{th});
if (profile.inception_date) |d| {
var db: [10]u8 = undefined;
try writer.print(",inception_date::{s}", .{d.format(&db)});
}
// Boolean flag: presence of the field means true.
if (profile.leveraged) try writer.writeAll(",leveraged::yes");
try writer.writeAll("\n");
// Sector records
if (profile.sectors) |sectors| {
for (sectors) |sec| {
try writer.print("type::sector,name::{s},weight:num:{d}\n", .{ sec.sector, sec.weight });
}
}
// Holding records (symbol is optional)
if (profile.holdings) |holdings| {
for (holdings) |h| {
try writer.writeAll("type::holding");
if (h.symbol) |s| try writer.print(",symbol::{s}", .{s});
try writer.print(",name::{s},weight:num:{d}\n", .{ h.name, h.weight });
}
}
return buf.toOwnedSlice(allocator);
}
/// Deserialize ETF profile from SRF data (serializeEtfProfile format).
/// `symbol` is not stored in the file and comes back empty. Sector and
/// holding name strings are duplicated with `allocator`, so the returned
/// profile owns them; the caller is responsible for freeing them along
/// with the sectors/holdings slices.
pub fn deserializeEtfProfile(allocator: std.mem.Allocator, data: []const u8) !EtfProfile {
var reader = std.Io.Reader.fixed(data);
const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
defer parsed.deinit();
var profile = EtfProfile{ .symbol = "" };
var sectors: std.ArrayList(SectorWeight) = .empty;
errdefer sectors.deinit(allocator);
var holdings: std.ArrayList(Holding) = .empty;
errdefer holdings.deinit(allocator);
for (parsed.records.items) |record| {
// First pass over the record: find its `type::` discriminator.
var record_type: []const u8 = "";
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "type")) {
if (field.value) |v| {
record_type = switch (v) { .string => |s| s, else => "" };
}
}
}
if (std.mem.eql(u8, record_type, "meta")) {
// Scalar profile fields; absent fields stay null.
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "expense_ratio")) {
if (field.value) |v| profile.expense_ratio = numVal(v);
} else if (std.mem.eql(u8, field.key, "net_assets")) {
if (field.value) |v| profile.net_assets = numVal(v);
} else if (std.mem.eql(u8, field.key, "dividend_yield")) {
if (field.value) |v| profile.dividend_yield = numVal(v);
} else if (std.mem.eql(u8, field.key, "portfolio_turnover")) {
if (field.value) |v| profile.portfolio_turnover = numVal(v);
} else if (std.mem.eql(u8, field.key, "total_holdings")) {
if (field.value) |v| {
const n = numVal(v);
// Positivity guard protects the int conversion.
if (n > 0) profile.total_holdings = @intFromFloat(n);
}
} else if (std.mem.eql(u8, field.key, "inception_date")) {
if (field.value) |v| {
const str = switch (v) { .string => |s| s, else => continue };
profile.inception_date = Date.parse(str) catch null;
}
} else if (std.mem.eql(u8, field.key, "leveraged")) {
if (field.value) |v| {
const str = switch (v) { .string => |s| s, else => continue };
// Serializer writes "yes" only when true.
profile.leveraged = std.mem.eql(u8, str, "yes");
}
}
}
} else if (std.mem.eql(u8, record_type, "sector")) {
var name: ?[]const u8 = null;
var weight: f64 = 0;
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "name")) {
if (field.value) |v| name = switch (v) { .string => |s| s, else => null };
} else if (std.mem.eql(u8, field.key, "weight")) {
if (field.value) |v| weight = numVal(v);
}
}
// A sector without a name is dropped entirely.
if (name) |n| {
const duped = try allocator.dupe(u8, n);
try sectors.append(allocator, .{ .sector = duped, .weight = weight });
}
} else if (std.mem.eql(u8, record_type, "holding")) {
var sym: ?[]const u8 = null;
var hname: ?[]const u8 = null;
var weight: f64 = 0;
for (record.fields) |field| {
if (std.mem.eql(u8, field.key, "symbol")) {
if (field.value) |v| sym = switch (v) { .string => |s| s, else => null };
} else if (std.mem.eql(u8, field.key, "name")) {
if (field.value) |v| hname = switch (v) { .string => |s| s, else => null };
} else if (std.mem.eql(u8, field.key, "weight")) {
if (field.value) |v| weight = numVal(v);
}
}
// A holding without a name is dropped; symbol remains optional.
if (hname) |hn| {
const duped_sym = if (sym) |s| try allocator.dupe(u8, s) else null;
const duped_name = try allocator.dupe(u8, hn);
try holdings.append(allocator, .{ .symbol = duped_sym, .name = duped_name, .weight = weight });
}
}
}
// Empty lists become null rather than zero-length slices.
if (sectors.items.len > 0) {
profile.sectors = try sectors.toOwnedSlice(allocator);
} else {
sectors.deinit(allocator);
}
if (holdings.items.len > 0) {
profile.holdings = try holdings.toOwnedSlice(allocator);
} else {
holdings.deinit(allocator);
}
return profile;
}
/// Serialize options chains to SRF compact format.
/// Per chain: one `type::chain` header record, then one `type::call` /
/// `type::put` record per contract, each carrying the chain expiration
/// so contracts can be re-grouped on load. Optional greeks/quotes are
/// written only when present. Caller owns the returned buffer.
pub fn serializeOptions(allocator: std.mem.Allocator, chains: []const OptionsChain) ![]const u8 {
var buf: std.ArrayList(u8) = .empty;
errdefer buf.deinit(allocator);
const w = buf.writer(allocator);
try w.writeAll("#!srfv1\n");
for (chains) |chain| {
var exp_buf: [10]u8 = undefined;
try w.print("type::chain,expiration::{s},symbol::{s}", .{
chain.expiration.format(&exp_buf), chain.underlying_symbol,
});
if (chain.underlying_price) |p| try w.print(",price:num:{d}", .{p});
try w.writeAll("\n");
for (chain.calls) |c| {
var eb: [10]u8 = undefined;
try w.print("type::call,expiration::{s},strike:num:{d}", .{ chain.expiration.format(&eb), c.strike });
if (c.bid) |v| try w.print(",bid:num:{d}", .{v});
if (c.ask) |v| try w.print(",ask:num:{d}", .{v});
if (c.last_price) |v| try w.print(",last:num:{d}", .{v});
if (c.volume) |v| try w.print(",volume:num:{d}", .{v});
if (c.open_interest) |v| try w.print(",oi:num:{d}", .{v});
if (c.implied_volatility) |v| try w.print(",iv:num:{d}", .{v});
if (c.delta) |v| try w.print(",delta:num:{d}", .{v});
if (c.gamma) |v| try w.print(",gamma:num:{d}", .{v});
if (c.theta) |v| try w.print(",theta:num:{d}", .{v});
if (c.vega) |v| try w.print(",vega:num:{d}", .{v});
try w.writeAll("\n");
}
for (chain.puts) |p| {
var eb: [10]u8 = undefined;
try w.print("type::put,expiration::{s},strike:num:{d}", .{ chain.expiration.format(&eb), p.strike });
if (p.bid) |v| try w.print(",bid:num:{d}", .{v});
if (p.ask) |v| try w.print(",ask:num:{d}", .{v});
if (p.last_price) |v| try w.print(",last:num:{d}", .{v});
if (p.volume) |v| try w.print(",volume:num:{d}", .{v});
if (p.open_interest) |v| try w.print(",oi:num:{d}", .{v});
if (p.implied_volatility) |v| try w.print(",iv:num:{d}", .{v});
if (p.delta) |v| try w.print(",delta:num:{d}", .{v});
if (p.gamma) |v| try w.print(",gamma:num:{d}", .{v});
if (p.theta) |v| try w.print(",theta:num:{d}", .{v});
if (p.vega) |v| try w.print(",vega:num:{d}", .{v});
try w.writeAll("\n");
}
}
return buf.toOwnedSlice(allocator);
}
/// Deserialize options chains from SRF data.
/// Caller owns the returned slice plus each chain's `underlying_symbol`,
/// `calls`, and `puts` allocations. Returns error.InvalidData on parse failure.
pub fn deserializeOptions(allocator: std.mem.Allocator, data: []const u8) ![]OptionsChain {
    var reader = std.Io.Reader.fixed(data);
    // alloc_strings = false: parsed string slices point into `data`, which
    // outlives the parse; we dupe anything we keep.
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();
    var chains: std.ArrayList(OptionsChain) = .empty;
    errdefer {
        for (chains.items) |*ch| {
            // Each chain owns a dupe of its symbol (made below); free it along
            // with the contract slices so error paths don't leak. The original
            // code omitted the symbol here.
            allocator.free(ch.underlying_symbol);
            allocator.free(ch.calls);
            allocator.free(ch.puts);
        }
        chains.deinit(allocator);
    }
    // First pass: collect chain headers (expirations)
    // Second: collect calls/puts per expiration
    // Map from raw expiration string (slice into `data`) to chain index.
    var exp_map = std.StringHashMap(usize).init(allocator);
    defer exp_map.deinit();
    // Collect all chain records first
    for (parsed.records.items) |record| {
        var rec_type: []const u8 = "";
        var expiration: ?Date = null;
        var exp_str: []const u8 = "";
        var symbol: []const u8 = "";
        var price: ?f64 = null;
        for (record.fields) |field| {
            if (std.mem.eql(u8, field.key, "type")) {
                if (field.value) |v| rec_type = switch (v) { .string => |s| s, else => "" };
            } else if (std.mem.eql(u8, field.key, "expiration")) {
                if (field.value) |v| {
                    exp_str = switch (v) { .string => |s| s, else => continue };
                    expiration = Date.parse(exp_str) catch null;
                }
            } else if (std.mem.eql(u8, field.key, "symbol")) {
                if (field.value) |v| symbol = switch (v) { .string => |s| s, else => "" };
            } else if (std.mem.eql(u8, field.key, "price")) {
                if (field.value) |v| price = numVal(v);
            }
        }
        if (std.mem.eql(u8, rec_type, "chain")) {
            if (expiration) |exp| {
                const idx = chains.items.len;
                try chains.append(allocator, .{
                    .underlying_symbol = try allocator.dupe(u8, symbol),
                    .underlying_price = price,
                    .expiration = exp,
                    .calls = &.{},
                    .puts = &.{},
                });
                try exp_map.put(exp_str, idx);
            }
        }
    }
    // Second pass: collect contracts, bucketed by chain index.
    var calls_map = std.AutoHashMap(usize, std.ArrayList(OptionContract)).init(allocator);
    defer {
        var iter = calls_map.valueIterator();
        while (iter.next()) |v| v.deinit(allocator);
        calls_map.deinit();
    }
    var puts_map = std.AutoHashMap(usize, std.ArrayList(OptionContract)).init(allocator);
    defer {
        var iter = puts_map.valueIterator();
        while (iter.next()) |v| v.deinit(allocator);
        puts_map.deinit();
    }
    for (parsed.records.items) |record| {
        var rec_type: []const u8 = "";
        var exp_str: []const u8 = "";
        var contract = OptionContract{
            .contract_type = .call,
            .strike = 0,
            .expiration = Date.epoch,
        };
        for (record.fields) |field| {
            if (std.mem.eql(u8, field.key, "type")) {
                if (field.value) |v| rec_type = switch (v) { .string => |s| s, else => "" };
            } else if (std.mem.eql(u8, field.key, "expiration")) {
                if (field.value) |v| {
                    exp_str = switch (v) { .string => |s| s, else => continue };
                    contract.expiration = Date.parse(exp_str) catch Date.epoch;
                }
            } else if (std.mem.eql(u8, field.key, "strike")) {
                if (field.value) |v| contract.strike = numVal(v);
            } else if (std.mem.eql(u8, field.key, "bid")) {
                if (field.value) |v| contract.bid = numVal(v);
            } else if (std.mem.eql(u8, field.key, "ask")) {
                if (field.value) |v| contract.ask = numVal(v);
            } else if (std.mem.eql(u8, field.key, "last")) {
                if (field.value) |v| contract.last_price = numVal(v);
            } else if (std.mem.eql(u8, field.key, "volume")) {
                if (field.value) |v| contract.volume = @intFromFloat(numVal(v));
            } else if (std.mem.eql(u8, field.key, "oi")) {
                if (field.value) |v| contract.open_interest = @intFromFloat(numVal(v));
            } else if (std.mem.eql(u8, field.key, "iv")) {
                if (field.value) |v| contract.implied_volatility = numVal(v);
            } else if (std.mem.eql(u8, field.key, "delta")) {
                if (field.value) |v| contract.delta = numVal(v);
            } else if (std.mem.eql(u8, field.key, "gamma")) {
                if (field.value) |v| contract.gamma = numVal(v);
            } else if (std.mem.eql(u8, field.key, "theta")) {
                if (field.value) |v| contract.theta = numVal(v);
            } else if (std.mem.eql(u8, field.key, "vega")) {
                if (field.value) |v| contract.vega = numVal(v);
            }
        }
        // Contracts whose expiration has no matching chain header are dropped.
        if (std.mem.eql(u8, rec_type, "call")) {
            contract.contract_type = .call;
            if (exp_map.get(exp_str)) |idx| {
                const entry = try calls_map.getOrPut(idx);
                if (!entry.found_existing) entry.value_ptr.* = .empty;
                try entry.value_ptr.append(allocator, contract);
            }
        } else if (std.mem.eql(u8, rec_type, "put")) {
            contract.contract_type = .put;
            if (exp_map.get(exp_str)) |idx| {
                const entry = try puts_map.getOrPut(idx);
                if (!entry.found_existing) entry.value_ptr.* = .empty;
                try entry.value_ptr.append(allocator, contract);
            }
        }
    }
    // Assign calls/puts to chains; ownership moves out of the temp maps.
    for (chains.items, 0..) |*chain, idx| {
        if (calls_map.getPtr(idx)) |cl| {
            chain.calls = try cl.toOwnedSlice(allocator);
        }
        if (puts_map.getPtr(idx)) |pl| {
            chain.puts = try pl.toOwnedSlice(allocator);
        }
    }
    return chains.toOwnedSlice(allocator);
}
/// Map an SRF dividend-type tag string to its enum value.
/// Unrecognized tags fall back to .unknown.
fn parseDividendTypeTag(s: []const u8) DividendType {
    const mappings = [_]struct { tag: []const u8, value: DividendType }{
        .{ .tag = "regular", .value = .regular },
        .{ .tag = "special", .value = .special },
        .{ .tag = "supplemental", .value = .supplemental },
        .{ .tag = "irregular", .value = .irregular },
    };
    for (mappings) |m| {
        if (std.mem.eql(u8, s, m.tag)) return m.value;
    }
    return .unknown;
}
/// Map an SRF report-time tag (bmo/amc/dmh) to its enum value.
/// Unrecognized tags fall back to .unknown.
fn parseReportTimeTag(s: []const u8) ReportTime {
    const mappings = [_]struct { tag: []const u8, value: ReportTime }{
        .{ .tag = "bmo", .value = .bmo },
        .{ .tag = "amc", .value = .amc },
        .{ .tag = "dmh", .value = .dmh },
    };
    for (mappings) |m| {
        if (std.mem.eql(u8, s, m.tag)) return m.value;
    }
    return .unknown;
}
/// Build the cache path for a symbol: `<cache_dir>/<symbol>` when
/// `file_name` is empty, otherwise `<cache_dir>/<symbol>/<file_name>`.
/// Caller owns the returned path.
fn symbolPath(self: *Store, symbol: []const u8, file_name: []const u8) ![]const u8 {
    if (file_name.len != 0) {
        return std.fs.path.join(self.allocator, &.{ self.cache_dir, symbol, file_name });
    }
    return std.fs.path.join(self.allocator, &.{ self.cache_dir, symbol });
}
/// Extract the numeric payload of an SRF value.
/// Non-number variants read as 0 (lenient parsing for cache files).
fn numVal(v: srf.Value) f64 {
    if (v == .number) return v.number;
    return 0;
}
};
// Error set for malformed SRF input. NOTE(review): the (de)serializers above
// return `error.InvalidData` through inferred error sets, so this named set
// appears unused here -- confirm there are no external references before removing.
const InvalidData = error{InvalidData};
/// Serialize a portfolio (list of lots) to SRF format.
/// One record per lot; optional fields (close date/price, note, account)
/// are emitted only when present. Caller owns the returned buffer.
pub fn serializePortfolio(allocator: std.mem.Allocator, lots: []const Lot) ![]const u8 {
    var out: std.ArrayList(u8) = .empty;
    errdefer out.deinit(allocator);
    const w = out.writer(allocator);
    try w.writeAll("#!srfv1\n");
    for (lots) |lot| {
        var date_buf: [10]u8 = undefined;
        try w.print("symbol::{s},shares:num:{d},open_date::{s},open_price:num:{d}", .{
            lot.symbol,
            lot.shares,
            lot.open_date.format(&date_buf),
            lot.open_price,
        });
        if (lot.close_date) |cd| {
            var cd_buf: [10]u8 = undefined;
            try w.print(",close_date::{s}", .{cd.format(&cd_buf)});
        }
        if (lot.close_price) |cp| try w.print(",close_price:num:{d}", .{cp});
        if (lot.note) |n| try w.print(",note::{s}", .{n});
        if (lot.account) |a| try w.print(",account::{s}", .{a});
        try w.writeAll("\n");
    }
    return out.toOwnedSlice(allocator);
}
/// Deserialize a portfolio from SRF data. Caller owns the returned Portfolio.
/// Records without a symbol are skipped; unparseable dates leave the lot's
/// defaults in place. Returns error.InvalidData on SRF parse failure.
pub fn deserializePortfolio(allocator: std.mem.Allocator, data: []const u8) !Portfolio {
    var lots: std.ArrayList(Lot) = .empty;
    errdefer {
        // Frees fully-appended lots only; in-flight allocations for the
        // current record are covered by the per-iteration errdefers below.
        for (lots.items) |lot| {
            allocator.free(lot.symbol);
            if (lot.note) |n| allocator.free(n);
            if (lot.account) |a| allocator.free(a);
        }
        lots.deinit(allocator);
    }
    var reader = std.Io.Reader.fixed(data);
    // alloc_strings = false: parsed slices point into `data`; dupe what we keep.
    const parsed = srf.parse(&reader, allocator, .{ .alloc_strings = false }) catch return error.InvalidData;
    defer parsed.deinit();
    for (parsed.records.items) |record| {
        var lot = Lot{
            .symbol = "",
            .shares = 0,
            .open_date = Date.epoch,
            .open_price = 0,
        };
        var sym_raw: ?[]const u8 = null;
        var note_raw: ?[]const u8 = null;
        var account_raw: ?[]const u8 = null;
        for (record.fields) |field| {
            if (std.mem.eql(u8, field.key, "symbol")) {
                if (field.value) |v| sym_raw = switch (v) { .string => |s| s, else => null };
            } else if (std.mem.eql(u8, field.key, "shares")) {
                if (field.value) |v| lot.shares = Store.numVal(v);
            } else if (std.mem.eql(u8, field.key, "open_date")) {
                if (field.value) |v| {
                    const str = switch (v) { .string => |s| s, else => continue };
                    lot.open_date = Date.parse(str) catch continue;
                }
            } else if (std.mem.eql(u8, field.key, "open_price")) {
                if (field.value) |v| lot.open_price = Store.numVal(v);
            } else if (std.mem.eql(u8, field.key, "close_date")) {
                if (field.value) |v| {
                    const str = switch (v) { .string => |s| s, else => continue };
                    lot.close_date = Date.parse(str) catch null;
                }
            } else if (std.mem.eql(u8, field.key, "close_price")) {
                if (field.value) |v| lot.close_price = Store.numVal(v);
            } else if (std.mem.eql(u8, field.key, "note")) {
                if (field.value) |v| note_raw = switch (v) { .string => |s| s, else => null };
            } else if (std.mem.eql(u8, field.key, "account")) {
                if (field.value) |v| account_raw = switch (v) { .string => |s| s, else => null };
            }
        }
        // A record without a symbol is malformed; skip it entirely.
        if (sym_raw) |s| {
            lot.symbol = try allocator.dupe(u8, s);
        } else continue;
        // Fix: if a later dupe or the append fails, the outer errdefer does
        // not know about this not-yet-appended lot -- free its pieces here.
        errdefer allocator.free(lot.symbol);
        if (note_raw) |n| {
            lot.note = try allocator.dupe(u8, n);
        }
        errdefer if (lot.note) |n| allocator.free(n);
        if (account_raw) |a| {
            lot.account = try allocator.dupe(u8, a);
        }
        errdefer if (lot.account) |a| allocator.free(a);
        try lots.append(allocator, lot);
    }
    return .{
        .lots = try lots.toOwnedSlice(allocator),
        .allocator = allocator,
    };
}
test "dividend serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    // One fully-populated dividend and one with optional fields omitted,
    // to exercise both presence and absence of pay_date/frequency.
    const divs = [_]Dividend{
        .{ .ex_date = Date.fromYmd(2024, 3, 15), .amount = 0.8325, .pay_date = Date.fromYmd(2024, 3, 28), .frequency = 4, .distribution_type = .regular },
        .{ .ex_date = Date.fromYmd(2024, 6, 14), .amount = 0.9148, .distribution_type = .special },
    };
    const data = try Store.serializeDividends(allocator, &divs);
    defer allocator.free(data);
    const parsed = try Store.deserializeDividends(allocator, data);
    defer allocator.free(parsed);
    try std.testing.expectEqual(@as(usize, 2), parsed.len);
    try std.testing.expect(parsed[0].ex_date.eql(Date.fromYmd(2024, 3, 15)));
    try std.testing.expectApproxEqAbs(@as(f64, 0.8325), parsed[0].amount, 0.0001);
    try std.testing.expect(parsed[0].pay_date != null);
    try std.testing.expect(parsed[0].pay_date.?.eql(Date.fromYmd(2024, 3, 28)));
    try std.testing.expectEqual(@as(?u8, 4), parsed[0].frequency);
    try std.testing.expectEqual(DividendType.regular, parsed[0].distribution_type);
    try std.testing.expect(parsed[1].ex_date.eql(Date.fromYmd(2024, 6, 14)));
    try std.testing.expectApproxEqAbs(@as(f64, 0.9148), parsed[1].amount, 0.0001);
    // Omitted optional field must stay null after the round-trip.
    try std.testing.expect(parsed[1].pay_date == null);
    try std.testing.expectEqual(DividendType.special, parsed[1].distribution_type);
}
test "split serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    const splits = [_]Split{
        .{ .date = Date.fromYmd(2020, 8, 31), .numerator = 4, .denominator = 1 },
        .{ .date = Date.fromYmd(2014, 6, 9), .numerator = 7, .denominator = 1 },
    };
    const data = try Store.serializeSplits(allocator, &splits);
    defer allocator.free(data);
    const parsed = try Store.deserializeSplits(allocator, data);
    defer allocator.free(parsed);
    try std.testing.expectEqual(@as(usize, 2), parsed.len);
    try std.testing.expect(parsed[0].date.eql(Date.fromYmd(2020, 8, 31)));
    try std.testing.expectApproxEqAbs(@as(f64, 4), parsed[0].numerator, 0.001);
    try std.testing.expectApproxEqAbs(@as(f64, 1), parsed[0].denominator, 0.001);
    try std.testing.expect(parsed[1].date.eql(Date.fromYmd(2014, 6, 9)));
    try std.testing.expectApproxEqAbs(@as(f64, 7), parsed[1].numerator, 0.001);
    // Previously unchecked: the second split's denominator must also
    // survive the round-trip.
    try std.testing.expectApproxEqAbs(@as(f64, 1), parsed[1].denominator, 0.001);
}
test "portfolio serialize/deserialize round-trip" {
    const allocator = std.testing.allocator;
    // Mix of open and closed lots across two symbols, so the round-trip
    // covers both the required fields and the optional close_date/close_price.
    const lots = [_]Lot{
        .{ .symbol = "AMZN", .shares = 10, .open_date = Date.fromYmd(2022, 3, 15), .open_price = 150.25 },
        .{ .symbol = "AMZN", .shares = 5, .open_date = Date.fromYmd(2023, 6, 1), .open_price = 125.00, .close_date = Date.fromYmd(2024, 1, 15), .close_price = 185.50 },
        .{ .symbol = "VTI", .shares = 100, .open_date = Date.fromYmd(2022, 1, 10), .open_price = 220.00 },
    };
    const data = try serializePortfolio(allocator, &lots);
    defer allocator.free(data);
    var portfolio = try deserializePortfolio(allocator, data);
    defer portfolio.deinit();
    try std.testing.expectEqual(@as(usize, 3), portfolio.lots.len);
    try std.testing.expectEqualStrings("AMZN", portfolio.lots[0].symbol);
    try std.testing.expectApproxEqAbs(@as(f64, 10), portfolio.lots[0].shares, 0.01);
    try std.testing.expect(portfolio.lots[0].isOpen());
    try std.testing.expectEqualStrings("AMZN", portfolio.lots[1].symbol);
    try std.testing.expectApproxEqAbs(@as(f64, 5), portfolio.lots[1].shares, 0.01);
    // Lot with a close date must deserialize as closed.
    try std.testing.expect(!portfolio.lots[1].isOpen());
    try std.testing.expect(portfolio.lots[1].close_date.?.eql(Date.fromYmd(2024, 1, 15)));
    try std.testing.expectApproxEqAbs(@as(f64, 185.50), portfolio.lots[1].close_price.?, 0.01);
    try std.testing.expectEqualStrings("VTI", portfolio.lots[2].symbol);
}

966
src/cli/main.zig Normal file
View file

@ -0,0 +1,966 @@
const std = @import("std");
const zfin = @import("zfin");
const tui = @import("tui");
// CLI usage text, printed for `help`/`--help`/`-h` and when no command is
// given. Keep in sync with the dispatch table in main() below.
const usage =
    \\Usage: zfin <command> [options]
    \\
    \\Commands:
    \\ interactive [opts] Launch interactive TUI
    \\ perf <SYMBOL> Show 1yr/3yr/5yr/10yr trailing returns (Morningstar-style)
    \\ quote <SYMBOL> Show latest quote
    \\ history <SYMBOL> Show recent price history
    \\ divs <SYMBOL> Show dividend history
    \\ splits <SYMBOL> Show split history
    \\ options <SYMBOL> Show options chain (nearest expiration)
    \\ earnings <SYMBOL> Show earnings history and upcoming
    \\ etf <SYMBOL> Show ETF profile (holdings, sectors, expense ratio)
    \\ portfolio <FILE> Load and analyze a portfolio (.srf file)
    \\ cache stats Show cache statistics
    \\ cache clear Clear all cached data
    \\
    \\Interactive mode options:
    \\ -p, --portfolio <FILE> Portfolio file (.srf)
    \\ -w, --watchlist <FILE> Watchlist file (default: watchlist.srf)
    \\ -s, --symbol <SYMBOL> Initial symbol (default: VTI)
    \\ --default-keys Print default keybindings
    \\ --default-theme Print default theme
    \\
    \\Environment Variables:
    \\ TWELVEDATA_API_KEY Twelve Data API key (primary: prices)
    \\ POLYGON_API_KEY Polygon.io API key (dividends, splits)
    \\ FINNHUB_API_KEY Finnhub API key (earnings)
    \\ ALPHAVANTAGE_API_KEY Alpha Vantage API key (ETF profiles)
    \\ ZFIN_CACHE_DIR Cache directory (default: ~/.cache/zfin)
    \\
;
/// CLI entry point: parses argv, loads config from the environment, and
/// dispatches to one command handler. Unknown commands print a hint to stderr.
pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();
    const args = try std.process.argsAlloc(allocator);
    defer std.process.argsFree(allocator, args);
    // No command given: show usage and exit successfully.
    if (args.len < 2) {
        try stdout_print(usage);
        return;
    }
    // API keys and cache dir come from environment variables (see `usage`).
    var config = zfin.Config.fromEnv(allocator);
    defer config.deinit();
    const command = args[1];
    if (std.mem.eql(u8, command, "help") or std.mem.eql(u8, command, "--help") or std.mem.eql(u8, command, "-h")) {
        try stdout_print(usage);
        return;
    }
    // Interactive TUI -- delegates to the TUI module (owns its own DataService)
    if (std.mem.eql(u8, command, "interactive") or std.mem.eql(u8, command, "i")) {
        try tui.run(allocator, config, args);
        return;
    }
    // All remaining commands share one DataService (fetch + cache layer).
    var svc = zfin.DataService.init(allocator, config);
    defer svc.deinit();
    // Dispatch; each command validates its positional argument count first.
    if (std.mem.eql(u8, command, "perf")) {
        if (args.len < 3) return try stderr_print("Error: 'perf' requires a symbol argument\n");
        try cmdPerf(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "quote")) {
        if (args.len < 3) return try stderr_print("Error: 'quote' requires a symbol argument\n");
        try cmdQuote(allocator, config, args[2]);
    } else if (std.mem.eql(u8, command, "history")) {
        if (args.len < 3) return try stderr_print("Error: 'history' requires a symbol argument\n");
        try cmdHistory(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "divs")) {
        if (args.len < 3) return try stderr_print("Error: 'divs' requires a symbol argument\n");
        try cmdDivs(allocator, &svc, config, args[2]);
    } else if (std.mem.eql(u8, command, "splits")) {
        if (args.len < 3) return try stderr_print("Error: 'splits' requires a symbol argument\n");
        try cmdSplits(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "options")) {
        if (args.len < 3) return try stderr_print("Error: 'options' requires a symbol argument\n");
        try cmdOptions(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "earnings")) {
        if (args.len < 3) return try stderr_print("Error: 'earnings' requires a symbol argument\n");
        try cmdEarnings(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "etf")) {
        if (args.len < 3) return try stderr_print("Error: 'etf' requires a symbol argument\n");
        try cmdEtf(allocator, &svc, args[2]);
    } else if (std.mem.eql(u8, command, "portfolio")) {
        if (args.len < 3) return try stderr_print("Error: 'portfolio' requires a file path argument\n");
        try cmdPortfolio(allocator, config, args[2]);
    } else if (std.mem.eql(u8, command, "cache")) {
        if (args.len < 3) return try stderr_print("Error: 'cache' requires a subcommand (stats, clear)\n");
        try cmdCache(allocator, config, args[2]);
    } else {
        try stderr_print("Unknown command. Run 'zfin help' for usage.\n");
    }
}
/// `perf` command: print Morningstar-style trailing returns (as-of-date and
/// month-end tables; price-only plus total-return when dividends available).
fn cmdPerf(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    const result = svc.getTrailingReturns(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: TWELVEDATA_API_KEY not set. Get a free key at https://twelvedata.com\n");
            return;
        },
        else => {
            try stderr_print("Error fetching data.\n");
            return;
        },
    };
    defer allocator.free(result.candles);
    defer if (result.dividends) |d| allocator.free(d);
    if (result.source == .cached) try stderr_print("(using cached data)\n");
    const c = result.candles;
    // Fix: guard the empty series -- `c[c.len - 1]` below would panic on
    // an empty slice (cmdHistory already has the equivalent check).
    if (c.len == 0) return try stderr_print("No data available.\n");
    const end_date = c[c.len - 1].date;
    const today = todayDate();
    const month_end = today.lastDayOfPriorMonth();
    var buf: [8192]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nTrailing Returns for {s}\n", .{symbol});
    try out.print("========================================\n", .{});
    try out.print("Data points: {d} (", .{c.len});
    {
        var db: [10]u8 = undefined;
        try out.print("{s}", .{c[0].date.format(&db)});
    }
    try out.print(" to ", .{});
    {
        var db: [10]u8 = undefined;
        try out.print("{s}", .{end_date.format(&db)});
    }
    try out.print(")\nLatest close: ${d:.2}\n", .{c[c.len - 1].close});
    // Total-return columns are only meaningful when dividend data was present.
    const has_divs = result.asof_total != null;
    // -- As-of-date returns (matches Morningstar "Trailing Returns" page) --
    {
        var db: [10]u8 = undefined;
        try out.print("\nAs-of {s}:\n", .{end_date.format(&db)});
    }
    try printReturnsTable(out, result.asof_price, if (has_divs) result.asof_total else null);
    // -- Month-end returns (matches Morningstar "Performance" page) --
    {
        var db: [10]u8 = undefined;
        try out.print("\nMonth-end ({s}):\n", .{month_end.format(&db)});
    }
    try printReturnsTable(out, result.me_price, if (has_divs) result.me_total else null);
    if (!has_divs) {
        try out.print("\nSet POLYGON_API_KEY for total returns with dividend reinvestment.\n", .{});
    }
    try out.print("\n", .{});
    try out.flush();
}
/// Render one returns table (1/3/5/10-year rows) to `out`.
/// `price` is always shown; `total` adds a second "Total Return" column
/// when non-null. Multi-year periods show annualized values (" ann." suffix).
fn printReturnsTable(
    out: anytype,
    price: zfin.performance.TrailingReturns,
    total: ?zfin.performance.TrailingReturns,
) !void {
    const has_total = total != null;
    // Header row: one or two value columns depending on dividend availability.
    if (has_total) {
        try out.print("{s:>22} {s:>14} {s:>14}\n", .{ "", "Price Only", "Total Return" });
        try out.print("{s:->22} {s:->14} {s:->14}\n", .{ "", "", "" });
    } else {
        try out.print("{s:>22} {s:>14}\n", .{ "", "Price Only" });
        try out.print("{s:->22} {s:->14}\n", .{ "", "" });
    }
    const periods = [_]struct { label: []const u8, years: u16 }{
        .{ .label = "1-Year Return:", .years = 1 },
        .{ .label = "3-Year Return:", .years = 3 },
        .{ .label = "5-Year Return:", .years = 5 },
        .{ .label = "10-Year Return:", .years = 10 },
    };
    // Flatten the struct fields into arrays so one loop renders all rows.
    const price_arr = [_]?zfin.performance.PerformanceResult{
        price.one_year, price.three_year, price.five_year, price.ten_year,
    };
    const total_arr: [4]?zfin.performance.PerformanceResult = if (total) |t|
        .{ t.one_year, t.three_year, t.five_year, t.ten_year }
    else
        .{ null, null, null, null };
    for (periods, 0..) |period, i| {
        try out.print(" {s:<20}", .{period.label});
        if (price_arr[i]) |r| {
            var rb: [32]u8 = undefined;
            // Multi-year periods prefer the annualized figure when computed.
            const val = if (period.years > 1) r.annualized_return orelse r.total_return else r.total_return;
            try out.print(" {s:>13}", .{zfin.performance.formatReturn(&rb, val)});
        } else {
            try out.print(" {s:>13}", .{"N/A"});
        }
        if (has_total) {
            if (total_arr[i]) |r| {
                var rb: [32]u8 = undefined;
                const val = if (period.years > 1) r.annualized_return orelse r.total_return else r.total_return;
                try out.print(" {s:>13}", .{zfin.performance.formatReturn(&rb, val)});
            } else {
                try out.print(" {s:>13}", .{"N/A"});
            }
        }
        if (period.years > 1) {
            try out.print(" ann.", .{});
        }
        try out.print("\n", .{});
    }
}
/// `quote` command: fetch and print a real-time quote.
/// Bypasses the DataService cache on purpose (quotes are live data) and
/// talks to Twelve Data directly; requires TWELVEDATA_API_KEY.
fn cmdQuote(allocator: std.mem.Allocator, config: zfin.Config, symbol: []const u8) !void {
    // Quote is a real-time endpoint, not cached -- use TwelveData directly
    const td_key = config.twelvedata_key orelse {
        try stderr_print("Error: TWELVEDATA_API_KEY not set.\n");
        return;
    };
    var td = zfin.TwelveData.init(allocator, td_key);
    defer td.deinit();
    var qr = td.fetchQuote(allocator, symbol) catch |err| {
        try stderr_print("API error: ");
        try stderr_print(@errorName(err));
        try stderr_print("\n");
        return;
    };
    defer qr.deinit();
    var q = qr.parse(allocator) catch |err| {
        try stderr_print("Parse error: ");
        try stderr_print(@errorName(err));
        try stderr_print("\n");
        return;
    };
    defer q.deinit();
    var buf: [4096]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\n{s} -- {s}\n", .{ q.symbol(), q.name() });
    try out.print("========================================\n", .{});
    try out.print(" Exchange: {s}\n", .{q.exchange()});
    try out.print(" Date: {s}\n", .{q.datetime()});
    try out.print(" Close: ${d:.2}\n", .{q.close()});
    try out.print(" Open: ${d:.2}\n", .{q.open()});
    try out.print(" High: ${d:.2}\n", .{q.high()});
    try out.print(" Low: ${d:.2}\n", .{q.low()});
    try out.print(" Volume: {d}\n", .{q.volume()});
    try out.print(" Prev Close: ${d:.2}\n", .{q.previous_close()});
    try out.print(" Change: ${d:.2} ({d:.2}%)\n", .{ q.change(), q.percent_change() });
    try out.print(" 52-Week Low: ${d:.2}\n", .{q.fifty_two_week_low()});
    try out.print(" 52-Week High: ${d:.2}\n", .{q.fifty_two_week_high()});
    try out.print(" Avg Volume: {d}\n\n", .{q.average_volume()});
    try out.flush();
}
/// `history` command: print an OHLCV table for roughly the last 30 calendar
/// days (however many trading days fall in that window).
fn cmdHistory(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    // History uses getCandles but filters to last 30 days
    const result = svc.getCandles(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: TWELVEDATA_API_KEY not set.\n");
            return;
        },
        else => {
            try stderr_print("Error fetching data.\n");
            return;
        },
    };
    defer allocator.free(result.data);
    if (result.source == .cached) try stderr_print("(using cached data)\n");
    const all = result.data;
    if (all.len == 0) return try stderr_print("No data available.\n");
    // Filter to last 30 days
    const today = todayDate();
    const one_month_ago = today.addDays(-30);
    // Slice view into `all` -- no allocation, freed via result.data above.
    const c = filterCandlesFrom(all, one_month_ago);
    if (c.len == 0) return try stderr_print("No data available.\n");
    var buf: [8192]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nPrice History for {s} (last 30 days)\n", .{symbol});
    try out.print("========================================\n", .{});
    try out.print("{s:>12} {s:>10} {s:>10} {s:>10} {s:>10} {s:>12}\n", .{
        "Date", "Open", "High", "Low", "Close", "Volume",
    });
    try out.print("{s:->12} {s:->10} {s:->10} {s:->10} {s:->10} {s:->12}\n", .{
        "", "", "", "", "", "",
    });
    for (c) |candle| {
        var db: [10]u8 = undefined;
        try out.print("{s:>12} {d:>10.2} {d:>10.2} {d:>10.2} {d:>10.2} {d:>12}\n", .{
            candle.date.format(&db), candle.open, candle.high, candle.low, candle.close, candle.volume,
        });
    }
    try out.print("\n{d} trading days\n\n", .{c.len});
    try out.flush();
}
/// Return a slice view of candles on or after the given date (no allocation).
/// Assumes `candles` is sorted ascending by date -- TODO confirm with callers.
fn filterCandlesFrom(candles: []const zfin.Candle, from: zfin.Date) []const zfin.Candle {
    // Binary search for the first candle whose date is >= `from`.
    var left: usize = 0;
    var right: usize = candles.len;
    while (left < right) {
        const mid = left + (right - left) / 2;
        if (candles[mid].date.lessThan(from)) {
            left = mid + 1;
        } else {
            right = mid;
        }
    }
    // When every candle predates `from`, left == candles.len and this
    // slice is empty -- no separate bounds check needed.
    return candles[left..];
}
/// `divs` command: print dividend history plus trailing-twelve-month totals.
/// If a Twelve Data key is configured, also fetches the current price to
/// report an approximate TTM yield (best-effort; quote failures are ignored).
fn cmdDivs(allocator: std.mem.Allocator, svc: *zfin.DataService, config: zfin.Config, symbol: []const u8) !void {
    const result = svc.getDividends(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: POLYGON_API_KEY not set. Get a free key at https://polygon.io\n");
            return;
        },
        else => {
            try stderr_print("Error fetching dividend data.\n");
            return;
        },
    };
    defer allocator.free(result.data);
    if (result.source == .cached) try stderr_print("(using cached dividend data)\n");
    const d = result.data;
    // Fetch current price for yield calculation
    var current_price: ?f64 = null;
    if (config.twelvedata_key) |td_key| {
        var td = zfin.TwelveData.init(allocator, td_key);
        defer td.deinit();
        // Best-effort: any fetch/parse failure just leaves current_price null.
        if (td.fetchQuote(allocator, symbol)) |qr_val| {
            var qr = qr_val;
            defer qr.deinit();
            if (qr.parse(allocator)) |q_val| {
                var q = q_val;
                defer q.deinit();
                current_price = q.close();
            } else |_| {}
        } else |_| {}
    }
    var buf: [8192]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nDividend History for {s}\n", .{symbol});
    try out.print("========================================\n", .{});
    if (d.len == 0) {
        try out.print(" No dividends found.\n\n", .{});
        try out.flush();
        return;
    }
    try out.print("{s:>12} {s:>10} {s:>12} {s:>6} {s:>10}\n", .{
        "Ex-Date", "Amount", "Pay Date", "Freq", "Type",
    });
    try out.print("{s:->12} {s:->10} {s:->12} {s:->6} {s:->10}\n", .{
        "", "", "", "", "",
    });
    const today = todayDate();
    const one_year_ago = today.subtractYears(1);
    var total: f64 = 0;
    var ttm: f64 = 0;
    for (d) |div| {
        var ex_buf: [10]u8 = undefined;
        try out.print("{s:>12} {d:>10.4}", .{ div.ex_date.format(&ex_buf), div.amount });
        if (div.pay_date) |pd| {
            var pay_buf: [10]u8 = undefined;
            try out.print(" {s:>12}", .{pd.format(&pay_buf)});
        } else {
            try out.print(" {s:>12}", .{"--"});
        }
        if (div.frequency) |f| {
            try out.print(" {d:>6}", .{f});
        } else {
            try out.print(" {s:>6}", .{"--"});
        }
        try out.print(" {s:>10}\n", .{@tagName(div.distribution_type)});
        total += div.amount;
        // TTM window: ex-dates on/after one year ago count toward yield.
        if (!div.ex_date.lessThan(one_year_ago)) ttm += div.amount;
    }
    try out.print("\n{d} dividends, total: ${d:.4}\n", .{ d.len, total });
    try out.print("TTM dividends: ${d:.4}", .{ttm});
    if (current_price) |price| {
        if (price > 0) {
            const yield = (ttm / price) * 100.0;
            try out.print(" (yield: {d:.2}% at ${d:.2})", .{ yield, price });
        }
    }
    try out.print("\n\n", .{});
    try out.flush();
}
/// `splits` command: print the symbol's split history as "N:M" ratios.
fn cmdSplits(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    const result = svc.getSplits(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: POLYGON_API_KEY not set. Get a free key at https://polygon.io\n");
            return;
        },
        else => {
            try stderr_print("Error fetching split data.\n");
            return;
        },
    };
    defer allocator.free(result.data);
    if (result.source == .cached) try stderr_print("(using cached split data)\n");
    const sp = result.data;
    var buf: [4096]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nSplit History for {s}\n", .{symbol});
    try out.print("========================================\n", .{});
    if (sp.len == 0) {
        try out.print(" No splits found.\n\n", .{});
        try out.flush();
        return;
    }
    try out.print("{s:>12} {s:>10}\n", .{ "Date", "Ratio" });
    try out.print("{s:->12} {s:->10}\n", .{ "", "" });
    for (sp) |s| {
        var db: [10]u8 = undefined;
        // Ratios are stored as f64 but rendered as whole numbers (e.g. 4:1).
        try out.print("{s:>12} {d:.0}:{d:.0}\n", .{ s.date.format(&db), s.numerator, s.denominator });
    }
    try out.print("\n{d} split(s)\n\n", .{sp.len});
    try out.flush();
}
/// `options` command: list available expirations and print the full
/// call/put table for the nearest one.
fn cmdOptions(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    const result = svc.getOptions(symbol) catch |err| switch (err) {
        zfin.DataError.FetchFailed => {
            try stderr_print("Error fetching options data from CBOE.\n");
            return;
        },
        else => {
            try stderr_print("Error loading options data.\n");
            return;
        },
    };
    const ch = result.data;
    defer {
        // All chains share the same underlying_symbol pointer; free it once.
        // NOTE(review): that holds for the live-fetch path, but if cached data
        // flows through Store.deserializeOptions each chain dupes its own
        // symbol -- confirm the ownership contract of DataService.getOptions.
        if (ch.len > 0) allocator.free(ch[0].underlying_symbol);
        for (ch) |chain| {
            allocator.free(chain.calls);
            allocator.free(chain.puts);
        }
        allocator.free(ch);
    }
    if (result.source == .cached) try stderr_print("(using cached options data)\n");
    if (ch.len == 0) {
        try stderr_print("No options data found.\n");
        return;
    }
    var buf: [16384]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nOptions Chain for {s}\n", .{symbol});
    try out.print("========================================\n", .{});
    if (ch[0].underlying_price) |price| {
        try out.print("Underlying: ${d:.2}\n", .{price});
    }
    try out.print("{d} expiration(s) available\n", .{ch.len});
    // List expirations
    try out.print("\nExpirations:\n", .{});
    for (ch) |chain| {
        var db: [10]u8 = undefined;
        try out.print(" {s} ({d} calls, {d} puts)\n", .{
            chain.expiration.format(&db),
            chain.calls.len,
            chain.puts.len,
        });
    }
    // Show nearest expiration chain in detail
    // (assumes chains arrive sorted by expiration -- TODO confirm upstream).
    const nearest = ch[0];
    {
        var db: [10]u8 = undefined;
        try out.print("\nNearest Expiration: {s}\n", .{nearest.expiration.format(&db)});
    }
    try out.print("{s:->64}\n", .{""});
    // Calls
    try out.print("\n CALLS\n", .{});
    try out.print(" {s:>10} {s:>10} {s:>10} {s:>10} {s:>10} {s:>8}\n", .{
        "Strike", "Last", "Bid", "Ask", "Volume", "OI",
    });
    try out.print(" {s:->10} {s:->10} {s:->10} {s:->10} {s:->10} {s:->8}\n", .{
        "", "", "", "", "", "",
    });
    for (nearest.calls) |c| {
        try out.print(" {d:>10.2}", .{c.strike});
        if (c.last_price) |p| try out.print(" {d:>10.2}", .{p}) else try out.print(" {s:>10}", .{"--"});
        if (c.bid) |b| try out.print(" {d:>10.2}", .{b}) else try out.print(" {s:>10}", .{"--"});
        if (c.ask) |a| try out.print(" {d:>10.2}", .{a}) else try out.print(" {s:>10}", .{"--"});
        if (c.volume) |v| try out.print(" {d:>10}", .{v}) else try out.print(" {s:>10}", .{"--"});
        if (c.open_interest) |oi| try out.print(" {d:>8}", .{oi}) else try out.print(" {s:>8}", .{"--"});
        try out.print("\n", .{});
    }
    // Puts
    try out.print("\n PUTS\n", .{});
    try out.print(" {s:>10} {s:>10} {s:>10} {s:>10} {s:>10} {s:>8}\n", .{
        "Strike", "Last", "Bid", "Ask", "Volume", "OI",
    });
    try out.print(" {s:->10} {s:->10} {s:->10} {s:->10} {s:->10} {s:->8}\n", .{
        "", "", "", "", "", "",
    });
    for (nearest.puts) |p| {
        try out.print(" {d:>10.2}", .{p.strike});
        if (p.last_price) |lp| try out.print(" {d:>10.2}", .{lp}) else try out.print(" {s:>10}", .{"--"});
        if (p.bid) |b| try out.print(" {d:>10.2}", .{b}) else try out.print(" {s:>10}", .{"--"});
        if (p.ask) |a| try out.print(" {d:>10.2}", .{a}) else try out.print(" {s:>10}", .{"--"});
        if (p.volume) |v| try out.print(" {d:>10}", .{v}) else try out.print(" {s:>10}", .{"--"});
        if (p.open_interest) |oi| try out.print(" {d:>8}", .{oi}) else try out.print(" {s:>8}", .{"--"});
        try out.print("\n", .{});
    }
    try out.print("\n", .{});
    try out.flush();
}
/// `earnings` command: print the earnings table (estimate vs actual,
/// surprise, revenue, report time) for a symbol.
fn cmdEarnings(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    const result = svc.getEarnings(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: FINNHUB_API_KEY not set. Get a free key at https://finnhub.io\n");
            return;
        },
        else => {
            try stderr_print("Error fetching earnings data.\n");
            return;
        },
    };
    defer allocator.free(result.data);
    if (result.source == .cached) try stderr_print("(using cached earnings data)\n");
    const ev = result.data;
    var buf: [8192]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    try out.print("\nEarnings History for {s}\n", .{symbol});
    try out.print("========================================\n", .{});
    if (ev.len == 0) {
        try out.print(" No earnings data found.\n\n", .{});
        try out.flush();
        return;
    }
    try out.print("{s:>12} {s:>4} {s:>10} {s:>10} {s:>10} {s:>16} {s:>5}\n", .{
        "Date", "Q", "Estimate", "Actual", "Surprise", "Revenue", "When",
    });
    try out.print("{s:->12} {s:->4} {s:->10} {s:->10} {s:->10} {s:->16} {s:->5}\n", .{
        "", "", "", "", "", "", "",
    });
    for (ev) |e| {
        var db: [10]u8 = undefined;
        try out.print("{s:>12}", .{e.date.format(&db)});
        if (e.quarter) |q| try out.print(" Q{d}", .{q}) else try out.print(" {s:>4}", .{"--"});
        if (e.estimate) |est| try out.print(" {d:>10.4}", .{est}) else try out.print(" {s:>10}", .{"--"});
        if (e.actual) |act| try out.print(" {d:>10.4}", .{act}) else try out.print(" {s:>10}", .{"--"});
        // Explicit leading '+' for beats; misses already carry '-'.
        if (e.surpriseAmount()) |s| {
            if (s >= 0)
                try out.print(" +{d:.4}", .{s})
            else
                try out.print(" {d:.4}", .{s});
        } else {
            try out.print(" {s:>10}", .{"--"});
        }
        // Prefer actual revenue; fall back to the estimate ('~' prefix).
        if (e.revenue_actual) |rev| {
            try out.print(" {s:>14}", .{formatLargeNum(rev)});
        } else if (e.revenue_estimate) |rev| {
            try out.print(" ~{s:>14}", .{formatLargeNum(rev)});
        } else {
            try out.print(" {s:>16}", .{"--"});
        }
        try out.print(" {s:>5}\n", .{@tagName(e.report_time)});
    }
    try out.print("\n{d} earnings event(s)\n\n", .{ev.len});
    try out.flush();
}
/// Fetch and display the ETF profile for `symbol` (AlphaVantage-backed).
/// Owns and frees the nested holdings/sectors allocations before returning.
fn cmdEtf(allocator: std.mem.Allocator, svc: *zfin.DataService, symbol: []const u8) !void {
    const result = svc.getEtfProfile(symbol) catch |err| switch (err) {
        zfin.DataError.NoApiKey => {
            try stderr_print("Error: ALPHAVANTAGE_API_KEY not set. Get a free key at https://alphavantage.co\n");
            return;
        },
        else => {
            try stderr_print("Error fetching ETF profile.\n");
            return;
        },
    };
    const profile = result.data;
    // Free the per-holding and per-sector strings, then the slices themselves.
    // NOTE(review): profile.name/category/description are not freed here --
    // confirm they are either always null or not heap-owned by this caller.
    defer {
        if (profile.holdings) |h| {
            for (h) |holding| {
                if (holding.symbol) |s| allocator.free(s);
                allocator.free(holding.name);
            }
            allocator.free(h);
        }
        if (profile.sectors) |s| {
            for (s) |sec| allocator.free(sec.sector);
            allocator.free(s);
        }
    }
    if (result.source == .cached) try stderr_print("(using cached ETF profile)\n");
    try printEtfProfile(profile, symbol);
}
/// Pretty-print an ETF profile (fees, size, sectors, top holdings) to stdout.
/// Optional fields the provider did not populate are simply omitted.
fn printEtfProfile(profile: zfin.EtfProfile, symbol: []const u8) !void {
    var scratch: [16384]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&scratch);
    const w = &stdout_writer.interface;
    try w.print("\nETF Profile: {s}\n", .{symbol});
    try w.print("========================================\n", .{});
    if (profile.expense_ratio) |er| try w.print(" Expense Ratio: {d:.2}%\n", .{er * 100.0});
    if (profile.net_assets) |na| try w.print(" Net Assets: ${s}\n", .{formatLargeNum(na)});
    if (profile.dividend_yield) |dy| try w.print(" Dividend Yield: {d:.2}%\n", .{dy * 100.0});
    if (profile.portfolio_turnover) |pt| try w.print(" Portfolio Turnover: {d:.1}%\n", .{pt * 100.0});
    if (profile.inception_date) |d| {
        var date_buf: [10]u8 = undefined;
        try w.print(" Inception Date: {s}\n", .{d.format(&date_buf)});
    }
    if (profile.leveraged) try w.print(" Leveraged: YES\n", .{});
    if (profile.total_holdings) |th| try w.print(" Total Holdings: {d}\n", .{th});
    // Sector allocation table (only when present and non-empty).
    if (profile.sectors) |sectors| {
        if (sectors.len > 0) {
            try w.print("\n Sector Allocation:\n", .{});
            for (sectors) |sec| try w.print(" {d:>5.1}% {s}\n", .{ sec.weight * 100.0, sec.sector });
        }
    }
    // Top-holdings table; holdings without a ticker show "--".
    if (profile.holdings) |holdings| {
        if (holdings.len > 0) {
            try w.print("\n Top Holdings:\n", .{});
            try w.print(" {s:>6} {s:>7} {s}\n", .{ "Symbol", "Weight", "Name" });
            try w.print(" {s:->6} {s:->7} {s:->30}\n", .{ "", "", "" });
            for (holdings) |h| {
                const ticker = h.symbol orelse "--";
                try w.print(" {s:>6} {d:>6.2}% {s}\n", .{ ticker, h.weight * 100.0, h.name });
            }
        }
    }
    try w.print("\n", .{});
    try w.flush();
}
/// Load a portfolio from an SRF file, price it with live quotes (when a
/// TwelveData key is configured), and print positions, P&L, and -- for any
/// symbol with cached daily candles -- risk metrics.
fn cmdPortfolio(allocator: std.mem.Allocator, config: zfin.Config, file_path: []const u8) !void {
    // Load portfolio from SRF file (capped at 10 MiB).
    const data = std.fs.cwd().readFileAlloc(allocator, file_path, 10 * 1024 * 1024) catch |err| {
        try stderr_print("Error reading portfolio file: ");
        try stderr_print(@errorName(err));
        try stderr_print("\n");
        return;
    };
    defer allocator.free(data);
    var portfolio = zfin.cache.deserializePortfolio(allocator, data) catch {
        try stderr_print("Error parsing portfolio file.\n");
        return;
    };
    defer portfolio.deinit();
    if (portfolio.lots.len == 0) {
        try stderr_print("Portfolio is empty.\n");
        return;
    }
    // Aggregate lots into per-symbol positions; only the outer slice is owned.
    const positions = try portfolio.positions(allocator);
    defer allocator.free(positions);
    // Get unique symbols and fetch current prices
    const syms = try portfolio.symbols(allocator);
    defer allocator.free(syms);
    // Symbol -> latest price. Keys alias `syms`, which outlives the map.
    var prices = std.StringHashMap(f64).init(allocator);
    defer prices.deinit();
    if (config.twelvedata_key) |td_key| {
        var td = zfin.TwelveData.init(allocator, td_key);
        defer td.deinit();
        for (syms) |sym| {
            try stderr_print("Fetching quote: ");
            try stderr_print(sym);
            try stderr_print("...\n");
            // Best-effort: a failed fetch or parse simply leaves the symbol
            // unpriced. Presumably portfolioSummary tolerates missing prices
            // -- TODO confirm against zfin.risk.
            if (td.fetchQuote(allocator, sym)) |qr_val| {
                var qr = qr_val;
                defer qr.deinit();
                if (qr.parse(allocator)) |q_val| {
                    var q = q_val;
                    defer q.deinit();
                    const price = q.close();
                    if (price > 0) try prices.put(sym, price);
                } else |_| {}
            } else |_| {}
        }
    } else {
        try stderr_print("Warning: TWELVEDATA_API_KEY not set. Cannot fetch current prices.\n");
    }
    // Compute summary
    var summary = zfin.risk.portfolioSummary(allocator, positions, prices) catch {
        try stderr_print("Error computing portfolio summary.\n");
        return;
    };
    defer summary.deinit(allocator);
    var buf: [16384]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    // Header
    try out.print("\nPortfolio Summary ({s})\n", .{file_path});
    try out.print("========================================\n", .{});
    // Lot counts
    var open_lots: u32 = 0;
    var closed_lots: u32 = 0;
    for (portfolio.lots) |lot| {
        if (lot.isOpen()) open_lots += 1 else closed_lots += 1;
    }
    try out.print(" Lots: {d} open, {d} closed\n", .{ open_lots, closed_lots });
    try out.print(" Positions: {d} symbols\n\n", .{positions.len});
    // Positions table
    try out.print("{s:>6} {s:>8} {s:>10} {s:>10} {s:>12} {s:>10} {s:>8}\n", .{
        "Symbol", "Shares", "Avg Cost", "Price", "Mkt Value", "P&L", "Weight",
    });
    try out.print("{s:->6} {s:->8} {s:->10} {s:->10} {s:->12} {s:->10} {s:->8}\n", .{
        "", "", "", "", "", "", "",
    });
    for (summary.allocations) |a| {
        try out.print("{s:>6} {d:>8.1} {d:>10.2} {d:>10.2} {d:>12.2} ", .{
            a.symbol, a.shares, a.avg_cost, a.current_price, a.market_value,
        });
        // '+' plus width 9 keeps the P&L column a constant 10 chars wide.
        if (a.unrealized_pnl >= 0) {
            try out.print("+{d:>9.2}", .{a.unrealized_pnl});
        } else {
            try out.print("{d:>10.2}", .{a.unrealized_pnl});
        }
        try out.print(" {d:>6.1}%\n", .{a.weight * 100.0});
    }
    try out.print("{s:->6} {s:->8} {s:->10} {s:->10} {s:->12} {s:->10} {s:->8}\n", .{
        "", "", "", "", "", "", "",
    });
    try out.print("{s:>6} {s:>8} {s:>10} {s:>10} {d:>12.2} ", .{
        "", "", "", "TOTAL", summary.total_value,
    });
    if (summary.unrealized_pnl >= 0) {
        try out.print("+{d:>9.2}", .{summary.unrealized_pnl});
    } else {
        try out.print("{d:>10.2}", .{summary.unrealized_pnl});
    }
    try out.print(" {s:>7}\n", .{"100.0%"});
    try out.print("\n Cost Basis: ${d:.2}\n", .{summary.total_cost});
    try out.print(" Market Value: ${d:.2}\n", .{summary.total_value});
    try out.print(" Unrealized P&L: ", .{});
    // Sign is printed manually so the '$' sits between sign and digits.
    if (summary.unrealized_pnl >= 0) {
        try out.print("+${d:.2} ({d:.2}%)\n", .{ summary.unrealized_pnl, summary.unrealized_return * 100.0 });
    } else {
        try out.print("-${d:.2} ({d:.2}%)\n", .{ -summary.unrealized_pnl, summary.unrealized_return * 100.0 });
    }
    if (summary.realized_pnl != 0) {
        try out.print(" Realized P&L: ", .{});
        if (summary.realized_pnl >= 0) {
            try out.print("+${d:.2}\n", .{summary.realized_pnl});
        } else {
            try out.print("-${d:.2}\n", .{-summary.realized_pnl});
        }
    }
    // Risk metrics for each position if we have candles cached.
    // NOTE(review): `store` is never deinit'ed -- confirm zfin.cache.Store
    // holds no resources that need releasing.
    var store = zfin.cache.Store.init(allocator, config.cache_dir);
    var any_risk = false;
    for (summary.allocations) |a| {
        // A cache miss or read error just skips the symbol.
        const cached = store.readRaw(a.symbol, .candles_daily) catch null;
        if (cached) |cdata| {
            defer allocator.free(cdata);
            if (zfin.cache.Store.deserializeCandles(allocator, cdata)) |candles| {
                defer allocator.free(candles);
                if (zfin.risk.computeRisk(candles)) |metrics| {
                    // Print the sub-table header lazily, on the first hit only.
                    if (!any_risk) {
                        try out.print("\n Risk Metrics (from cached price data):\n", .{});
                        try out.print(" {s:>6} {s:>10} {s:>8} {s:>10}\n", .{
                            "Symbol", "Volatility", "Sharpe", "Max DD",
                        });
                        try out.print(" {s:->6} {s:->10} {s:->8} {s:->10}\n", .{
                            "", "", "", "",
                        });
                        any_risk = true;
                    }
                    try out.print(" {s:>6} {d:>9.1}% {d:>8.2} {d:>9.1}%", .{
                        a.symbol, metrics.volatility * 100.0, metrics.sharpe, metrics.max_drawdown * 100.0,
                    });
                    if (metrics.drawdown_trough) |dt| {
                        var db: [10]u8 = undefined;
                        try out.print(" (trough {s})", .{dt.format(&db)});
                    }
                    try out.print("\n", .{});
                }
            } else |_| {}
        }
    }
    try out.print("\n", .{});
    try out.flush();
}
/// Format a large value with a T/B/M suffix into a fixed 15-byte buffer,
/// left-justified and space-padded. Values below one million in magnitude
/// print with no suffix. Thresholds compare the absolute value so negative
/// amounts are abbreviated too (previously e.g. -2e12 printed all 13 digits
/// while +2e12 printed "2.0T"); the sign is preserved by formatting `val`.
/// On the (pathological) chance a value does not fit, the buffer stays blank.
fn formatLargeNum(val: f64) [15]u8 {
    var result: [15]u8 = .{' '} ** 15;
    const mag = @abs(val);
    if (mag >= 1_000_000_000_000) {
        _ = std.fmt.bufPrint(&result, "{d:.1}T", .{val / 1_000_000_000_000}) catch {};
    } else if (mag >= 1_000_000_000) {
        _ = std.fmt.bufPrint(&result, "{d:.1}B", .{val / 1_000_000_000}) catch {};
    } else if (mag >= 1_000_000) {
        _ = std.fmt.bufPrint(&result, "{d:.1}M", .{val / 1_000_000}) catch {};
    } else {
        _ = std.fmt.bufPrint(&result, "{d:.0}", .{val}) catch {};
    }
    return result;
}
/// Handle the `cache` CLI verb: "stats" lists cached symbol directories,
/// "clear" wipes the cache, anything else prints usage to stderr.
fn cmdCache(allocator: std.mem.Allocator, config: zfin.Config, subcommand: []const u8) !void {
    if (std.mem.eql(u8, subcommand, "clear")) {
        var store = zfin.cache.Store.init(allocator, config.cache_dir);
        try store.clearAll();
        try stdout_print("Cache cleared.\n");
        return;
    }
    if (!std.mem.eql(u8, subcommand, "stats")) {
        try stderr_print("Unknown cache subcommand. Use 'stats' or 'clear'.\n");
        return;
    }
    var scratch: [4096]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&scratch);
    const w = &stdout_writer.interface;
    try w.print("Cache directory: {s}\n", .{config.cache_dir});
    // A missing or unopenable cache directory is reported as empty.
    std.fs.cwd().access(config.cache_dir, .{}) catch {
        try w.print(" (empty -- no cached data)\n", .{});
        try w.flush();
        return;
    };
    var dir = std.fs.cwd().openDir(config.cache_dir, .{ .iterate = true }) catch {
        try w.print(" (empty -- no cached data)\n", .{});
        try w.flush();
        return;
    };
    defer dir.close();
    // One line per cached symbol directory; non-directory entries are skipped.
    var symbol_count: usize = 0;
    var entries = dir.iterate();
    while (entries.next() catch null) |entry| {
        if (entry.kind != .directory) continue;
        try w.print(" {s}/\n", .{entry.name});
        symbol_count += 1;
    }
    if (symbol_count == 0) {
        try w.print(" (empty -- no cached data)\n", .{});
    } else {
        try w.print("\n {d} symbol(s) cached\n", .{symbol_count});
    }
    try w.flush();
}
/// Current UTC calendar date derived from the wall clock (floor division
/// keeps pre-epoch timestamps correct).
fn todayDate() zfin.Date {
    const now_s = std.time.timestamp();
    return .{ .days = @intCast(@divFloor(now_s, std.time.s_per_day)) };
}
/// Write `msg` to stdout through a small buffered writer and flush.
fn stdout_print(msg: []const u8) !void {
    var scratch: [4096]u8 = undefined;
    var file_writer = std.fs.File.stdout().writer(&scratch);
    const sink = &file_writer.interface;
    try sink.writeAll(msg);
    try sink.flush();
}
/// Write `msg` to stderr through a small buffered writer and flush.
fn stderr_print(msg: []const u8) !void {
    var scratch: [1024]u8 = undefined;
    var file_writer = std.fs.File.stderr().writer(&scratch);
    const sink = &file_writer.interface;
    try sink.writeAll(msg);
    try sink.flush();
}

93
src/config.zig Normal file
View file

@ -0,0 +1,93 @@
const std = @import("std");
/// Runtime configuration: provider API keys plus the on-disk cache location.
/// Keys resolve from the process environment first, then a `.env` file.
pub const Config = struct {
    twelvedata_key: ?[]const u8 = null,
    polygon_key: ?[]const u8 = null,
    finnhub_key: ?[]const u8 = null,
    alphavantage_key: ?[]const u8 = null,
    cache_dir: []const u8,
    allocator: ?std.mem.Allocator = null,
    // Backing buffer for values served out of a `.env` file; the key slices
    // above may point into it, so it must outlive them.
    env_buf: ?[]const u8 = null,
    // True when `cache_dir` was heap-allocated by `fromEnv` (default path);
    // false when it aliases the environment or `env_buf`. Tracked explicitly
    // so `deinit` need not re-query the environment -- the old re-query could
    // leak or skip the free if ZFIN_CACHE_DIR changed after `fromEnv`.
    cache_dir_owned: bool = false,
    pub fn fromEnv(allocator: std.mem.Allocator) Config {
        var self = Config{
            .cache_dir = undefined,
            .allocator = allocator,
        };
        // Try loading .env file from the project directory or home directory
        self.env_buf = loadEnvFile(allocator);
        self.twelvedata_key = self.resolve("TWELVEDATA_API_KEY");
        self.polygon_key = self.resolve("POLYGON_API_KEY");
        self.finnhub_key = self.resolve("FINNHUB_API_KEY");
        self.alphavantage_key = self.resolve("ALPHAVANTAGE_API_KEY");
        const env_cache = self.resolve("ZFIN_CACHE_DIR");
        self.cache_dir = env_cache orelse blk: {
            // Default to ~/.cache/zfin; this path is owned and freed in deinit.
            self.cache_dir_owned = true;
            const home = std.posix.getenv("HOME") orelse "/tmp";
            break :blk std.fs.path.join(allocator, &.{ home, ".cache", "zfin" }) catch @panic("OOM");
        };
        return self;
    }
    pub fn deinit(self: *Config) void {
        if (self.allocator) |a| {
            if (self.env_buf) |buf| a.free(buf);
            if (self.cache_dir_owned) a.free(self.cache_dir);
        }
    }
    /// True when at least one provider API key was found.
    pub fn hasAnyKey(self: Config) bool {
        return self.twelvedata_key != null or
            self.polygon_key != null or
            self.finnhub_key != null or
            self.alphavantage_key != null;
    }
    /// Look up a key: environment variable first, then .env file fallback.
    fn resolve(self: Config, key: []const u8) ?[]const u8 {
        if (std.posix.getenv(key)) |v| return v;
        return envFileGet(self.env_buf, key);
    }
};
/// Parse a KEY=VALUE line from .env content. Returns value for the given key.
fn envFileGet(buf: ?[]const u8, key: []const u8) ?[]const u8 {
const data = buf orelse return null;
var iter = std.mem.splitScalar(u8, data, '\n');
while (iter.next()) |line| {
const trimmed = std.mem.trim(u8, line, &std.ascii.whitespace);
if (trimmed.len == 0 or trimmed[0] == '#') continue;
if (std.mem.indexOfScalar(u8, trimmed, '=')) |eq| {
const k = std.mem.trim(u8, trimmed[0..eq], &std.ascii.whitespace);
if (std.mem.eql(u8, k, key)) {
return std.mem.trim(u8, trimmed[eq + 1 ..], &std.ascii.whitespace);
}
}
}
return null;
}
/// Try to load .env from the executable's directory, then cwd.
fn loadEnvFile(allocator: std.mem.Allocator) ?[]const u8 {
// Try relative to the executable
const exe_dir = std.fs.selfExeDirPathAlloc(allocator) catch null;
defer if (exe_dir) |d| allocator.free(d);
if (exe_dir) |dir| {
const path = std.fs.path.join(allocator, &.{ dir, "..", ".env" }) catch null;
defer if (path) |p| allocator.free(p);
if (path) |p| {
if (std.fs.cwd().readFileAlloc(allocator, p, 4096)) |data| return data else |_| {}
}
}
// Try cwd
return std.fs.cwd().readFileAlloc(allocator, ".env", 4096) catch null;
}

27
src/main.zig Normal file
View file

@ -0,0 +1,27 @@
const std = @import("std");
const zfin = @import("zfin");
/// Entry point.
/// NOTE(review): this is still the stock `zig init` scaffold (banner print
/// plus the library's demo `bufferedPrint`) -- confirm whether main should
/// instead dispatch the CLI commands defined in the zfin sources.
pub fn main() !void {
    // Prints to stderr, ignoring potential errors.
    std.debug.print("All your {s} are belong to us.\n", .{"codebase"});
    try zfin.bufferedPrint();
}
// Scaffold test from `zig init`: exercises ArrayList append/pop under the
// testing allocator, which fails the test on any leak.
test "simple test" {
    const gpa = std.testing.allocator;
    var list: std.ArrayList(i32) = .empty;
    defer list.deinit(gpa); // Try commenting this out and see if zig detects the memory leak!
    try list.append(gpa, 42);
    try std.testing.expectEqual(@as(i32, 42), list.pop());
}
// Scaffold fuzz test from `zig init`: std.testing.fuzz drives testOne with
// generated inputs; only meaningful when run with `zig build test --fuzz`.
test "fuzz example" {
    const Context = struct {
        fn testOne(context: @This(), input: []const u8) anyerror!void {
            _ = context;
            // Try passing `--fuzz` to `zig build test` and see if it manages to fail this test case!
            try std.testing.expect(!std.mem.eql(u8, "canyoufindme", input));
        }
    };
    try std.testing.fuzz(Context{}, Context.testOne, .{});
}

14
src/models/candle.zig Normal file
View file

@ -0,0 +1,14 @@
const Date = @import("date.zig").Date;
/// A single OHLCV bar, normalized from any provider.
/// A single OHLCV bar, normalized from any provider.
pub const Candle = struct {
    /// Trading session date for this bar.
    date: Date,
    /// Opening price.
    open: f64,
    /// Session high.
    high: f64,
    /// Session low.
    low: f64,
    /// Unadjusted closing price.
    close: f64,
    /// Close price adjusted for splits and dividends (for total return calculations).
    /// If the provider does not supply this, it equals `close`.
    adj_close: f64,
    /// Units traded during the session.
    volume: u64,
};

199
src/models/date.zig Normal file
View file

@ -0,0 +1,199 @@
/// Date represented as days since epoch (compact, sortable).
/// Use helper functions for formatting and parsing.
/// Date represented as days since epoch (compact, sortable).
/// Use helper functions for formatting and parsing.
pub const Date = struct {
    /// Days since 1970-01-01 (may be negative for pre-epoch dates)
    days: i32,
    pub const epoch = Date{ .days = 0 };
    /// Calendar year of this date.
    pub fn year(self: Date) i16 {
        return epochDaysToYmd(self.days).year;
    }
    /// Calendar month, 1-12.
    pub fn month(self: Date) u8 {
        return epochDaysToYmd(self.days).month;
    }
    /// Day of month, 1-31.
    pub fn day(self: Date) u8 {
        return epochDaysToYmd(self.days).day;
    }
    pub fn fromYmd(y: i16, m: u8, d: u8) Date {
        return .{ .days = ymdToEpochDays(y, m, d) };
    }
    /// Parse "YYYY-MM-DD" format
    pub fn parse(str: []const u8) !Date {
        if (str.len != 10 or str[4] != '-' or str[7] != '-') return error.InvalidDateFormat;
        const y = std.fmt.parseInt(i16, str[0..4], 10) catch return error.InvalidDateFormat;
        const m = std.fmt.parseInt(u8, str[5..7], 10) catch return error.InvalidDateFormat;
        const d = std.fmt.parseInt(u8, str[8..10], 10) catch return error.InvalidDateFormat;
        return fromYmd(y, m, d);
    }
    /// Format as "YYYY-MM-DD" into the caller's buffer.
    /// Only years 0-9999 render correctly; a negative year trips the
    /// @intCast to u16 below.
    pub fn format(self: Date, buf: *[10]u8) []const u8 {
        const ymd = epochDaysToYmd(self.days);
        const y: u16 = @intCast(ymd.year);
        buf[0] = '0' + @as(u8, @intCast(y / 1000));
        buf[1] = '0' + @as(u8, @intCast((y / 100) % 10));
        buf[2] = '0' + @as(u8, @intCast((y / 10) % 10));
        buf[3] = '0' + @as(u8, @intCast(y % 10));
        buf[4] = '-';
        buf[5] = '0' + @as(u8, @intCast(ymd.month / 10));
        buf[6] = '0' + @as(u8, @intCast(ymd.month % 10));
        buf[7] = '-';
        buf[8] = '0' + @as(u8, @intCast(ymd.day / 10));
        buf[9] = '0' + @as(u8, @intCast(ymd.day % 10));
        return buf[0..10];
    }
    /// Day of week: 0=Monday, 1=Tuesday, ..., 4=Friday, 5=Saturday, 6=Sunday.
    pub fn dayOfWeek(self: Date) u8 {
        // 1970-01-01 was a Thursday (day 3 in the 0=Mon scheme). Zig's @mod
        // with a positive divisor always yields a non-negative result, so no
        // extra adjustment is needed for pre-epoch dates (the previous
        // `if (d < 0) d + 7` branch was dead code).
        return @intCast(@mod(self.days + 3, @as(i32, 7)));
    }
    pub fn eql(a: Date, b: Date) bool {
        return a.days == b.days;
    }
    pub fn lessThan(a: Date, b: Date) bool {
        return a.days < b.days;
    }
    pub fn addDays(self: Date, n: i32) Date {
        return .{ .days = self.days + n };
    }
    /// Subtract N calendar years. Clamps Feb 29 -> Feb 28 if target is not a leap year.
    pub fn subtractYears(self: Date, n: u16) Date {
        const ymd = epochDaysToYmd(self.days);
        const new_year: i16 = ymd.year - @as(i16, @intCast(n));
        const new_day: u8 = if (ymd.month == 2 and ymd.day == 29 and !isLeapYear(new_year)) 28 else ymd.day;
        return .{ .days = ymdToEpochDays(new_year, ymd.month, new_day) };
    }
    /// Return the last day of the previous month.
    /// E.g., if self is 2026-02-24, returns 2026-01-31.
    pub fn lastDayOfPriorMonth(self: Date) Date {
        const ymd = epochDaysToYmd(self.days);
        if (ymd.month == 1) {
            return fromYmd(ymd.year - 1, 12, 31);
        } else {
            return fromYmd(ymd.year, ymd.month - 1, daysInMonth(ymd.year, ymd.month - 1));
        }
    }
    /// Number of days in month `m` (1-12) of year `y`, leap-aware.
    fn daysInMonth(y: i16, m: u8) u8 {
        const table = [_]u8{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
        if (m == 2 and isLeapYear(y)) return 29;
        return table[m - 1];
    }
    /// Returns approximate number of years between two dates (365.25-day years)
    pub fn yearsBetween(from: Date, to: Date) f64 {
        return @as(f64, @floatFromInt(to.days - from.days)) / 365.25;
    }
    /// Gregorian leap-year rule. Uses signed @mod so years <= 0 are handled
    /// correctly; the previous u16 @bitCast mapped negative years to large
    /// unsigned values (e.g. -1 -> 65535) and gave wrong answers.
    fn isLeapYear(y: i16) bool {
        return (@mod(y, 4) == 0 and @mod(y, 100) != 0) or @mod(y, 400) == 0;
    }
};
/// Plain year/month/day triple used internally by the converters below.
const Ymd = struct { year: i16, month: u8, day: u8 };
/// Convert days-since-1970 to a proleptic-Gregorian calendar date.
/// Algorithm from http://howardhinnant.github.io/date_algorithms.html
/// ("civil_from_days"): works in 400-year eras of 146097 days, with a
/// March-based year so leap day lands at the end of the year.
fn epochDaysToYmd(days: i32) Ymd {
    // Using i64 throughout to avoid overflow on unsigned intermediate values.
    const z: i64 = @as(i64, days) + 719468; // shift epoch from 1970-01-01 to 0000-03-01
    const era: i64 = @divFloor(if (z >= 0) z else z - 146096, 146097);
    const doe_i: i64 = z - era * 146097; // day-of-era, [0, 146096]
    const doe: u64 = @intCast(doe_i);
    const yoe_val: u64 = (doe - doe / 1460 + doe / 36524 - doe / 146096) / 365; // year-of-era, [0, 399]
    const y: i64 = @as(i64, @intCast(yoe_val)) + era * 400;
    const doy: u64 = doe - (365 * yoe_val + yoe_val / 4 - yoe_val / 100); // day-of-(March-based)-year
    const mp: u64 = (5 * doy + 2) / 153; // March-based month index, [0, 11]
    const d: u8 = @intCast(doy - (153 * mp + 2) / 5 + 1);
    const m_raw: u64 = if (mp < 10) mp + 3 else mp - 9; // back to Jan-based month, [1, 12]
    const m: u8 = @intCast(m_raw);
    const y_adj: i16 = @intCast(if (m <= 2) y + 1 else y); // Jan/Feb belong to the next civil year
    return .{ .year = y_adj, .month = m, .day = d };
}
/// Convert a proleptic-Gregorian calendar date to days since 1970-01-01.
/// Inverse of `epochDaysToYmd`; same Hinnant "days_from_civil" algorithm.
fn ymdToEpochDays(y: i16, m: u8, d: u8) i32 {
    // Shift Jan/Feb to the end of the previous, March-based year.
    const y_adj: i64 = @as(i64, y) - @as(i64, if (m <= 2) @as(i64, 1) else @as(i64, 0));
    const era: i64 = @divFloor(if (y_adj >= 0) y_adj else y_adj - 399, 400);
    const yoe: u64 = @intCast(y_adj - era * 400); // year-of-era, [0, 399]
    const m_adj: u64 = if (m > 2) @as(u64, m) - 3 else @as(u64, m) + 9; // March-based month, [0, 11]
    const doy: u64 = (153 * m_adj + 2) / 5 + @as(u64, d) - 1; // day-of-year, [0, 365]
    // Plain subtraction: yoe*365 + yoe/4 always dominates yoe/100, so the
    // previous saturating `-|` could never trigger and only obscured intent.
    const doe: u64 = yoe * 365 + yoe / 4 - yoe / 100 + doy; // day-of-era, [0, 146096]
    return @intCast(era * 146097 + @as(i64, @intCast(doe)) - 719468);
}
const std = @import("std");
// Ymd -> Date -> Ymd accessors agree.
test "date roundtrip" {
    const d = Date.fromYmd(2024, 6, 15);
    try std.testing.expectEqual(@as(i16, 2024), d.year());
    try std.testing.expectEqual(@as(u8, 6), d.month());
    try std.testing.expectEqual(@as(u8, 15), d.day());
}
// "YYYY-MM-DD" string parses to the matching components.
test "date parse" {
    const d = try Date.parse("2024-06-15");
    try std.testing.expectEqual(@as(i16, 2024), d.year());
    try std.testing.expectEqual(@as(u8, 6), d.month());
    try std.testing.expectEqual(@as(u8, 15), d.day());
}
// format() zero-pads single-digit month/day.
test "date format" {
    const d = Date.fromYmd(2024, 1, 5);
    var buf: [10]u8 = undefined;
    const s = d.format(&buf);
    try std.testing.expectEqualStrings("2024-01-05", s);
}
// subtractYears keeps month/day, including the Feb 29 -> Feb 28 clamp.
test "subtractYears" {
    const d = Date.fromYmd(2026, 2, 24);
    const d1 = d.subtractYears(1);
    try std.testing.expectEqual(@as(i16, 2025), d1.year());
    try std.testing.expectEqual(@as(u8, 2), d1.month());
    try std.testing.expectEqual(@as(u8, 24), d1.day());
    const d3 = d.subtractYears(3);
    try std.testing.expectEqual(@as(i16, 2023), d3.year());
    // Leap year edge case: Feb 29 2024 - 1 year = Feb 28 2023
    const leap = Date.fromYmd(2024, 2, 29);
    const non_leap = leap.subtractYears(1);
    try std.testing.expectEqual(@as(i16, 2023), non_leap.year());
    try std.testing.expectEqual(@as(u8, 2), non_leap.month());
    try std.testing.expectEqual(@as(u8, 28), non_leap.day());
}
// lastDayOfPriorMonth handles year rollover and leap/non-leap Februaries.
test "lastDayOfPriorMonth" {
    // Feb 24 -> Jan 31
    const d1 = Date.fromYmd(2026, 2, 24).lastDayOfPriorMonth();
    try std.testing.expectEqual(@as(i16, 2026), d1.year());
    try std.testing.expectEqual(@as(u8, 1), d1.month());
    try std.testing.expectEqual(@as(u8, 31), d1.day());
    // Jan 15 -> Dec 31 of prior year
    const d2 = Date.fromYmd(2026, 1, 15).lastDayOfPriorMonth();
    try std.testing.expectEqual(@as(i16, 2025), d2.year());
    try std.testing.expectEqual(@as(u8, 12), d2.month());
    try std.testing.expectEqual(@as(u8, 31), d2.day());
    // Mar 1 leap year -> Feb 29
    const d3 = Date.fromYmd(2024, 3, 1).lastDayOfPriorMonth();
    try std.testing.expectEqual(@as(u8, 2), d3.month());
    try std.testing.expectEqual(@as(u8, 29), d3.day());
    // Mar 1 non-leap -> Feb 28
    const d4 = Date.fromYmd(2025, 3, 1).lastDayOfPriorMonth();
    try std.testing.expectEqual(@as(u8, 2), d4.month());
    try std.testing.expectEqual(@as(u8, 28), d4.day());
}

27
src/models/dividend.zig Normal file
View file

@ -0,0 +1,27 @@
const Date = @import("date.zig").Date;
/// Classification of a dividend payment.
pub const DividendType = enum {
    /// Ordinary recurring dividend.
    regular,
    /// One-time special distribution.
    special,
    /// Extra payment on top of the regular schedule.
    supplemental,
    /// Paid on no fixed schedule.
    irregular,
    /// Provider did not classify the distribution.
    unknown,
};
/// A single dividend payment record.
pub const Dividend = struct {
    /// Date the stock begins trading without the dividend
    ex_date: Date,
    /// Date the dividend is paid (may be null if unknown)
    pay_date: ?Date = null,
    /// Date of record for eligibility
    record_date: ?Date = null,
    /// Cash amount per share
    amount: f64,
    /// How many times per year this dividend is expected
    frequency: ?u8 = null,
    /// Classification of the dividend
    distribution_type: DividendType = .unknown,
    /// Currency code (e.g., "USD")
    currency: ?[]const u8 = null,
};

51
src/models/earnings.zig Normal file
View file

@ -0,0 +1,51 @@
const Date = @import("date.zig").Date;
/// When, relative to market hours, earnings are reported.
pub const ReportTime = enum {
    bmo, // before market open
    amc, // after market close
    dmh, // during market hours
    unknown,
};
/// An earnings event (historical or upcoming).
pub const EarningsEvent = struct {
    symbol: []const u8,
    date: Date,
    /// Estimated EPS (analyst consensus)
    estimate: ?f64 = null,
    /// Actual reported EPS (null if upcoming)
    actual: ?f64 = null,
    /// Surprise amount (actual - estimate)
    surprise: ?f64 = null,
    /// Surprise percentage
    surprise_percent: ?f64 = null,
    /// Fiscal quarter (1-4)
    quarter: ?u8 = null,
    /// Fiscal year
    fiscal_year: ?i16 = null,
    /// Revenue actual
    revenue_actual: ?f64 = null,
    /// Revenue estimate
    revenue_estimate: ?f64 = null,
    /// When earnings are reported relative to market hours
    report_time: ReportTime = .unknown,
    /// An event with no reported EPS is treated as not-yet-reported.
    pub fn isFuture(self: EarningsEvent) bool {
        return self.actual == null;
    }
    /// Surprise in EPS units: the provider-supplied value when present,
    /// otherwise actual - estimate; null when either input is missing.
    pub fn surpriseAmount(self: EarningsEvent) ?f64 {
        if (self.surprise) |s| return s;
        const act = self.actual orelse return null;
        const est = self.estimate orelse return null;
        return act - est;
    }
    /// Surprise as a percentage of |estimate|: provider value when present,
    /// otherwise computed; null when inputs are missing or estimate is zero
    /// (division guard).
    pub fn surprisePct(self: EarningsEvent) ?f64 {
        if (self.surprise_percent) |s| return s;
        const act = self.actual orelse return null;
        const est = self.estimate orelse return null;
        if (est == 0) return null;
        return ((act - est) / @abs(est)) * 100.0;
    }
};

View file

@ -0,0 +1,43 @@
const Date = @import("date.zig").Date;
/// Top holding in an ETF.
/// Top holding in an ETF.
pub const Holding = struct {
    /// Ticker of the held security; null when the provider gives only a name.
    symbol: ?[]const u8 = null,
    /// Display name of the held security.
    name: []const u8,
    /// Portfolio weight as a decimal fraction.
    weight: f64,
};
/// Sector allocation in an ETF.
pub const SectorWeight = struct {
    /// Sector label as supplied by the provider.
    sector: []const u8,
    /// Allocation as a decimal fraction.
    weight: f64,
};
/// ETF profile and metadata.
/// String/slice fields are owned by whoever built the profile (the caller
/// frees holdings/sectors -- see cmdEtf in the CLI).
pub const EtfProfile = struct {
    symbol: []const u8,
    name: ?[]const u8 = null,
    asset_class: ?[]const u8 = null,
    /// Expense ratio as a decimal (e.g., 0.0003 for 0.03%)
    expense_ratio: ?f64 = null,
    /// Net assets in USD
    net_assets: ?f64 = null,
    /// Morningstar-style category (e.g., "Large Blend")
    category: ?[]const u8 = null,
    /// Investment focus description
    description: ?[]const u8 = null,
    /// Top holdings
    holdings: ?[]const Holding = null,
    /// Number of total holdings in the fund
    total_holdings: ?u32 = null,
    /// Sector allocations
    sectors: ?[]const SectorWeight = null,
    /// Dividend yield as decimal (e.g., 0.0111 for 1.11%)
    dividend_yield: ?f64 = null,
    /// Portfolio turnover as decimal
    portfolio_turnover: ?f64 = null,
    /// Fund inception date
    inception_date: ?Date = null,
    /// Whether the fund is leveraged
    leveraged: bool = false,
};

35
src/models/option.zig Normal file
View file

@ -0,0 +1,35 @@
const Date = @import("date.zig").Date;
/// Side of an options contract.
pub const ContractType = enum {
    call,
    put,
};
/// A single options contract in a chain.
/// All market-data fields are optional: providers frequently omit quotes,
/// volume, open interest, or greeks for illiquid strikes.
pub const OptionContract = struct {
    /// Full OCC symbol (e.g., "O:AAPL211022C000150000")
    contract_symbol: ?[]const u8 = null,
    contract_type: ContractType,
    /// Strike price.
    strike: f64,
    /// Contract expiration date.
    expiration: Date,
    bid: ?f64 = null,
    ask: ?f64 = null,
    last_price: ?f64 = null,
    volume: ?u64 = null,
    open_interest: ?u64 = null,
    implied_volatility: ?f64 = null,
    // Greeks
    delta: ?f64 = null,
    gamma: ?f64 = null,
    theta: ?f64 = null,
    vega: ?f64 = null,
};
/// Full options chain for an underlying asset at a given expiration.
pub const OptionsChain = struct {
    underlying_symbol: []const u8,
    /// Spot price of the underlying, when the provider supplies it.
    underlying_price: ?f64 = null,
    expiration: Date,
    calls: []const OptionContract,
    puts: []const OptionContract,
};

231
src/models/portfolio.zig Normal file
View file

@ -0,0 +1,231 @@
const std = @import("std");
const Date = @import("date.zig").Date;
/// A single lot in a portfolio -- one purchase/sale event.
/// Open lots have no close_date/close_price.
/// Closed lots have both.
/// A single lot in a portfolio -- one purchase/sale event.
/// Open lots have no close_date/close_price.
/// Closed lots have both.
pub const Lot = struct {
    symbol: []const u8,
    shares: f64,
    open_date: Date,
    open_price: f64,
    close_date: ?Date = null,
    close_price: ?f64 = null,
    /// Optional note/tag for the lot
    note: ?[]const u8 = null,
    /// Optional account identifier (e.g. "Roth IRA", "Brokerage")
    account: ?[]const u8 = null,
    /// A lot with no close date is still held.
    pub fn isOpen(self: Lot) bool {
        return self.close_date == null;
    }
    /// shares * open_price.
    pub fn costBasis(self: Lot) f64 {
        return self.shares * self.open_price;
    }
    /// shares * current_price (ignores close state).
    pub fn marketValue(self: Lot, current_price: f64) f64 {
        return self.shares * current_price;
    }
    /// P&L locked in at close, or null while the lot is open.
    pub fn realizedPnl(self: Lot) ?f64 {
        const cp = self.close_price orelse return null;
        return self.shares * (cp - self.open_price);
    }
    /// P&L at `current_price` as if the lot were closed now.
    pub fn unrealizedPnl(self: Lot, current_price: f64) f64 {
        return self.shares * (current_price - self.open_price);
    }
    /// Fractional return: uses close_price for closed lots, otherwise
    /// `current_price`. Returns 0 when open_price is 0 (division guard).
    pub fn returnPct(self: Lot, current_price: f64) f64 {
        if (self.open_price == 0) return 0;
        const price = if (self.close_price) |cp| cp else current_price;
        return (price / self.open_price) - 1.0;
    }
};
/// Aggregated position for a single symbol across multiple lots.
pub const Position = struct {
    symbol: []const u8,
    /// Total open shares
    shares: f64,
    /// Weighted average cost basis per share (open lots only)
    avg_cost: f64,
    /// Total cost basis of open lots
    total_cost: f64,
    /// Number of open lots
    open_lots: u32,
    /// Number of closed lots
    closed_lots: u32,
    /// Total realized P&L from closed lots
    realized_pnl: f64,
};
/// A portfolio is a collection of lots.
/// A portfolio is a collection of lots.
/// `deinit` assumes every lot's strings were allocated with `allocator`
/// (the deserializer's contract); stack-built test portfolios must not
/// call it.
pub const Portfolio = struct {
    lots: []Lot,
    allocator: std.mem.Allocator,
    pub fn deinit(self: *Portfolio) void {
        for (self.lots) |lot| {
            self.allocator.free(lot.symbol);
            if (lot.note) |n| self.allocator.free(n);
            if (lot.account) |a| self.allocator.free(a);
        }
        self.allocator.free(self.lots);
    }
    /// Get all unique symbols in the portfolio.
    /// Returned strings alias `self.lots` -- free only the outer slice,
    /// and do not use it after the portfolio is deinitialized.
    pub fn symbols(self: Portfolio, allocator: std.mem.Allocator) ![][]const u8 {
        var seen = std.StringHashMap(void).init(allocator);
        defer seen.deinit();
        for (self.lots) |lot| {
            try seen.put(lot.symbol, {});
        }
        var result = std.ArrayList([]const u8).empty;
        errdefer result.deinit(allocator);
        var iter = seen.keyIterator();
        while (iter.next()) |key| {
            try result.append(allocator, key.*);
        }
        return result.toOwnedSlice(allocator);
    }
    /// Get all lots for a given symbol.
    /// Lots are copied by value; the slice is caller-owned.
    pub fn lotsForSymbol(self: Portfolio, allocator: std.mem.Allocator, symbol: []const u8) ![]Lot {
        var result = std.ArrayList(Lot).empty;
        errdefer result.deinit(allocator);
        for (self.lots) |lot| {
            if (std.mem.eql(u8, lot.symbol, symbol)) {
                try result.append(allocator, lot);
            }
        }
        return result.toOwnedSlice(allocator);
    }
    /// Aggregate lots into positions by symbol.
    /// Open lots contribute shares/cost; closed lots contribute counts and
    /// realized P&L. Position.symbol aliases the first lot seen for that
    /// symbol. Returned slice is caller-owned; order is hash-map order
    /// (unspecified).
    pub fn positions(self: Portfolio, allocator: std.mem.Allocator) ![]Position {
        var map = std.StringHashMap(Position).init(allocator);
        defer map.deinit();
        for (self.lots) |lot| {
            const entry = try map.getOrPut(lot.symbol);
            if (!entry.found_existing) {
                entry.value_ptr.* = .{
                    .symbol = lot.symbol,
                    .shares = 0,
                    .avg_cost = 0,
                    .total_cost = 0,
                    .open_lots = 0,
                    .closed_lots = 0,
                    .realized_pnl = 0,
                };
            }
            if (lot.isOpen()) {
                entry.value_ptr.shares += lot.shares;
                entry.value_ptr.total_cost += lot.costBasis();
                entry.value_ptr.open_lots += 1;
            } else {
                entry.value_ptr.closed_lots += 1;
                entry.value_ptr.realized_pnl += lot.realizedPnl() orelse 0;
            }
        }
        // Compute avg_cost (guarded: fully-closed symbols keep avg_cost 0).
        var iter = map.valueIterator();
        while (iter.next()) |pos| {
            if (pos.shares > 0) {
                pos.avg_cost = pos.total_cost / pos.shares;
            }
        }
        var result = std.ArrayList(Position).empty;
        errdefer result.deinit(allocator);
        var viter = map.valueIterator();
        while (viter.next()) |pos| {
            try result.append(allocator, pos.*);
        }
        return result.toOwnedSlice(allocator);
    }
    /// Total cost basis of all open lots.
    pub fn totalCostBasis(self: Portfolio) f64 {
        var total: f64 = 0;
        for (self.lots) |lot| {
            if (lot.isOpen()) total += lot.costBasis();
        }
        return total;
    }
    /// Total realized P&L from all closed lots.
    pub fn totalRealizedPnl(self: Portfolio) f64 {
        var total: f64 = 0;
        for (self.lots) |lot| {
            if (lot.realizedPnl()) |pnl| total += pnl;
        }
        return total;
    }
};
// Open-lot accounting: cost basis, market value, unrealized P&L, and
// realizedPnl() == null while still open.
test "lot basics" {
    const lot = Lot{
        .symbol = "AAPL",
        .shares = 10,
        .open_date = Date.fromYmd(2024, 1, 15),
        .open_price = 150.0,
    };
    try std.testing.expect(lot.isOpen());
    try std.testing.expectApproxEqAbs(@as(f64, 1500.0), lot.costBasis(), 0.01);
    try std.testing.expectApproxEqAbs(@as(f64, 2000.0), lot.marketValue(200.0), 0.01);
    try std.testing.expectApproxEqAbs(@as(f64, 500.0), lot.unrealizedPnl(200.0), 0.01);
    try std.testing.expect(lot.realizedPnl() == null);
}
// Closed-lot accounting: realized P&L and returnPct (close price wins even
// when the passed current price is 0).
test "closed lot" {
    const lot = Lot{
        .symbol = "AAPL",
        .shares = 10,
        .open_date = Date.fromYmd(2024, 1, 15),
        .open_price = 150.0,
        .close_date = Date.fromYmd(2024, 6, 15),
        .close_price = 200.0,
    };
    try std.testing.expect(!lot.isOpen());
    try std.testing.expectApproxEqAbs(@as(f64, 500.0), lot.realizedPnl().?, 0.01);
    try std.testing.expectApproxEqAbs(@as(f64, 0.3333), lot.returnPct(0), 0.001);
}
// positions() aggregates open lots and tallies closed lots per symbol.
test "portfolio positions" {
    const allocator = std.testing.allocator;
    var lots = [_]Lot{
        .{ .symbol = "AAPL", .shares = 10, .open_date = Date.fromYmd(2024, 1, 1), .open_price = 150.0 },
        .{ .symbol = "AAPL", .shares = 5, .open_date = Date.fromYmd(2024, 3, 1), .open_price = 160.0 },
        .{ .symbol = "VTI", .shares = 100, .open_date = Date.fromYmd(2024, 1, 1), .open_price = 220.0 },
        .{ .symbol = "AAPL", .shares = 3, .open_date = Date.fromYmd(2023, 6, 1), .open_price = 130.0, .close_date = Date.fromYmd(2024, 2, 1), .close_price = 155.0 },
    };
    var portfolio = Portfolio{ .lots = &lots, .allocator = allocator };
    // Don't call deinit since these are stack-allocated test strings
    const pos = try portfolio.positions(allocator);
    defer allocator.free(pos);
    try std.testing.expectEqual(@as(usize, 2), pos.len);
    // Find AAPL position (positions() order is unspecified hash order).
    var aapl: ?Position = null;
    for (pos) |p| {
        if (std.mem.eql(u8, p.symbol, "AAPL")) aapl = p;
    }
    try std.testing.expect(aapl != null);
    try std.testing.expectApproxEqAbs(@as(f64, 15.0), aapl.?.shares, 0.01);
    try std.testing.expectEqual(@as(u32, 2), aapl.?.open_lots);
    try std.testing.expectEqual(@as(u32, 1), aapl.?.closed_lots);
    try std.testing.expectApproxEqAbs(@as(f64, 75.0), aapl.?.realized_pnl, 0.01); // 3 * (155-130)
}

18
src/models/quote.zig Normal file
View file

@ -0,0 +1,18 @@
/// Real-time (or near-real-time) quote snapshot for a symbol.
/// String fields are provider-reported and not owned by this struct.
pub const Quote = struct {
symbol: []const u8,
name: []const u8,
exchange: []const u8,
// Quote timestamp as reported by the provider (string form, not parsed).
datetime: []const u8,
// OHLC + volume for the current/most recent session.
close: f64,
open: f64,
high: f64,
low: f64,
volume: u64,
previous_close: f64,
// Absolute and percentage change versus previous_close.
change: f64,
percent_change: f64,
average_volume: u64,
// 52-week trading range.
fifty_two_week_low: f64,
fifty_two_week_high: f64,
};

15
src/models/split.zig Normal file
View file

@ -0,0 +1,15 @@
const Date = @import("date.zig").Date;
/// A stock split event.
pub const Split = struct {
date: Date,
/// Number of shares after the split (e.g., 4 in a 4:1 split)
numerator: f64,
/// Number of shares before the split (e.g., 1 in a 4:1 split)
denominator: f64,
/// Returns the split ratio (e.g., 4.0 for a 4:1 split).
/// Reverse splits yield a ratio < 1 (e.g., 0.1 for a 1:10 split).
pub fn ratio(self: Split) f64 {
return self.numerator / self.denominator;
}
};

View file

@ -0,0 +1,19 @@
/// Classification of a tradable instrument. `unknown` is used when the
/// data provider does not report a type.
pub const SecurityType = enum {
stock,
etf,
mutual_fund,
index,
crypto,
forex,
unknown,
};
/// Basic information about a ticker symbol.
/// All fields except `symbol` are optional -- providers vary in coverage.
pub const TickerInfo = struct {
symbol: []const u8,
name: ?[]const u8 = null,
exchange: ?[]const u8 = null,
security_type: SecurityType = .unknown,
// Currency and country as reported by the provider; format is not
// normalized here -- NOTE(review): confirm whether these are ISO codes.
currency: ?[]const u8 = null,
country: ?[]const u8 = null,
};

158
src/net/http.zig Normal file
View file

@ -0,0 +1,158 @@
const std = @import("std");
/// Errors surfaced by the HTTP layer. Non-OK statuses are classified into
/// RateLimited/Unauthorized/NotFound/ServerError; any other non-OK status
/// maps to InvalidResponse (see Client.classifyResponse).
pub const HttpError = error{
RequestFailed,
RateLimited,
Unauthorized,
NotFound,
ServerError,
InvalidResponse,
OutOfMemory,
};
/// An HTTP response. `body` is heap-allocated and owned by the response;
/// callers must invoke `deinit` to release it.
pub const Response = struct {
status: std.http.Status,
body: []const u8,
// Allocator that owns `body`; used by deinit.
allocator: std.mem.Allocator,
pub fn deinit(self: *Response) void {
self.allocator.free(self.body);
}
};
/// Thin HTTP client wrapper with retry and error classification.
pub const Client = struct {
    allocator: std.mem.Allocator,
    http_client: std.http.Client,
    /// Number of additional attempts after the first failed request.
    max_retries: u8 = 3,
    /// Base delay for exponential backoff (doubled each retry).
    base_backoff_ms: u64 = 500,
    pub fn init(allocator: std.mem.Allocator) Client {
        return .{
            .allocator = allocator,
            .http_client = std.http.Client{ .allocator = allocator },
        };
    }
    pub fn deinit(self: *Client) void {
        self.http_client.deinit();
    }
    /// Perform a GET request with automatic retries on transient transport
    /// errors. HTTP-level failures (4xx/5xx) are classified and returned
    /// without retry.
    pub fn get(self: *Client, url: []const u8) HttpError!Response {
        var attempt: u8 = 0;
        while (true) : (attempt += 1) {
            if (self.doGet(url)) |response| {
                return classifyResponse(response);
            } else |_| {
                if (attempt >= self.max_retries) return HttpError.RequestFailed;
                // Exponential backoff: base * 2^attempt.
                const backoff = self.base_backoff_ms * std.math.shl(u64, 1, attempt);
                std.Thread.sleep(backoff * std.time.ns_per_ms);
            }
        }
    }
    fn doGet(self: *Client, url: []const u8) HttpError!Response {
        var aw: std.Io.Writer.Allocating = .init(self.allocator);
        const result = self.http_client.fetch(.{
            .location = .{ .url = url },
            .response_writer = &aw.writer,
        }) catch |err| {
            aw.deinit();
            // TLS 1.2-only hosts (e.g., finnhub.io) fail with Zig's TLS 1.3-only client.
            // Fall back to system curl for these cases.
            if (err == error.TlsInitializationFailed) {
                return curlGet(self.allocator, url);
            }
            return HttpError.RequestFailed;
        };
        const body = aw.toOwnedSlice() catch {
            aw.deinit();
            return HttpError.OutOfMemory;
        };
        return .{
            .status = result.status,
            .body = body,
            .allocator = self.allocator,
        };
    }
    /// Map an HTTP status to a success value or an HttpError.
    /// On a non-OK status the response body is freed here, since an error
    /// return hands the caller nothing to deinit.
    fn classifyResponse(response: Response) HttpError!Response {
        if (response.status == .ok) return response;
        // Fix: previously the body was leaked on every non-OK status --
        // the error return dropped the Response without freeing it.
        var failed = response;
        failed.deinit();
        return switch (response.status) {
            .too_many_requests => HttpError.RateLimited,
            .unauthorized, .forbidden => HttpError.Unauthorized,
            .not_found => HttpError.NotFound,
            .internal_server_error, .bad_gateway, .service_unavailable, .gateway_timeout => HttpError.ServerError,
            else => HttpError.InvalidResponse,
        };
    }
};
/// Fallback HTTP GET using system curl for TLS 1.2 hosts.
/// `-f` makes curl exit nonzero on HTTP >= 400, so a zero exit status is
/// reported as `.ok` with the captured stdout as the body.
fn curlGet(allocator: std.mem.Allocator, url: []const u8) HttpError!Response {
    const argv = [_][]const u8{ "curl", "-sS", "-f", "-L", "--max-time", "30", url };
    const run = std.process.Child.run(.{
        .allocator = allocator,
        .argv = &argv,
        .max_output_bytes = 10 * 1024 * 1024,
    }) catch return HttpError.RequestFailed;
    // stderr is never inspected; release it immediately.
    allocator.free(run.stderr);
    const exited_ok = run.term == .Exited and run.term.Exited == 0;
    if (!exited_ok) {
        allocator.free(run.stdout);
        return HttpError.RequestFailed;
    }
    return .{
        .status = .ok,
        .body = run.stdout,
        .allocator = allocator,
    };
}
/// Build a URL with query parameters.
///
/// Parameter names are appended verbatim; parameter values are
/// percent-encoded for characters that would corrupt the query string
/// (space, '&', '=', '+', '%', '#', '?'). Caller owns the returned slice.
pub fn buildUrl(
    allocator: std.mem.Allocator,
    base: []const u8,
    params: []const [2][]const u8,
) ![]const u8 {
    var buf: std.ArrayList(u8) = .empty;
    errdefer buf.deinit(allocator);
    try buf.appendSlice(allocator, base);
    for (params, 0..) |param, i| {
        try buf.append(allocator, if (i == 0) '?' else '&');
        try buf.appendSlice(allocator, param[0]);
        try buf.append(allocator, '=');
        for (param[1]) |c| {
            switch (c) {
                ' ' => try buf.appendSlice(allocator, "%20"),
                '&' => try buf.appendSlice(allocator, "%26"),
                '=' => try buf.appendSlice(allocator, "%3D"),
                '+' => try buf.appendSlice(allocator, "%2B"),
                // Fix: '%' must be escaped or literal percent signs in a
                // value decode incorrectly; '#' would truncate the query as
                // a fragment; '?' is also reserved here (RFC 3986).
                '%' => try buf.appendSlice(allocator, "%25"),
                '#' => try buf.appendSlice(allocator, "%23"),
                '?' => try buf.appendSlice(allocator, "%3F"),
                else => try buf.append(allocator, c),
            }
        }
    }
    return buf.toOwnedSlice(allocator);
}
// Two plain params: first joined with '?', the second with '&';
// values here contain no characters that require percent-encoding.
test "buildUrl" {
const allocator = std.testing.allocator;
const url = try buildUrl(allocator, "https://api.example.com/v1/data", &.{
.{ "symbol", "AAPL" },
.{ "apikey", "test123" },
});
defer allocator.free(url);
try std.testing.expectEqualStrings("https://api.example.com/v1/data?symbol=AAPL&apikey=test123", url);
}

87
src/net/rate_limiter.zig Normal file
View file

@ -0,0 +1,87 @@
const std = @import("std");
/// Token-bucket rate limiter. Enforces a maximum number of requests per time window.
/// Contains no internal synchronization -- guard with a mutex if an instance
/// is shared across threads.
pub const RateLimiter = struct {
/// Maximum tokens (requests) in the bucket
max_tokens: u32,
/// Current available tokens
tokens: f64,
/// Tokens added per nanosecond
refill_rate_per_ns: f64,
/// Last time tokens were refilled
last_refill: i128,
/// Create a rate limiter.
/// `max_per_window` is the max requests allowed in `window_ns` nanoseconds.
/// The bucket starts full, so an initial burst of `max_per_window` is allowed.
pub fn init(max_per_window: u32, window_ns: u64) RateLimiter {
return .{
.max_tokens = max_per_window,
.tokens = @floatFromInt(max_per_window),
.refill_rate_per_ns = @as(f64, @floatFromInt(max_per_window)) / @as(f64, @floatFromInt(window_ns)),
.last_refill = std.time.nanoTimestamp(),
};
}
/// Convenience: N requests per minute
pub fn perMinute(n: u32) RateLimiter {
return init(n, 60 * std.time.ns_per_s);
}
/// Convenience: N requests per day
pub fn perDay(n: u32) RateLimiter {
return init(n, 24 * 3600 * std.time.ns_per_s);
}
/// Try to acquire a token. Returns true if granted, false if rate-limited.
/// Caller should sleep and retry if false.
pub fn tryAcquire(self: *RateLimiter) bool {
self.refill();
if (self.tokens >= 1.0) {
self.tokens -= 1.0;
return true;
}
return false;
}
/// Acquire a token, blocking (sleeping) until one is available.
/// Sleeps in fixed one-token-sized increments between retries.
pub fn acquire(self: *RateLimiter) void {
while (!self.tryAcquire()) {
// Sleep for the time needed to generate 1 token
const wait_ns: u64 = @intFromFloat(1.0 / self.refill_rate_per_ns);
std.Thread.sleep(wait_ns);
}
}
/// Returns estimated wait time in nanoseconds until a token is available.
/// Returns 0 if a token is available now.
pub fn estimateWaitNs(self: *RateLimiter) u64 {
self.refill();
if (self.tokens >= 1.0) return 0;
const deficit = 1.0 - self.tokens;
return @intFromFloat(deficit / self.refill_rate_per_ns);
}
/// Add tokens proportional to elapsed wall-clock time, capped at max_tokens.
fn refill(self: *RateLimiter) void {
const now = std.time.nanoTimestamp();
const elapsed = now - self.last_refill;
// Guard against a clock that moved backwards (nanoTimestamp is
// wall-clock based, not monotonic).
if (elapsed <= 0) return;
const new_tokens = @as(f64, @floatFromInt(elapsed)) * self.refill_rate_per_ns;
self.tokens = @min(self.tokens + new_tokens, @as(f64, @floatFromInt(self.max_tokens)));
self.last_refill = now;
}
};
// A freshly-initialized limiter starts with a full bucket (see init),
// so the first acquire always succeeds.
test "rate limiter basic" {
var rl = RateLimiter.perMinute(60);
// Should have full bucket initially
try std.testing.expect(rl.tryAcquire());
}
// Draining all tokens faster than the refill rate must deny the next request.
test "rate limiter exhaustion" {
var rl = RateLimiter.init(2, std.time.ns_per_s);
try std.testing.expect(rl.tryAcquire());
try std.testing.expect(rl.tryAcquire());
// Bucket should be empty now
try std.testing.expect(!rl.tryAcquire());
}

View file

@ -0,0 +1,216 @@
//! Alpha Vantage API provider -- used for ETF profiles (free endpoint).
//! API docs: https://www.alphavantage.co/documentation/
//!
//! Free tier: 25 requests/day. Only used for data other providers don't have.
//!
//! ETF Profile endpoint: GET /query?function=ETF_PROFILE&symbol=X&apikey=KEY
//! Returns net assets, expense ratio, sector weights, top holdings, etc.
const std = @import("std");
const http = @import("../net/http.zig");
const RateLimiter = @import("../net/rate_limiter.zig").RateLimiter;
const Date = @import("../models/date.zig").Date;
const EtfProfile = @import("../models/etf_profile.zig").EtfProfile;
const Holding = @import("../models/etf_profile.zig").Holding;
const SectorWeight = @import("../models/etf_profile.zig").SectorWeight;
const provider = @import("provider.zig");
const base_url = "https://www.alphavantage.co/query";
/// Alpha Vantage provider handle. Only used for ETF profile data; see the
/// module doc for tier limits.
pub const AlphaVantage = struct {
api_key: []const u8,
client: http.Client,
// Sized to the free tier's 25 requests/day (see module doc).
rate_limiter: RateLimiter,
allocator: std.mem.Allocator,
pub fn init(allocator: std.mem.Allocator, api_key: []const u8) AlphaVantage {
return .{
.api_key = api_key,
.client = http.Client.init(allocator),
.rate_limiter = RateLimiter.perDay(25),
.allocator = allocator,
};
}
pub fn deinit(self: *AlphaVantage) void {
self.client.deinit();
}
/// Fetch ETF profile data: expense ratio, holdings, sectors, etc.
/// Blocks on the rate limiter; caller owns the returned profile's
/// heap-allocated sector/holding slices (see parseEtfProfileResponse).
pub fn fetchEtfProfile(
self: *AlphaVantage,
allocator: std.mem.Allocator,
symbol: []const u8,
) provider.ProviderError!EtfProfile {
self.rate_limiter.acquire();
const url = http.buildUrl(allocator, base_url, &.{
.{ "function", "ETF_PROFILE" },
.{ "symbol", symbol },
.{ "apikey", self.api_key },
}) catch return provider.ProviderError.OutOfMemory;
defer allocator.free(url);
var response = self.client.get(url) catch |err| return mapHttpError(err);
defer response.deinit();
return parseEtfProfileResponse(allocator, response.body, symbol);
}
/// Type-erased Provider interface over this instance. `self` must outlive
/// the returned Provider.
pub fn asProvider(self: *AlphaVantage) provider.Provider {
return .{
.ptr = @ptrCast(self),
.vtable = &vtable,
};
}
// NOTE(review): the @ptrCast of the function pointer assumes the vtable
// slot's signature matches fetchEtfProfileVtable's with an erased pointer
// in place of *AlphaVantage -- confirm against provider.Provider.VTable.
const vtable = provider.Provider.VTable{
.fetchEtfProfile = @ptrCast(&fetchEtfProfileVtable),
.name = .alphavantage,
};
// Trampoline matching the vtable shape; simply forwards to the method.
fn fetchEtfProfileVtable(
ptr: *AlphaVantage,
allocator: std.mem.Allocator,
symbol: []const u8,
) provider.ProviderError!EtfProfile {
return ptr.fetchEtfProfile(allocator, symbol);
}
};
// -- JSON parsing --
/// Parse an Alpha Vantage ETF_PROFILE JSON body into an EtfProfile.
/// The returned profile's `symbol` aliases the caller's `symbol` argument;
/// sector and holding names are heap-duplicated and owned by the caller.
fn parseEtfProfileResponse(
allocator: std.mem.Allocator,
body: []const u8,
symbol: []const u8,
) provider.ProviderError!EtfProfile {
const parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
return provider.ProviderError.ParseError;
// The JSON tree is freed on exit; all strings kept in the profile are
// duplicated below, so nothing aliases `parsed`.
defer parsed.deinit();
// NOTE(review): `.object` panics if the root is not a JSON object;
// cboe.zig guards the same access with a switch -- consider matching it.
const root = parsed.value.object;
// Alpha Vantage returns {"Error Message": "..."} or {"Note": "..."} on error/rate limit
if (root.get("Error Message")) |_| return provider.ProviderError.RequestFailed;
if (root.get("Note")) |_| return provider.ProviderError.RateLimited;
if (root.get("Information")) |_| return provider.ProviderError.RateLimited;
var profile = EtfProfile{
.symbol = symbol,
};
// Scalar fields arrive as strings; parseStrFloat tolerates raw numbers too.
if (root.get("net_assets")) |v| {
profile.net_assets = parseStrFloat(v);
}
if (root.get("net_expense_ratio")) |v| {
profile.expense_ratio = parseStrFloat(v);
}
if (root.get("portfolio_turnover")) |v| {
profile.portfolio_turnover = parseStrFloat(v);
}
if (root.get("dividend_yield")) |v| {
profile.dividend_yield = parseStrFloat(v);
}
if (root.get("inception_date")) |v| {
if (jsonStr(v)) |s| {
// Unparseable dates degrade to null rather than failing the profile.
profile.inception_date = Date.parse(s) catch null;
}
}
if (root.get("leveraged")) |v| {
if (jsonStr(v)) |s| {
profile.leveraged = std.mem.eql(u8, s, "YES");
}
}
// Parse sectors
if (root.get("sectors")) |sectors_val| {
if (sectors_val == .array) {
var sectors: std.ArrayList(SectorWeight) = .empty;
// NOTE(review): this errdefer frees only the list storage, not the
// names duped earlier in the loop -- a mid-loop OOM leaks them.
errdefer sectors.deinit(allocator);
for (sectors_val.array.items) |item| {
const obj = switch (item) {
.object => |o| o,
else => continue,
};
// Entries missing either field are skipped, not errored.
const name = jsonStr(obj.get("sector")) orelse continue;
const weight = parseStrFloat(obj.get("weight") orelse continue) orelse continue;
const duped_name = allocator.dupe(u8, name) catch return provider.ProviderError.OutOfMemory;
sectors.append(allocator, .{
.sector = duped_name,
.weight = weight,
}) catch return provider.ProviderError.OutOfMemory;
}
profile.sectors = sectors.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
}
// Parse top holdings (limit to top 20 to keep output manageable)
// NOTE(review): an OOM below leaks profile.sectors assigned above.
if (root.get("holdings")) |holdings_val| {
if (holdings_val == .array) {
const max_holdings: usize = 20;
var holdings: std.ArrayList(Holding) = .empty;
errdefer holdings.deinit(allocator);
// total_holdings reflects the full list even though only the first
// `max_holdings` entries are materialized.
const total: u32 = @intCast(holdings_val.array.items.len);
profile.total_holdings = total;
const limit = @min(holdings_val.array.items.len, max_holdings);
for (holdings_val.array.items[0..limit]) |item| {
const obj = switch (item) {
.object => |o| o,
else => continue,
};
const desc = jsonStr(obj.get("description")) orelse continue;
const weight = parseStrFloat(obj.get("weight") orelse continue) orelse continue;
// A holding's symbol is optional (e.g., cash positions).
const duped_sym = if (jsonStr(obj.get("symbol"))) |s|
(allocator.dupe(u8, s) catch return provider.ProviderError.OutOfMemory)
else
null;
const duped_name = allocator.dupe(u8, desc) catch return provider.ProviderError.OutOfMemory;
holdings.append(allocator, .{
.symbol = duped_sym,
.name = duped_name,
.weight = weight,
}) catch return provider.ProviderError.OutOfMemory;
}
profile.holdings = holdings.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
}
return profile;
}
// -- Helpers --
/// Coerce a JSON value into a float: accepts numeric strings, floats, and
/// integers. Anything else (including null/absent) yields null.
fn parseStrFloat(val: ?std.json.Value) ?f64 {
    const v = val orelse return null;
    switch (v) {
        .string => |s| return std.fmt.parseFloat(f64, s) catch null,
        .float => |f| return f,
        .integer => |i| return @floatFromInt(i),
        else => return null,
    }
}
/// Extract the string payload from a JSON value, or null for any other tag
/// (or for a null/absent value).
fn jsonStr(val: ?std.json.Value) ?[]const u8 {
    if (val) |v| {
        if (v == .string) return v.string;
    }
    return null;
}
/// Translate transport-level HTTP errors into the provider error space.
/// Anything without a specific mapping collapses to RequestFailed.
fn mapHttpError(err: http.HttpError) provider.ProviderError {
    switch (err) {
        error.RateLimited => return provider.ProviderError.RateLimited,
        error.Unauthorized => return provider.ProviderError.Unauthorized,
        error.NotFound => return provider.ProviderError.NotFound,
        else => return provider.ProviderError.RequestFailed,
    }
}

310
src/providers/cboe.zig Normal file
View file

@ -0,0 +1,310 @@
//! CBOE delayed quotes provider -- options chains from the exchange itself.
//! No API key required. Data is 15-minute delayed during market hours.
//!
//! Endpoint: GET https://cdn.cboe.com/api/global/delayed_quotes/options/{SYMBOL}.json
//! Returns all expirations with full chains including greeks, bid/ask, volume, OI.
const std = @import("std");
const http = @import("../net/http.zig");
const RateLimiter = @import("../net/rate_limiter.zig").RateLimiter;
const Date = @import("../models/date.zig").Date;
const OptionContract = @import("../models/option.zig").OptionContract;
const OptionsChain = @import("../models/option.zig").OptionsChain;
const ContractType = @import("../models/option.zig").ContractType;
const provider = @import("provider.zig");
const base_url = "https://cdn.cboe.com/api/global/delayed_quotes/options";
/// CBOE delayed-quotes provider handle. No API key required (see module doc).
pub const Cboe = struct {
client: http.Client,
// Self-imposed politeness limit; NOTE(review): CBOE's actual limit for
// this CDN endpoint is not documented here -- confirm if throttled.
rate_limiter: RateLimiter,
allocator: std.mem.Allocator,
pub fn init(allocator: std.mem.Allocator) Cboe {
return .{
.client = http.Client.init(allocator),
.rate_limiter = RateLimiter.perMinute(30),
.allocator = allocator,
};
}
pub fn deinit(self: *Cboe) void {
self.client.deinit();
}
/// Fetch the full options chain for a symbol (all expirations).
/// Returns chains grouped by expiration date, sorted nearest-first.
/// Ownership: caller frees each chain's `calls`/`puts`, the returned
/// slice, and -- exactly once -- the `underlying_symbol` allocation that
/// all chains share (see parseResponse/toOwnedChains).
pub fn fetchOptionsChain(
self: *Cboe,
allocator: std.mem.Allocator,
symbol: []const u8,
) provider.ProviderError![]OptionsChain {
// Blocks until the limiter grants a token.
self.rate_limiter.acquire();
// Build URL: {base_url}/{SYMBOL}.json
const url = buildCboeUrl(allocator, symbol) catch return provider.ProviderError.OutOfMemory;
defer allocator.free(url);
var response = self.client.get(url) catch |err| return mapHttpError(err);
defer response.deinit();
return parseResponse(allocator, response.body, symbol);
}
};
/// Build the CBOE delayed-quotes URL: {base_url}/{SYMBOL}.json
/// Caller owns the returned slice.
fn buildCboeUrl(allocator: std.mem.Allocator, symbol: []const u8) ![]const u8 {
    // std.fmt.allocPrint replaces the manual ArrayList assembly -- one
    // allocation, same output.
    return std.fmt.allocPrint(allocator, "{s}/{s}.json", .{ base_url, symbol });
}
/// Parse a CBOE options response into grouped OptionsChain slices.
/// All returned chains share one heap-duplicated copy of `symbol`
/// (`underlying_symbol`); free it exactly once.
fn parseResponse(
allocator: std.mem.Allocator,
body: []const u8,
symbol: []const u8,
) provider.ProviderError![]OptionsChain {
const parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
return provider.ProviderError.ParseError;
// JSON tree freed on exit; contracts below copy out scalar fields only.
defer parsed.deinit();
const root = switch (parsed.value) {
.object => |o| o,
else => return provider.ProviderError.ParseError,
};
// Payload shape: { "data": { "current_price": ..., "options": [...] } }
const data_obj = switch (root.get("data") orelse return provider.ProviderError.ParseError) {
.object => |o| o,
else => return provider.ProviderError.ParseError,
};
const underlying_price: ?f64 = if (data_obj.get("current_price")) |v| optFloat(v) else null;
const options_arr = switch (data_obj.get("options") orelse return provider.ProviderError.ParseError) {
.array => |a| a.items,
else => return provider.ProviderError.ParseError,
};
// Parse all contracts and group by expiration.
// Use an ArrayList of (expiration, calls_list, puts_list) tuples.
// NOTE(review): exp_map's entries are not released on the OOM error paths
// below (no errdefer) -- they leak if getOrPut/append fails.
var exp_map = ExpMap{};
for (options_arr) |item| {
const obj = switch (item) {
.object => |o| o,
else => continue,
};
// Strike/expiration/type are encoded in the OCC symbol, not as fields.
const occ_sym = switch (obj.get("option") orelse continue) {
.string => |s| s,
else => continue,
};
const occ = parseOccSymbol(occ_sym, symbol.len) orelse continue;
const contract = OptionContract{
.contract_type = occ.contract_type,
.strike = occ.strike,
.expiration = occ.expiration,
.bid = optFloat(obj.get("bid")),
.ask = optFloat(obj.get("ask")),
.last_price = optFloat(obj.get("last_trade_price")),
.volume = optUint(obj.get("volume")),
.open_interest = optUint(obj.get("open_interest")),
.implied_volatility = optFloat(obj.get("iv")),
.delta = optFloat(obj.get("delta")),
.gamma = optFloat(obj.get("gamma")),
.theta = optFloat(obj.get("theta")),
.vega = optFloat(obj.get("vega")),
};
// Find or create the expiration bucket
const bucket = exp_map.getOrPut(allocator, occ.expiration) catch
return provider.ProviderError.OutOfMemory;
switch (occ.contract_type) {
.call => bucket.calls.append(allocator, contract) catch return provider.ProviderError.OutOfMemory,
.put => bucket.puts.append(allocator, contract) catch return provider.ProviderError.OutOfMemory,
}
}
// Convert to sorted OptionsChain slice
const owned_symbol = allocator.dupe(u8, symbol) catch return provider.ProviderError.OutOfMemory;
errdefer allocator.free(owned_symbol);
return exp_map.toOwnedChains(allocator, owned_symbol, underlying_price);
}
// OCC symbol parsing
/// Fields decoded from an OCC option symbol (see parseOccSymbol).
const OccInfo = struct {
expiration: Date,
contract_type: ContractType,
// Strike price in dollars (the OCC encoding stores thousandths).
strike: f64,
};
/// Parse OCC option symbol: AAPL260225C00205000
/// Format: {underlying}{YYMMDD}{C|P}{strike * 1000, zero-padded to 8 digits}
/// Returns null for anything too short or malformed.
fn parseOccSymbol(sym: []const u8, underlying_len: usize) ?OccInfo {
    // After the underlying: 6 date digits + 1 type char + 8 strike digits.
    const tail_len = 6 + 1 + 8;
    if (sym.len < underlying_len + tail_len) return null;
    const tail = sym[underlying_len..];
    // Date portion: YYMMDD, with the century fixed at 2000.
    const year = std.fmt.parseInt(i16, tail[0..2], 10) catch return null;
    const month = std.fmt.parseInt(u8, tail[2..4], 10) catch return null;
    const day = std.fmt.parseInt(u8, tail[4..6], 10) catch return null;
    const kind: ContractType = switch (tail[6]) {
        'C' => .call,
        'P' => .put,
        else => return null,
    };
    // Strike is encoded in thousandths of a dollar.
    const strike_thousandths = std.fmt.parseInt(u64, tail[7..15], 10) catch return null;
    return .{
        .expiration = Date.fromYmd(2000 + year, month, day),
        .contract_type = kind,
        .strike = @as(f64, @floatFromInt(strike_thousandths)) / 1000.0,
    };
}
// Expiration grouping
/// Maps expiration dates to call/put ArrayLists. Uses a simple sorted array
/// since the number of expirations is small (typically 10-30).
/// Maps expiration dates to call/put ArrayLists. Uses a simple sorted array
/// since the number of expirations is small (typically 10-30).
const ExpMap = struct {
entries: std.ArrayList(Entry) = .empty,
const Entry = struct {
expiration: Date,
calls: std.ArrayList(OptionContract) = .empty,
puts: std.ArrayList(OptionContract) = .empty,
};
// Pointers into `entries.items`. They are invalidated whenever `entries`
// grows (a later getOrPut may reallocate) -- use a Bucket immediately and
// do not hold it across getOrPut calls.
const Bucket = struct {
calls: *std.ArrayList(OptionContract),
puts: *std.ArrayList(OptionContract),
};
/// Linear scan for the expiration; appends a fresh entry when missing.
fn getOrPut(self: *ExpMap, allocator: std.mem.Allocator, exp: Date) !Bucket {
for (self.entries.items) |*entry| {
if (entry.expiration.eql(exp)) {
return .{ .calls = &entry.calls, .puts = &entry.puts };
}
}
try self.entries.append(allocator, .{ .expiration = exp });
const last = &self.entries.items[self.entries.items.len - 1];
return .{ .calls = &last.calls, .puts = &last.puts };
}
/// Convert to owned []OptionsChain, sorted by expiration ascending.
/// Frees internal structures; caller owns the returned chains.
/// Every chain aliases the single `owned_symbol` allocation.
fn toOwnedChains(
self: *ExpMap,
allocator: std.mem.Allocator,
owned_symbol: []const u8,
underlying_price: ?f64,
) provider.ProviderError![]OptionsChain {
// Sort entries by expiration
std.mem.sort(Entry, self.entries.items, {}, struct {
fn lessThan(_: void, a: Entry, b: Entry) bool {
return a.expiration.lessThan(b.expiration);
}
}.lessThan);
var chains = allocator.alloc(OptionsChain, self.entries.items.len) catch
return provider.ProviderError.OutOfMemory;
// NOTE(review): on a mid-loop OOM below, calls/puts slices already moved
// into chains[0..i] leak -- this errdefer frees only the outer array.
errdefer allocator.free(chains);
for (self.entries.items, 0..) |*entry, i| {
const calls = entry.calls.toOwnedSlice(allocator) catch
return provider.ProviderError.OutOfMemory;
const puts = entry.puts.toOwnedSlice(allocator) catch {
allocator.free(calls);
return provider.ProviderError.OutOfMemory;
};
chains[i] = .{
.underlying_symbol = owned_symbol,
.underlying_price = underlying_price,
.expiration = entry.expiration,
.calls = calls,
.puts = puts,
};
}
self.entries.deinit(allocator);
return chains;
}
};
// JSON helpers
/// Widen a JSON number (float or integer) to f64.
/// Null/absent and non-numeric tags yield null.
fn optFloat(val: ?std.json.Value) ?f64 {
    const v = val orelse return null;
    if (v == .float) return v.float;
    if (v == .integer) return @floatFromInt(v.integer);
    return null;
}
/// Read a non-negative JSON count as u64. Negative values, fractional
/// floats, and non-numeric tags all yield null.
fn optUint(val: ?std.json.Value) ?u64 {
    const v = val orelse return null;
    switch (v) {
        .integer => |i| return if (i >= 0) @intCast(i) else null,
        .float => |f| return if (f >= 0 and f == @floor(f)) @intFromFloat(f) else null,
        else => return null,
    }
}
/// Map transport-layer errors onto provider errors; unmatched errors
/// degrade to the generic RequestFailed.
fn mapHttpError(err: http.HttpError) provider.ProviderError {
    if (err == error.RateLimited) return provider.ProviderError.RateLimited;
    if (err == error.Unauthorized) return provider.ProviderError.Unauthorized;
    if (err == error.NotFound) return provider.ProviderError.NotFound;
    return provider.ProviderError.RequestFailed;
}
// Tests
// AAPL 2026-02-25 call at $205.000 (strike digits / 1000).
test "parseOccSymbol -- call" {
const info = parseOccSymbol("AAPL260225C00205000", 4).?;
try std.testing.expect(info.contract_type == .call);
try std.testing.expect(info.expiration.eql(Date.fromYmd(2026, 2, 25)));
try std.testing.expectApproxEqAbs(@as(f64, 205.0), info.strike, 0.001);
}
// AMZN 2026-03-06 put at $180.000.
test "parseOccSymbol -- put" {
const info = parseOccSymbol("AMZN260306P00180000", 4).?;
try std.testing.expect(info.contract_type == .put);
try std.testing.expect(info.expiration.eql(Date.fromYmd(2026, 3, 6)));
try std.testing.expectApproxEqAbs(@as(f64, 180.0), info.strike, 0.001);
}
// 00555500 / 1000 = 555.5 -- sub-dollar precision survives.
test "parseOccSymbol -- fractional strike" {
const info = parseOccSymbol("SPY260227C00555500", 3).?;
try std.testing.expectApproxEqAbs(@as(f64, 555.5), info.strike, 0.001);
}
// Too-short symbols must return null, not panic.
test "parseOccSymbol -- invalid" {
try std.testing.expect(parseOccSymbol("X", 1) == null);
try std.testing.expect(parseOccSymbol("AAPL26022", 4) == null);
}

464
src/providers/finnhub.zig Normal file
View file

@ -0,0 +1,464 @@
//! Finnhub API provider -- primary source for options chains and earnings.
//! API docs: https://finnhub.io/docs/api
//!
//! Free tier: 60 requests/min, all US market data.
//!
//! Options endpoint: GET /api/v1/stock/option-chain?symbol=X
//! Returns all expirations with full CALL/PUT chains including greeks.
//!
//! Earnings endpoint: GET /api/v1/calendar/earnings?symbol=X&from=YYYY-MM-DD&to=YYYY-MM-DD
//! Returns historical and upcoming earnings with EPS, revenue, estimates.
const std = @import("std");
const http = @import("../net/http.zig");
const RateLimiter = @import("../net/rate_limiter.zig").RateLimiter;
const Date = @import("../models/date.zig").Date;
const OptionContract = @import("../models/option.zig").OptionContract;
const OptionsChain = @import("../models/option.zig").OptionsChain;
const ContractType = @import("../models/option.zig").ContractType;
const EarningsEvent = @import("../models/earnings.zig").EarningsEvent;
const ReportTime = @import("../models/earnings.zig").ReportTime;
const provider = @import("provider.zig");
const base_url = "https://finnhub.io/api/v1";
/// Finnhub provider handle; see the module doc for endpoints and tier limits.
pub const Finnhub = struct {
    api_key: []const u8,
    client: http.Client,
    rate_limiter: RateLimiter,
    allocator: std.mem.Allocator,
    pub fn init(allocator: std.mem.Allocator, api_key: []const u8) Finnhub {
        return .{
            .api_key = api_key,
            .client = http.Client.init(allocator),
            // Free tier: 60 requests/min (see module doc).
            .rate_limiter = RateLimiter.perMinute(60),
            .allocator = allocator,
        };
    }
    pub fn deinit(self: *Finnhub) void {
        self.client.deinit();
    }
    /// Fetch the full options chain for a symbol (all expirations).
    /// Returns chains grouped by expiration date, sorted nearest-first.
    /// Caller owns the returned slice and each chain's `calls`/`puts`;
    /// `underlying_symbol` aliases `symbol` and must outlive the chains.
    pub fn fetchOptionsChain(
        self: *Finnhub,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) provider.ProviderError![]OptionsChain {
        self.rate_limiter.acquire();
        const url = http.buildUrl(allocator, base_url ++ "/stock/option-chain", &.{
            .{ "symbol", symbol },
            .{ "token", self.api_key },
        }) catch return provider.ProviderError.OutOfMemory;
        defer allocator.free(url);
        var response = self.client.get(url) catch |err| return mapHttpError(err);
        defer response.deinit();
        return parseOptionsResponse(allocator, response.body, symbol);
    }
    /// Fetch options for a specific expiration date only.
    /// Returns null when the symbol has no chain expiring on `expiration`.
    pub fn fetchOptionsForExpiration(
        self: *Finnhub,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        expiration: Date,
    ) provider.ProviderError!?OptionsChain {
        const chains = try self.fetchOptionsChain(allocator, symbol);
        defer {
            for (chains) |chain| {
                allocator.free(chain.calls);
                allocator.free(chain.puts);
            }
            allocator.free(chains);
        }
        for (chains) |chain| {
            if (chain.expiration.eql(expiration)) {
                // Copy the matching chain so caller owns the memory
                const calls = allocator.dupe(OptionContract, chain.calls) catch
                    return provider.ProviderError.OutOfMemory;
                errdefer allocator.free(calls);
                const puts = allocator.dupe(OptionContract, chain.puts) catch
                    return provider.ProviderError.OutOfMemory;
                return OptionsChain{
                    .underlying_symbol = chain.underlying_symbol,
                    .underlying_price = chain.underlying_price,
                    .expiration = chain.expiration,
                    .calls = calls,
                    .puts = puts,
                };
            }
        }
        return null;
    }
    /// Fetch earnings calendar for a symbol.
    /// Returns earnings events sorted newest-first (upcoming first, then historical).
    pub fn fetchEarnings(
        self: *Finnhub,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: ?Date,
        to: ?Date,
    ) provider.ProviderError![]EarningsEvent {
        self.rate_limiter.acquire();
        // Up to 4 params: symbol + token always, from/to only when provided.
        var params: [4][2][]const u8 = undefined;
        var n: usize = 0;
        params[n] = .{ "symbol", symbol };
        n += 1;
        params[n] = .{ "token", self.api_key };
        n += 1;
        // Backing storage for the formatted dates; must stay alive until
        // buildUrl below has consumed the param slices.
        var from_buf: [10]u8 = undefined;
        var to_buf: [10]u8 = undefined;
        if (from) |f| {
            params[n] = .{ "from", f.format(&from_buf) };
            n += 1;
        }
        if (to) |t| {
            params[n] = .{ "to", t.format(&to_buf) };
            n += 1;
        }
        const url = http.buildUrl(allocator, base_url ++ "/calendar/earnings", params[0..n]) catch
            return provider.ProviderError.OutOfMemory;
        defer allocator.free(url);
        var response = self.client.get(url) catch |err| return mapHttpError(err);
        defer response.deinit();
        return parseEarningsResponse(allocator, response.body, symbol);
    }
    /// Type-erased Provider interface over this instance. `self` must
    /// outlive the returned Provider.
    pub fn asProvider(self: *Finnhub) provider.Provider {
        return .{
            .ptr = @ptrCast(self),
            .vtable = &vtable,
        };
    }
    const vtable = provider.Provider.VTable{
        .fetchOptions = @ptrCast(&fetchOptionsVtable),
        .fetchEarnings = @ptrCast(&fetchEarningsVtable),
        .name = .finnhub,
    };
    fn fetchOptionsVtable(
        ptr: *Finnhub,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        expiration: ?Date,
    ) provider.ProviderError![]OptionContract {
        if (expiration) |exp| {
            const chain = try ptr.fetchOptionsForExpiration(allocator, symbol, exp);
            if (chain) |c| {
                // Merge calls and puts into a single slice
                const total = c.calls.len + c.puts.len;
                const merged = allocator.alloc(OptionContract, total) catch
                    return provider.ProviderError.OutOfMemory;
                @memcpy(merged[0..c.calls.len], c.calls);
                @memcpy(merged[c.calls.len..], c.puts);
                allocator.free(c.calls);
                allocator.free(c.puts);
                return merged;
            }
            return allocator.alloc(OptionContract, 0) catch return provider.ProviderError.OutOfMemory;
        }
        // No expiration given: return contracts from nearest expiration
        const chains = try ptr.fetchOptionsChain(allocator, symbol);
        defer {
            // Free every chain except index 0, whose slices are moved into
            // `merged` and freed explicitly below.
            // Fix: the old `chains[1..]` sliced out of bounds (runtime
            // panic) whenever the provider returned zero expirations.
            for (chains, 0..) |chain, i| {
                if (i == 0) continue;
                allocator.free(chain.calls);
                allocator.free(chain.puts);
            }
            allocator.free(chains);
        }
        if (chains.len == 0) return allocator.alloc(OptionContract, 0) catch return provider.ProviderError.OutOfMemory;
        const first = chains[0];
        const total = first.calls.len + first.puts.len;
        const merged = allocator.alloc(OptionContract, total) catch
            return provider.ProviderError.OutOfMemory;
        @memcpy(merged[0..first.calls.len], first.calls);
        @memcpy(merged[first.calls.len..], first.puts);
        allocator.free(first.calls);
        allocator.free(first.puts);
        return merged;
    }
    fn fetchEarningsVtable(
        ptr: *Finnhub,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) provider.ProviderError![]EarningsEvent {
        return ptr.fetchEarnings(allocator, symbol, null, null);
    }
};
// -- JSON parsing --
/// Parse a Finnhub option-chain JSON body into one OptionsChain per
/// expiration. Chains alias the caller's `symbol` slice (not duplicated);
/// caller owns the returned slice and each chain's calls/puts.
fn parseOptionsResponse(
allocator: std.mem.Allocator,
body: []const u8,
symbol: []const u8,
) provider.ProviderError![]OptionsChain {
const parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
return provider.ProviderError.ParseError;
defer parsed.deinit();
// NOTE(review): `.object` panics if the root is not a JSON object;
// cboe.zig guards the same access with a switch -- consider matching it.
const root = parsed.value.object;
// Check for error response
if (root.get("error")) |_| return provider.ProviderError.RequestFailed;
// NOTE(review): a missing lastTradePrice becomes 0 rather than null,
// which downstream code cannot distinguish from a real zero price.
const underlying_price: f64 = if (root.get("lastTradePrice")) |v| parseJsonFloat(v) else 0;
// Missing/odd "data" yields an empty (still allocated) result, not an error.
const data_arr = root.get("data") orelse {
const empty = allocator.alloc(OptionsChain, 0) catch return provider.ProviderError.OutOfMemory;
return empty;
};
const items = switch (data_arr) {
.array => |a| a.items,
else => {
const empty = allocator.alloc(OptionsChain, 0) catch return provider.ProviderError.OutOfMemory;
return empty;
},
};
var chains: std.ArrayList(OptionsChain) = .empty;
// Unwind any fully-built chains if a later expiration fails to allocate.
errdefer {
for (chains.items) |chain| {
allocator.free(chain.calls);
allocator.free(chain.puts);
}
chains.deinit(allocator);
}
for (items) |item| {
const obj = switch (item) {
.object => |o| o,
else => continue,
};
// Entries without a parseable expirationDate are skipped silently.
const exp_str = jsonStr(obj.get("expirationDate")) orelse continue;
const expiration = Date.parse(exp_str) catch continue;
const options_obj = (obj.get("options") orelse continue);
const options = switch (options_obj) {
.object => |o| o,
else => continue,
};
// Contracts arrive split into "CALL"/"PUT" arrays under "options".
const calls = parseContracts(allocator, options.get("CALL"), .call, expiration) catch
return provider.ProviderError.OutOfMemory;
errdefer allocator.free(calls);
const puts = parseContracts(allocator, options.get("PUT"), .put, expiration) catch
return provider.ProviderError.OutOfMemory;
chains.append(allocator, .{
.underlying_symbol = symbol,
.underlying_price = underlying_price,
.expiration = expiration,
.calls = calls,
.puts = puts,
}) catch return provider.ProviderError.OutOfMemory;
}
return chains.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
/// Convert a JSON array of option contracts into an owned OptionContract
/// slice. Entries that are not objects, or that lack a positive strike,
/// are silently skipped. A missing/non-array input yields an empty slice.
fn parseContracts(
    allocator: std.mem.Allocator,
    val: ?std.json.Value,
    contract_type: ContractType,
    expiration: Date,
) ![]OptionContract {
    const json_val = val orelse return try allocator.alloc(OptionContract, 0);
    const entries = switch (json_val) {
        .array => |a| a.items,
        else => return try allocator.alloc(OptionContract, 0),
    };
    var out: std.ArrayList(OptionContract) = .empty;
    errdefer out.deinit(allocator);
    for (entries) |entry| {
        const fields = switch (entry) {
            .object => |o| o,
            else => continue,
        };
        const strike_val = fields.get("strike") orelse continue;
        const strike = parseJsonFloat(strike_val);
        // A non-positive strike indicates a malformed row.
        if (strike <= 0) continue;
        out.append(allocator, .{
            .contract_type = contract_type,
            .strike = strike,
            .expiration = expiration,
            .bid = optFloat(fields.get("bid")),
            .ask = optFloat(fields.get("ask")),
            .last_price = optFloat(fields.get("lastPrice")),
            .volume = optUint(fields.get("volume")),
            .open_interest = optUint(fields.get("openInterest")),
            .implied_volatility = optFloat(fields.get("impliedVolatility")),
            .delta = optFloat(fields.get("delta")),
            .gamma = optFloat(fields.get("gamma")),
            .theta = optFloat(fields.get("theta")),
            .vega = optFloat(fields.get("vega")),
        }) catch return error.OutOfMemory;
    }
    return out.toOwnedSlice(allocator);
}
/// Parse Finnhub's earnings-calendar response into owned EarningsEvent
/// structs. Rows without a parseable date are skipped. EPS surprise and
/// surprise-percent are derived locally from actual vs. estimate.
/// NOTE(review): `.symbol` borrows the caller's slice -- confirm lifetime.
fn parseEarningsResponse(
    allocator: std.mem.Allocator,
    body: []const u8,
    symbol: []const u8,
) provider.ProviderError![]EarningsEvent {
    const doc = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    defer doc.deinit();
    const root = doc.value.object;
    // Finnhub signals failures with an "error" key.
    if (root.get("error") != null) return provider.ProviderError.RequestFailed;
    const calendar = root.get("earningsCalendar") orelse {
        return allocator.alloc(EarningsEvent, 0) catch return provider.ProviderError.OutOfMemory;
    };
    if (calendar != .array) {
        return allocator.alloc(EarningsEvent, 0) catch return provider.ProviderError.OutOfMemory;
    }
    var events: std.ArrayList(EarningsEvent) = .empty;
    errdefer events.deinit(allocator);
    for (calendar.array.items) |entry| {
        if (entry != .object) continue;
        const row = entry.object;
        const date_text = jsonStr(row.get("date")) orelse continue;
        const report_date = Date.parse(date_text) catch continue;
        const eps_actual = optFloat(row.get("epsActual"));
        const eps_estimate = optFloat(row.get("epsEstimate"));
        // Surprise metrics exist only when both actual and estimate are
        // reported; percent additionally requires a nonzero estimate.
        var eps_surprise: ?f64 = null;
        var eps_surprise_pct: ?f64 = null;
        if (eps_actual) |act| {
            if (eps_estimate) |est| {
                eps_surprise = act - est;
                if (est != 0) eps_surprise_pct = ((act - est) / @abs(est)) * 100.0;
            }
        }
        events.append(allocator, .{
            .symbol = symbol,
            .date = report_date,
            .estimate = eps_estimate,
            .actual = eps_actual,
            .surprise = eps_surprise,
            .surprise_percent = eps_surprise_pct,
            .quarter = parseQuarter(row.get("quarter")),
            .fiscal_year = parseFiscalYear(row.get("year")),
            .revenue_actual = optFloat(row.get("revenueActual")),
            .revenue_estimate = optFloat(row.get("revenueEstimate")),
            .report_time = parseReportTime(row.get("hour")),
        }) catch return provider.ProviderError.OutOfMemory;
    }
    return events.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
// -- Helpers --
/// Coerce a JSON value to f64. Accepts floats, integers, and numeric
/// strings; anything else (or an unparseable string) yields 0.
fn parseJsonFloat(val: std.json.Value) f64 {
    switch (val) {
        .float => |f| return f,
        .integer => |i| return @floatFromInt(i),
        .string => |s| return std.fmt.parseFloat(f64, s) catch 0,
        else => return 0,
    }
}
/// Optional numeric coercion: null for absent/JSON-null/non-numeric values.
/// Also accepts numeric strings for consistency with parseJsonFloat in this
/// file (Finnhub encodes some numbers as strings); unparseable strings
/// still yield null, as before.
fn optFloat(val: ?std.json.Value) ?f64 {
    const v = val orelse return null;
    return switch (v) {
        .float => |f| f,
        .integer => |i| @floatFromInt(i),
        .string => |s| std.fmt.parseFloat(f64, s) catch null,
        .null => null,
        else => null,
    };
}
/// Optional non-negative integer coercion: null for absent, negative,
/// JSON-null, or non-numeric values.
/// BUGFIX: the float arm previously guarded only `f >= 0`, so NaN/inf (and
/// values too large for the cast) hit @intFromFloat -- illegal behavior in
/// Zig. Restrict to the exactly-representable f64 integer range [0, 2^53).
fn optUint(val: ?std.json.Value) ?u64 {
    const v = val orelse return null;
    return switch (v) {
        .integer => |i| if (i >= 0) @intCast(i) else null,
        // NaN fails `f >= 0`; inf and oversized values fail the upper bound.
        .float => |f| if (f >= 0 and f < 9007199254740992.0) @intFromFloat(f) else null,
        .null => null,
        else => null,
    };
}
/// Extract the string payload from an optional JSON value, or null when
/// the value is absent or not a string.
fn jsonStr(val: ?std.json.Value) ?[]const u8 {
    const v = val orelse return null;
    if (v == .string) return v.string;
    return null;
}
/// Parse a fiscal-quarter field; null unless the value is numeric and in 1..4.
/// BUGFIX: the float arm previously cast via @intFromFloat before any range
/// check -- illegal behavior for NaN/inf/out-of-range floats. Range-check
/// the float first (NaN fails both comparisons).
fn parseQuarter(val: ?std.json.Value) ?u8 {
    const v = val orelse return null;
    const i: i64 = switch (v) {
        .integer => |n| n,
        .float => |f| if (f >= 1 and f <= 4) @intFromFloat(f) else return null,
        else => return null,
    };
    return if (i >= 1 and i <= 4) @intCast(i) else null;
}
/// Parse a fiscal-year field; null unless numeric and in (1900, 2200).
/// BUGFIX: the float arm previously cast via @intFromFloat before the range
/// check -- illegal behavior for NaN/inf/out-of-range floats. Range-check
/// the float first (NaN fails both comparisons).
fn parseFiscalYear(val: ?std.json.Value) ?i16 {
    const v = val orelse return null;
    const i: i64 = switch (v) {
        .integer => |n| n,
        .float => |f| if (f > 1900 and f < 2200) @intFromFloat(f) else return null,
        else => return null,
    };
    return if (i > 1900 and i < 2200) @intCast(i) else null;
}
/// Map Finnhub's "hour" field onto ReportTime ("bmo"/"amc"/"dmh");
/// anything else, including a missing or non-string value, is .unknown.
fn parseReportTime(val: ?std.json.Value) ReportTime {
    const text = jsonStr(val) orelse return .unknown;
    const mappings = [_]struct { key: []const u8, rt: ReportTime }{
        .{ .key = "bmo", .rt = .bmo },
        .{ .key = "amc", .rt = .amc },
        .{ .key = "dmh", .rt = .dmh },
    };
    for (mappings) |m| {
        if (std.mem.eql(u8, text, m.key)) return m.rt;
    }
    return .unknown;
}
/// Translate transport-level errors from the HTTP client into the shared
/// provider error set; anything unrecognized becomes RequestFailed.
fn mapHttpError(err: http.HttpError) provider.ProviderError {
    return switch (err) {
        error.NotFound => provider.ProviderError.NotFound,
        error.Unauthorized => provider.ProviderError.Unauthorized,
        error.RateLimited => provider.ProviderError.RateLimited,
        else => provider.ProviderError.RequestFailed,
    };
}

439
src/providers/polygon.zig Normal file
View file

@ -0,0 +1,439 @@
//! Polygon.io API provider -- primary source for dividend/split reference data
//! and secondary source for daily OHLCV bars.
//! API docs: https://polygon.io/docs
//!
//! Free tier: 5 requests/min, unlimited daily, 2yr historical bars.
//! Dividends and splits are available for all history.
const std = @import("std");
const http = @import("../net/http.zig");
const RateLimiter = @import("../net/rate_limiter.zig").RateLimiter;
const Date = @import("../models/date.zig").Date;
const Candle = @import("../models/candle.zig").Candle;
const Dividend = @import("../models/dividend.zig").Dividend;
const DividendType = @import("../models/dividend.zig").DividendType;
const Split = @import("../models/split.zig").Split;
const provider = @import("provider.zig");
const base_url = "https://api.polygon.io";
pub const Polygon = struct {
    // Borrowed API key; caller keeps it alive for this struct's lifetime.
    api_key: []const u8,
    client: http.Client,
    rate_limiter: RateLimiter,
    allocator: std.mem.Allocator,
    pub fn init(allocator: std.mem.Allocator, api_key: []const u8) Polygon {
        return .{
            .api_key = api_key,
            .client = http.Client.init(allocator),
            // Free tier: 5 requests per minute (see file header).
            .rate_limiter = RateLimiter.perMinute(5),
            .allocator = allocator,
        };
    }
    pub fn deinit(self: *Polygon) void {
        self.client.deinit();
    }
    /// Fetch dividend history for a ticker. Results sorted oldest-first by ex_date.
    /// Follows Polygon's `next_url` pagination cursor until exhausted.
    /// Polygon endpoint: GET /v3/reference/dividends?ticker=X&ex_dividend_date.gte=YYYY-MM-DD&...
    pub fn fetchDividends(
        self: *Polygon,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: ?Date,
        to: ?Date,
    ) provider.ProviderError![]Dividend {
        var all_dividends: std.ArrayList(Dividend) = .empty;
        errdefer all_dividends.deinit(allocator);
        var next_url: ?[]const u8 = null;
        // Frees the pending cursor on any exit path (error or success).
        defer if (next_url) |u| allocator.free(u);
        // First request
        {
            self.rate_limiter.acquire();
            var params: [5][2][]const u8 = undefined;
            var n: usize = 0;
            params[n] = .{ "ticker", symbol };
            n += 1;
            params[n] = .{ "limit", "1000" };
            n += 1;
            params[n] = .{ "sort", "ex_dividend_date" };
            n += 1;
            var from_buf: [10]u8 = undefined;
            var to_buf: [10]u8 = undefined;
            if (from) |f| {
                params[n] = .{ "ex_dividend_date.gte", f.format(&from_buf) };
                n += 1;
            }
            if (to) |t| {
                params[n] = .{ "ex_dividend_date.lte", t.format(&to_buf) };
                n += 1;
            }
            const url = http.buildUrl(allocator, base_url ++ "/v3/reference/dividends", params[0..n]) catch
                return provider.ProviderError.OutOfMemory;
            defer allocator.free(url);
            const authed = appendApiKey(allocator, url, self.api_key) catch
                return provider.ProviderError.OutOfMemory;
            defer allocator.free(authed);
            var response = self.client.get(authed) catch |err| return mapHttpError(err);
            defer response.deinit();
            next_url = try parseDividendsPage(allocator, response.body, &all_dividends);
        }
        // Paginate
        while (next_url) |cursor_url| {
            self.rate_limiter.acquire();
            const authed = appendApiKey(allocator, cursor_url, self.api_key) catch
                return provider.ProviderError.OutOfMemory;
            defer allocator.free(authed);
            var response = self.client.get(authed) catch |err| return mapHttpError(err);
            defer response.deinit();
            // BUGFIX: clear next_url BEFORE freeing the cursor. Previously,
            // if parseDividendsPage below returned an error, the
            // function-level `defer if (next_url) ...` freed the
            // already-freed cursor_url -- a double free.
            next_url = null;
            allocator.free(cursor_url);
            next_url = try parseDividendsPage(allocator, response.body, &all_dividends);
        }
        return all_dividends.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
    }
    /// Fetch split history for a ticker. Results sorted oldest-first.
    /// Polygon endpoint: GET /v3/reference/splits?ticker=X&...
    pub fn fetchSplits(
        self: *Polygon,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) provider.ProviderError![]Split {
        self.rate_limiter.acquire();
        const url = http.buildUrl(allocator, base_url ++ "/v3/reference/splits", &.{
            .{ "ticker", symbol },
            .{ "limit", "1000" },
            .{ "sort", "execution_date" },
        }) catch return provider.ProviderError.OutOfMemory;
        defer allocator.free(url);
        const authed = appendApiKey(allocator, url, self.api_key) catch
            return provider.ProviderError.OutOfMemory;
        defer allocator.free(authed);
        var response = self.client.get(authed) catch |err| return mapHttpError(err);
        defer response.deinit();
        return parseSplitsResponse(allocator, response.body);
    }
    /// Fetch daily OHLCV bars. Polygon free tier: 2 years max history.
    /// Polygon endpoint: GET /v2/aggs/ticker/{ticker}/range/1/day/{from}/{to}
    pub fn fetchCandles(
        self: *Polygon,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: Date,
        to: Date,
    ) provider.ProviderError![]Candle {
        self.rate_limiter.acquire();
        var from_buf: [10]u8 = undefined;
        var to_buf: [10]u8 = undefined;
        const from_str = from.format(&from_buf);
        const to_str = to.format(&to_buf);
        // Build URL manually since the path contains the date range
        const path = std.fmt.allocPrint(
            allocator,
            "{s}/v2/aggs/ticker/{s}/range/1/day/{s}/{s}?adjusted=true&sort=asc&limit=5000",
            .{ base_url, symbol, from_str, to_str },
        ) catch return provider.ProviderError.OutOfMemory;
        defer allocator.free(path);
        const authed = appendApiKey(allocator, path, self.api_key) catch
            return provider.ProviderError.OutOfMemory;
        defer allocator.free(authed);
        var response = self.client.get(authed) catch |err| return mapHttpError(err);
        defer response.deinit();
        return parseCandlesResponse(allocator, response.body);
    }
    /// Type-erased Provider view over this instance for generic dispatch.
    pub fn asProvider(self: *Polygon) provider.Provider {
        return .{
            .ptr = @ptrCast(self),
            .vtable = &vtable,
        };
    }
    // Capabilities not listed here (options, earnings, ETF profile) stay
    // null and report NotSupported via the Provider interface.
    const vtable = provider.Provider.VTable{
        .fetchDividends = @ptrCast(&fetchDividendsVtable),
        .fetchSplits = @ptrCast(&fetchSplitsVtable),
        .fetchCandles = @ptrCast(&fetchCandlesVtable),
        .name = .polygon,
    };
    fn fetchDividendsVtable(ptr: *Polygon, allocator: std.mem.Allocator, symbol: []const u8, from: ?Date, to: ?Date) provider.ProviderError![]Dividend {
        return ptr.fetchDividends(allocator, symbol, from, to);
    }
    fn fetchSplitsVtable(ptr: *Polygon, allocator: std.mem.Allocator, symbol: []const u8) provider.ProviderError![]Split {
        return ptr.fetchSplits(allocator, symbol);
    }
    fn fetchCandlesVtable(ptr: *Polygon, allocator: std.mem.Allocator, symbol: []const u8, from: Date, to: Date) provider.ProviderError![]Candle {
        return ptr.fetchCandles(allocator, symbol, from, to);
    }
};
// -- JSON parsing --
/// Parse one page of Polygon's /v3/reference/dividends response, appending
/// rows to `out`. Returns an owned copy of `next_url` when another page
/// exists, or null on the final page. Rows without a valid ex_dividend_date
/// or positive cash_amount are skipped.
fn parseDividendsPage(
    allocator: std.mem.Allocator,
    body: []const u8,
    out: *std.ArrayList(Dividend),
) provider.ProviderError!?[]const u8 {
    const doc = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    defer doc.deinit();
    const root = doc.value.object;
    // Polygon signals failures with status == "ERROR".
    if (root.get("status")) |status| {
        if (status == .string and std.mem.eql(u8, status.string, "ERROR"))
            return provider.ProviderError.RequestFailed;
    }
    const results = root.get("results") orelse return null;
    if (results != .array) return null;
    for (results.array.items) |entry| {
        if (entry != .object) continue;
        const row = entry.object;
        const ex_date_str = jsonStr(row.get("ex_dividend_date")) orelse continue;
        const ex_date = Date.parse(ex_date_str) catch continue;
        const cash = parseJsonFloat(row.get("cash_amount"));
        if (cash <= 0) continue;
        out.append(allocator, .{
            .ex_date = ex_date,
            .amount = cash,
            .pay_date = parseDateField(row, "pay_date"),
            .record_date = parseDateField(row, "record_date"),
            .frequency = parseFrequency(row),
            .distribution_type = parseDividendType(row),
            .currency = jsonStr(row.get("currency")),
        }) catch return provider.ProviderError.OutOfMemory;
    }
    // A next_url means more pages remain. Hand the caller an owned copy,
    // since the parsed document (and its strings) is freed on return.
    const next = root.get("next_url") orelse return null;
    if (next != .string) return null;
    return allocator.dupe(u8, next.string) catch return provider.ProviderError.OutOfMemory;
}
/// Parse Polygon's /v3/reference/splits response into an owned Split slice.
/// Rows without a valid execution_date are skipped; split_to/split_from
/// default to 0 when absent or non-numeric.
fn parseSplitsResponse(allocator: std.mem.Allocator, body: []const u8) provider.ProviderError![]Split {
    const doc = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    defer doc.deinit();
    const root = doc.value.object;
    // Polygon signals failures with status == "ERROR".
    if (root.get("status")) |status| {
        if (status == .string and std.mem.eql(u8, status.string, "ERROR"))
            return provider.ProviderError.RequestFailed;
    }
    const results = root.get("results") orelse {
        return allocator.alloc(Split, 0) catch return provider.ProviderError.OutOfMemory;
    };
    if (results != .array) {
        return allocator.alloc(Split, 0) catch return provider.ProviderError.OutOfMemory;
    }
    var out: std.ArrayList(Split) = .empty;
    errdefer out.deinit(allocator);
    for (results.array.items) |entry| {
        if (entry != .object) continue;
        const row = entry.object;
        const exec_str = jsonStr(row.get("execution_date")) orelse continue;
        const exec_date = Date.parse(exec_str) catch continue;
        out.append(allocator, .{
            .date = exec_date,
            .numerator = parseJsonFloat(row.get("split_to")),
            .denominator = parseJsonFloat(row.get("split_from")),
        }) catch return provider.ProviderError.OutOfMemory;
    }
    return out.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
/// Parse Polygon's /v2/aggs response into owned Candle structs. Polygon
/// timestamps ("t") are Unix epoch milliseconds, converted here to whole
/// days; adj_close mirrors close because requests use adjusted=true.
fn parseCandlesResponse(allocator: std.mem.Allocator, body: []const u8) provider.ProviderError![]Candle {
    const parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    defer parsed.deinit();
    const root = parsed.value.object;
    if (root.get("status")) |s| {
        if (s == .string and std.mem.eql(u8, s.string, "ERROR"))
            return provider.ProviderError.RequestFailed;
    }
    const results = root.get("results") orelse {
        const empty = allocator.alloc(Candle, 0) catch return provider.ProviderError.OutOfMemory;
        return empty;
    };
    const items = switch (results) {
        .array => |a| a.items,
        else => {
            const empty = allocator.alloc(Candle, 0) catch return provider.ProviderError.OutOfMemory;
            return empty;
        },
    };
    var candles: std.ArrayList(Candle) = .empty;
    errdefer candles.deinit(allocator);
    for (items) |item| {
        const obj = switch (item) {
            .object => |o| o,
            else => continue,
        };
        // Polygon returns timestamp in milliseconds
        const ts_ms = blk: {
            const v = obj.get("t") orelse continue;
            break :blk switch (v) {
                .integer => |i| i,
                .float => |f| @as(i64, @intFromFloat(f)),
                else => continue,
            };
        };
        const days: i32 = @intCast(@divFloor(ts_ms, 86400 * 1000));
        const date = Date{ .days = days };
        const close = parseJsonFloat(obj.get("c"));
        // BUGFIX: guard the float->int cast for volume. @intFromFloat on
        // NaN/inf or a negative value is illegal behavior in Zig; a
        // malformed "v" field now degrades to 0 instead.
        const vol = parseJsonFloat(obj.get("v"));
        candles.append(allocator, .{
            .date = date,
            .open = parseJsonFloat(obj.get("o")),
            .high = parseJsonFloat(obj.get("h")),
            .low = parseJsonFloat(obj.get("l")),
            .close = close,
            .adj_close = close, // Polygon adjusted=true gives adjusted values
            .volume = if (vol >= 0 and vol < 9007199254740992.0) @intFromFloat(vol) else 0,
        }) catch return provider.ProviderError.OutOfMemory;
    }
    return candles.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
}
// -- Helpers --
/// Append `apiKey=<key>` to a URL, choosing '?' or '&' depending on whether
/// the URL already carries a query string. Caller owns the returned string.
fn appendApiKey(allocator: std.mem.Allocator, url: []const u8, api_key: []const u8) ![]const u8 {
    const has_query = std.mem.indexOfScalar(u8, url, '?') != null;
    const sep: u8 = if (has_query) '&' else '?';
    return std.fmt.allocPrint(allocator, "{s}{c}apiKey={s}", .{ url, sep, api_key });
}
/// Best-effort numeric coercion for an optional JSON value: floats and
/// integers convert directly, numeric strings are parsed; anything else
/// (including absent or unparseable values) yields 0.
fn parseJsonFloat(val: ?std.json.Value) f64 {
    const v = val orelse return 0;
    switch (v) {
        .float => |f| return f,
        .integer => |i| return @floatFromInt(i),
        .string => |s| return std.fmt.parseFloat(f64, s) catch 0,
        else => return 0,
    }
}
/// Extract the string payload from an optional JSON value, or null when
/// the value is absent or not a string.
fn jsonStr(val: ?std.json.Value) ?[]const u8 {
    const v = val orelse return null;
    if (v == .string) return v.string;
    return null;
}
/// Read an optional "YYYY-MM-DD" string field from `obj`; null when the
/// field is absent, not a string, or not a parseable date.
fn parseDateField(obj: std.json.ObjectMap, key: []const u8) ?Date {
    const raw = jsonStr(obj.get(key)) orelse return null;
    return Date.parse(raw) catch null;
}
/// Read the payouts-per-year "frequency" field; null unless numeric and in
/// (0, 255]. NaN floats fail the range comparison and also yield null.
fn parseFrequency(obj: std.json.ObjectMap) ?u8 {
    const v = obj.get("frequency") orelse return null;
    switch (v) {
        .integer => |i| {
            if (i <= 0 or i > 255) return null;
            return @intCast(i);
        },
        .float => |f| {
            if (!(f > 0 and f <= 255)) return null;
            return @intFromFloat(f);
        },
        else => return null,
    }
}
/// Map Polygon "dividend_type" codes onto DividendType. "SC" is special;
/// "CD", "LT", and "ST" all count as regular (per the original mapping --
/// presumably cash dividend and long/short-term distributions; confirm
/// against Polygon docs). Anything else is .unknown.
fn parseDividendType(obj: std.json.ObjectMap) DividendType {
    const code = blk: {
        const v = obj.get("dividend_type") orelse return .unknown;
        if (v != .string) return .unknown;
        break :blk v.string;
    };
    if (std.mem.eql(u8, code, "SC")) return .special;
    const regular_codes = [_][]const u8{ "CD", "LT", "ST" };
    for (regular_codes) |rc| {
        if (std.mem.eql(u8, code, rc)) return .regular;
    }
    return .unknown;
}
/// Translate transport-level errors from the HTTP client into the shared
/// provider error set; anything unrecognized becomes RequestFailed.
fn mapHttpError(err: http.HttpError) provider.ProviderError {
    return switch (err) {
        error.NotFound => provider.ProviderError.NotFound,
        error.Unauthorized => provider.ProviderError.Unauthorized,
        error.RateLimited => provider.ProviderError.RateLimited,
        else => provider.ProviderError.RequestFailed,
    };
}

147
src/providers/provider.zig Normal file
View file

@ -0,0 +1,147 @@
const std = @import("std");
const Date = @import("../models/date.zig").Date;
const Candle = @import("../models/candle.zig").Candle;
const Dividend = @import("../models/dividend.zig").Dividend;
const Split = @import("../models/split.zig").Split;
const OptionContract = @import("../models/option.zig").OptionContract;
const EarningsEvent = @import("../models/earnings.zig").EarningsEvent;
const EtfProfile = @import("../models/etf_profile.zig").EtfProfile;
/// Error set shared by every provider implementation, so frontends can
/// handle failures uniformly regardless of which upstream API was used.
pub const ProviderError = error{
    /// The selected provider requires an API key that was not configured.
    ApiKeyMissing,
    /// The HTTP request failed or the API returned an error payload.
    RequestFailed,
    /// Upstream rate limit hit (mapped from HTTP 429).
    RateLimited,
    /// The response body could not be parsed as the expected JSON shape.
    ParseError,
    /// The provider does not implement this capability (null vtable entry).
    NotSupported,
    /// Allocation failed while building or parsing a request.
    OutOfMemory,
    /// Authentication rejected (mapped from HTTP 401 / bad API key).
    Unauthorized,
    /// Requested resource or symbol not found (mapped from HTTP 404).
    NotFound,
    /// Upstream server-side failure. (Not raised in this chunk -- verify.)
    ServerError,
    /// Response arrived but was unusable. (Not raised in this chunk -- verify.)
    InvalidResponse,
    /// Connection could not be established. (Not raised in this chunk -- verify.)
    ConnectionRefused,
};
/// Identifies which upstream data source a result came from.
/// One member per concrete implementation under src/providers/.
pub const ProviderName = enum {
    twelvedata, // Twelve Data: candles + quotes (see twelvedata.zig)
    polygon, // Polygon.io: dividends, splits, candles (see polygon.zig)
    finnhub, // Finnhub: options + earnings (see finnhub.zig)
    cboe, // defined in providers/cboe.zig (outside this chunk)
    alphavantage, // defined in providers/alphavantage.zig (outside this chunk)
};
/// Common interface for all data providers.
/// Each provider implements the capabilities it supports and returns
/// `error.NotSupported` for those it doesn't. Concrete providers expose an
/// `asProvider()` that pairs their instance pointer with a static vtable.
pub const Provider = struct {
    /// Type-erased pointer to the concrete provider instance.
    ptr: *anyopaque,
    /// Static dispatch table; a null entry means "capability unsupported".
    vtable: *const VTable,
    pub const VTable = struct {
        /// Daily OHLCV bars for [from, to].
        fetchCandles: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
            from: Date,
            to: Date,
        ) ProviderError![]Candle = null,
        /// Dividend history; null bounds mean open-ended.
        fetchDividends: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
            from: ?Date,
            to: ?Date,
        ) ProviderError![]Dividend = null,
        /// Full split history for the symbol.
        fetchSplits: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
        ) ProviderError![]Split = null,
        /// Options contracts; null expiration means provider-defined default.
        fetchOptions: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
            expiration: ?Date,
        ) ProviderError![]OptionContract = null,
        /// Earnings events for the symbol.
        fetchEarnings: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
        ) ProviderError![]EarningsEvent = null,
        /// ETF holdings/sector profile.
        fetchEtfProfile: ?*const fn (
            ptr: *anyopaque,
            allocator: std.mem.Allocator,
            symbol: []const u8,
        ) ProviderError!EtfProfile = null,
        /// Identifies the upstream data source for attribution/routing.
        name: ProviderName,
    };
    /// Dispatch to the provider's fetchCandles, or NotSupported.
    pub fn fetchCandles(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: Date,
        to: Date,
    ) ProviderError![]Candle {
        const func = self.vtable.fetchCandles orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol, from, to);
    }
    /// Dispatch to the provider's fetchDividends, or NotSupported.
    pub fn fetchDividends(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: ?Date,
        to: ?Date,
    ) ProviderError![]Dividend {
        const func = self.vtable.fetchDividends orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol, from, to);
    }
    /// Dispatch to the provider's fetchSplits, or NotSupported.
    pub fn fetchSplits(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) ProviderError![]Split {
        const func = self.vtable.fetchSplits orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol);
    }
    /// Dispatch to the provider's fetchOptions, or NotSupported.
    pub fn fetchOptions(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        expiration: ?Date,
    ) ProviderError![]OptionContract {
        const func = self.vtable.fetchOptions orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol, expiration);
    }
    /// Dispatch to the provider's fetchEarnings, or NotSupported.
    pub fn fetchEarnings(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) ProviderError![]EarningsEvent {
        const func = self.vtable.fetchEarnings orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol);
    }
    /// Dispatch to the provider's fetchEtfProfile, or NotSupported.
    pub fn fetchEtfProfile(
        self: Provider,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) ProviderError!EtfProfile {
        const func = self.vtable.fetchEtfProfile orelse return ProviderError.NotSupported;
        return func(self.ptr, allocator, symbol);
    }
    /// Which upstream data source this Provider wraps.
    pub fn providerName(self: Provider) ProviderName {
        return self.vtable.name;
    }
};

View file

@ -0,0 +1,285 @@
//! Twelve Data API provider -- primary source for historical price data.
//! API docs: https://twelvedata.com/docs
//!
//! Free tier: 800 requests/day, 8 credits/min, all US market data.
//!
//! Note: Twelve Data returns split-adjusted prices but NOT dividend-adjusted.
//! The `adj_close` field is set equal to `close` here. For true total-return
//! calculations, use Polygon dividend data with the manual reinvestment method.
const std = @import("std");
const http = @import("../net/http.zig");
const RateLimiter = @import("../net/rate_limiter.zig").RateLimiter;
const Date = @import("../models/date.zig").Date;
const Candle = @import("../models/candle.zig").Candle;
const provider = @import("provider.zig");
const base_url = "https://api.twelvedata.com";
pub const TwelveData = struct {
    // Borrowed API key; caller keeps it alive for this struct's lifetime.
    api_key: []const u8,
    client: http.Client,
    rate_limiter: RateLimiter,
    allocator: std.mem.Allocator,
    pub fn init(allocator: std.mem.Allocator, api_key: []const u8) TwelveData {
        return .{
            .api_key = api_key,
            .client = http.Client.init(allocator),
            // Free tier: 8 credits per minute (see file header).
            .rate_limiter = RateLimiter.perMinute(8),
            .allocator = allocator,
        };
    }
    pub fn deinit(self: *TwelveData) void {
        self.client.deinit();
    }
    /// Fetch daily candles for a symbol between two dates.
    /// Returns candles sorted oldest-first.
    pub fn fetchCandles(
        self: *TwelveData,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: Date,
        to: Date,
    ) provider.ProviderError![]Candle {
        self.rate_limiter.acquire();
        var from_buf: [10]u8 = undefined;
        var to_buf: [10]u8 = undefined;
        const from_str = from.format(&from_buf);
        const to_str = to.format(&to_buf);
        const url = http.buildUrl(allocator, base_url ++ "/time_series", &.{
            .{ "symbol", symbol },
            .{ "interval", "1day" },
            .{ "start_date", from_str },
            .{ "end_date", to_str },
            .{ "outputsize", "5000" },
            .{ "apikey", self.api_key },
        }) catch return provider.ProviderError.OutOfMemory;
        defer allocator.free(url);
        var response = self.client.get(url) catch |err| return switch (err) {
            error.RateLimited => provider.ProviderError.RateLimited,
            error.Unauthorized => provider.ProviderError.Unauthorized,
            error.NotFound => provider.ProviderError.NotFound,
            else => provider.ProviderError.RequestFailed,
        };
        defer response.deinit();
        return parseTimeSeriesResponse(allocator, response.body);
    }
    /// Raw /quote response body; owns `body` until deinit.
    pub const QuoteResponse = struct {
        body: []const u8,
        allocator: std.mem.Allocator,
        pub fn deinit(self: *QuoteResponse) void {
            self.allocator.free(self.body);
        }
        /// Parse the quote body into typed accessors. Caller should use the
        /// result within the lifetime of the QuoteResponse and must deinit it.
        pub fn parse(self: QuoteResponse, allocator: std.mem.Allocator) provider.ProviderError!ParsedQuote {
            return parseQuoteBody(allocator, self.body);
        }
    };
    /// Typed accessors over a parsed /quote document. Numeric getters
    /// default to 0 and string getters to "" when a field is absent or
    /// malformed. Caller owns the parsed document and must deinit.
    pub const ParsedQuote = struct {
        parsed: std.json.Parsed(std.json.Value),
        pub fn deinit(self: *ParsedQuote) void {
            self.parsed.deinit();
        }
        fn root(self: ParsedQuote) std.json.ObjectMap {
            return self.parsed.value.object;
        }
        pub fn symbol(self: ParsedQuote) []const u8 { return jsonStr(self.root().get("symbol")); }
        pub fn name(self: ParsedQuote) []const u8 { return jsonStr(self.root().get("name")); }
        pub fn exchange(self: ParsedQuote) []const u8 { return jsonStr(self.root().get("exchange")); }
        pub fn datetime(self: ParsedQuote) []const u8 { return jsonStr(self.root().get("datetime")); }
        pub fn close(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("close")); }
        pub fn open(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("open")); }
        pub fn high(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("high")); }
        pub fn low(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("low")); }
        pub fn volume(self: ParsedQuote) u64 {
            // Guarded cast: @intFromFloat on NaN/inf/negative is illegal
            // behavior in Zig; malformed volume degrades to 0.
            const v = parseJsonFloat(self.root().get("volume"));
            return if (v >= 0 and v < 9007199254740992.0) @intFromFloat(v) else 0;
        }
        pub fn previous_close(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("previous_close")); }
        pub fn change(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("change")); }
        pub fn percent_change(self: ParsedQuote) f64 { return parseJsonFloat(self.root().get("percent_change")); }
        pub fn average_volume(self: ParsedQuote) u64 {
            // Same guarded cast as volume().
            const v = parseJsonFloat(self.root().get("average_volume"));
            return if (v >= 0 and v < 9007199254740992.0) @intFromFloat(v) else 0;
        }
        pub fn fifty_two_week_low(self: ParsedQuote) f64 {
            const ftw = self.root().get("fifty_two_week") orelse return 0;
            return switch (ftw) {
                .object => |o| parseJsonFloat(o.get("low")),
                else => 0,
            };
        }
        pub fn fifty_two_week_high(self: ParsedQuote) f64 {
            const ftw = self.root().get("fifty_two_week") orelse return 0;
            return switch (ftw) {
                .object => |o| parseJsonFloat(o.get("high")),
                else => 0,
            };
        }
    };
    /// Fetch a real-time quote; caller owns the returned QuoteResponse.
    pub fn fetchQuote(
        self: *TwelveData,
        allocator: std.mem.Allocator,
        symbol: []const u8,
    ) provider.ProviderError!QuoteResponse {
        self.rate_limiter.acquire();
        const url = http.buildUrl(allocator, base_url ++ "/quote", &.{
            .{ "symbol", symbol },
            .{ "apikey", self.api_key },
        }) catch return provider.ProviderError.OutOfMemory;
        defer allocator.free(url);
        var response = self.client.get(url) catch |err| return switch (err) {
            error.RateLimited => provider.ProviderError.RateLimited,
            error.Unauthorized => provider.ProviderError.Unauthorized,
            error.NotFound => provider.ProviderError.NotFound,
            else => provider.ProviderError.RequestFailed,
        };
        // BUGFIX: the response was never deinited, leaking any response
        // state other than the body. The body is detached below before
        // this defer runs, so deinit releases only the remainder.
        // NOTE(review): assumes http.Response.deinit tolerates an empty
        // body slice -- confirm against net/http.zig.
        defer response.deinit();
        // Transfer ownership of body to QuoteResponse
        const body = response.body;
        response.body = &.{};
        return .{
            .body = body,
            .allocator = allocator,
        };
    }
    /// Type-erased Provider view over this instance for generic dispatch.
    pub fn asProvider(self: *TwelveData) provider.Provider {
        return .{
            .ptr = @ptrCast(self),
            .vtable = &vtable,
        };
    }
    // Only candles are exposed generically; quote fetching has its own API.
    const vtable = provider.Provider.VTable{
        .fetchCandles = @ptrCast(&fetchCandlesVtable),
        .name = .twelvedata,
    };
    fn fetchCandlesVtable(
        ptr: *TwelveData,
        allocator: std.mem.Allocator,
        symbol: []const u8,
        from: Date,
        to: Date,
    ) provider.ProviderError![]Candle {
        return ptr.fetchCandles(allocator, symbol, from, to);
    }
};
// -- JSON parsing --
/// Parse Twelve Data's /time_series response into owned Candle structs,
/// oldest-first. adj_close is set equal to close: Twelve Data prices are
/// split-adjusted but NOT dividend-adjusted (see file header).
fn parseTimeSeriesResponse(allocator: std.mem.Allocator, body: []const u8) provider.ProviderError![]Candle {
    const parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    defer parsed.deinit();
    const root = parsed.value;
    // Check for API error
    if (root.object.get("status")) |status| {
        if (status == .string) {
            if (std.mem.eql(u8, status.string, "error")) {
                // Map well-known Twelve Data error codes onto the shared set.
                if (root.object.get("code")) |code| {
                    if (code == .integer and code.integer == 429) return provider.ProviderError.RateLimited;
                    if (code == .integer and code.integer == 401) return provider.ProviderError.Unauthorized;
                }
                return provider.ProviderError.RequestFailed;
            }
        }
    }
    const values_json = root.object.get("values") orelse return provider.ProviderError.ParseError;
    const values = switch (values_json) {
        .array => |a| a.items,
        else => return provider.ProviderError.ParseError,
    };
    // Twelve Data returns newest first. We'll parse into a list and reverse.
    var candles: std.ArrayList(Candle) = .empty;
    errdefer candles.deinit(allocator);
    for (values) |val| {
        const obj = switch (val) {
            .object => |o| o,
            else => continue,
        };
        const date = blk: {
            const dt = obj.get("datetime") orelse continue;
            const dt_str = switch (dt) {
                .string => |s| s,
                else => continue,
            };
            // datetime can be "YYYY-MM-DD" or "YYYY-MM-DD HH:MM:SS"
            const date_part = if (dt_str.len >= 10) dt_str[0..10] else continue;
            break :blk Date.parse(date_part) catch continue;
        };
        // BUGFIX: guard the float->int cast. @intFromFloat on NaN/inf or a
        // negative value is illegal behavior in Zig (parseFloat can yield
        // NaN for a "nan" string); malformed volume degrades to 0.
        const vol = parseJsonFloat(obj.get("volume"));
        candles.append(allocator, .{
            .date = date,
            .open = parseJsonFloat(obj.get("open")),
            .high = parseJsonFloat(obj.get("high")),
            .low = parseJsonFloat(obj.get("low")),
            .close = parseJsonFloat(obj.get("close")),
            // Twelve Data close is split-adjusted only, not dividend-adjusted
            .adj_close = parseJsonFloat(obj.get("close")),
            .volume = if (vol >= 0 and vol < 9007199254740992.0) @intFromFloat(vol) else 0,
        }) catch return provider.ProviderError.OutOfMemory;
    }
    // Reverse to get oldest-first ordering
    const slice = candles.toOwnedSlice(allocator) catch return provider.ProviderError.OutOfMemory;
    std.mem.reverse(Candle, slice);
    return slice;
}
/// Parse a /quote JSON body. On a status == "error" payload the parsed
/// document is released and RequestFailed is returned; otherwise the
/// caller owns the returned ParsedQuote and must deinit it.
fn parseQuoteBody(allocator: std.mem.Allocator, body: []const u8) provider.ProviderError!TwelveData.ParsedQuote {
    var parsed = std.json.parseFromSlice(std.json.Value, allocator, body, .{}) catch
        return provider.ProviderError.ParseError;
    // Release the document if we bail out with an error below.
    errdefer parsed.deinit();
    if (parsed.value.object.get("status")) |status| {
        if (status == .string and std.mem.eql(u8, status.string, "error")) {
            return provider.ProviderError.RequestFailed;
        }
    }
    return .{ .parsed = parsed };
}
/// Coerce an optional JSON value to f64. Accepts numeric strings as well
/// as floats and integers (Twelve Data encodes most numbers as strings);
/// absent, unparseable, or non-numeric values yield 0.
fn parseJsonFloat(val: ?std.json.Value) f64 {
    const v = val orelse return 0;
    switch (v) {
        .string => |s| return std.fmt.parseFloat(f64, s) catch 0,
        .float => |f| return f,
        .integer => |i| return @floatFromInt(i),
        else => return 0,
    }
}
/// Extract the string payload from an optional JSON value, defaulting to
/// the empty string when the value is absent or not a string.
fn jsonStr(val: ?std.json.Value) []const u8 {
    const v = val orelse return "";
    if (v == .string) return v.string;
    return "";
}

60
src/root.zig Normal file
View file

@ -0,0 +1,60 @@
//! zfin -- Zig Financial Data Library
//!
//! Fetches, caches, and analyzes US equity/ETF financial data from
//! multiple free-tier API providers (Twelve Data, Polygon, Finnhub,
//! Alpha Vantage). Includes Morningstar-style performance calculations.
// -- Data models --
pub const Date = @import("models/date.zig").Date;
pub const Candle = @import("models/candle.zig").Candle;
pub const Dividend = @import("models/dividend.zig").Dividend;
pub const DividendType = @import("models/dividend.zig").DividendType;
pub const Split = @import("models/split.zig").Split;
pub const OptionContract = @import("models/option.zig").OptionContract;
pub const OptionsChain = @import("models/option.zig").OptionsChain;
pub const ContractType = @import("models/option.zig").ContractType;
pub const EarningsEvent = @import("models/earnings.zig").EarningsEvent;
pub const ReportTime = @import("models/earnings.zig").ReportTime;
pub const EtfProfile = @import("models/etf_profile.zig").EtfProfile;
pub const Holding = @import("models/etf_profile.zig").Holding;
pub const SectorWeight = @import("models/etf_profile.zig").SectorWeight;
pub const TickerInfo = @import("models/ticker_info.zig").TickerInfo;
pub const SecurityType = @import("models/ticker_info.zig").SecurityType;
pub const Lot = @import("models/portfolio.zig").Lot;
pub const Position = @import("models/portfolio.zig").Position;
pub const Portfolio = @import("models/portfolio.zig").Portfolio;
pub const Quote = @import("models/quote.zig").Quote;
// -- Infrastructure --
pub const Config = @import("config.zig").Config;
pub const RateLimiter = @import("net/rate_limiter.zig").RateLimiter;
pub const http = @import("net/http.zig");
// -- Cache --
pub const cache = @import("cache/store.zig");
// -- Analytics --
pub const performance = @import("analytics/performance.zig");
pub const risk = @import("analytics/risk.zig");
// -- Service layer --
pub const DataService = @import("service.zig").DataService;
pub const DataError = @import("service.zig").DataError;
pub const DataSource = @import("service.zig").Source;
// -- Providers --
pub const Provider = @import("providers/provider.zig").Provider;
pub const TwelveData = @import("providers/twelvedata.zig").TwelveData;
pub const Polygon = @import("providers/polygon.zig").Polygon;
pub const Finnhub = @import("providers/finnhub.zig").Finnhub;
pub const Cboe = @import("providers/cboe.zig").Cboe;
pub const AlphaVantage = @import("providers/alphavantage.zig").AlphaVantage;
// -- Re-export SRF for portfolio file loading --
pub const srf = @import("srf");
// -- Tests --
// Reference every public declaration so that `zig build test` compiles
// all re-exported modules and runs their embedded tests transitively.
test {
    const std = @import("std");
    std.testing.refAllDecls(@This());
}

429
src/service.zig Normal file
View file

@ -0,0 +1,429 @@
//! DataService -- unified data access layer for zfin.
//!
//! Encapsulates the "check cache -> fresh? return -> else fetch from provider -> cache -> return"
//! pattern that was previously duplicated between CLI and TUI. Both frontends should use this
//! as their sole data source.
//!
//! Provider selection is internal: each data type routes to the appropriate provider
//! based on available API keys. Callers never need to know which provider was used.
const std = @import("std");
const Date = @import("models/date.zig").Date;
const Candle = @import("models/candle.zig").Candle;
const Dividend = @import("models/dividend.zig").Dividend;
const Split = @import("models/split.zig").Split;
const OptionsChain = @import("models/option.zig").OptionsChain;
const EarningsEvent = @import("models/earnings.zig").EarningsEvent;
const Quote = @import("models/quote.zig").Quote;
const EtfProfile = @import("models/etf_profile.zig").EtfProfile;
const Config = @import("config.zig").Config;
const cache = @import("cache/store.zig");
const TwelveData = @import("providers/twelvedata.zig").TwelveData;
const Polygon = @import("providers/polygon.zig").Polygon;
const Finnhub = @import("providers/finnhub.zig").Finnhub;
const Cboe = @import("providers/cboe.zig").Cboe;
const AlphaVantage = @import("providers/alphavantage.zig").AlphaVantage;
const performance = @import("analytics/performance.zig");
/// Error set shared by every DataService operation. Provider-specific
/// failures are collapsed into these coarse categories so callers never
/// need to know which provider was used.
pub const DataError = error{
    NoApiKey, // the required provider API key is missing from Config
    FetchFailed, // network/provider request failed
    CacheError, // on-disk cache read failed
    ParseError, // provider response could not be parsed
    OutOfMemory,
};
/// Indicates whether the returned data came from cache or was freshly fetched.
pub const Source = enum {
    cached, // served from the on-disk cache (within its TTL)
    fetched, // retrieved from a provider during this call
};
pub const DataService = struct {
    allocator: std.mem.Allocator,
    config: Config,
    // Lazily initialized providers (null until first use)
    td: ?TwelveData = null,
    pg: ?Polygon = null,
    fh: ?Finnhub = null,
    cboe: ?Cboe = null,
    av: ?AlphaVantage = null,

    /// Construct a service. Cheap: no providers are created and no I/O
    /// happens until the first get* call.
    pub fn init(allocator: std.mem.Allocator, config: Config) DataService {
        return .{
            .allocator = allocator,
            .config = config,
        };
    }

    /// Tear down whichever providers were lazily created.
    pub fn deinit(self: *DataService) void {
        if (self.td) |*td| td.deinit();
        if (self.pg) |*pg| pg.deinit();
        if (self.fh) |*fh| fh.deinit();
        if (self.cboe) |*c| c.deinit();
        if (self.av) |*av| av.deinit();
    }

    // Provider accessors
    // Each accessor creates the provider on first use and errors with
    // NoApiKey if the corresponding key is absent from Config.
    fn getTwelveData(self: *DataService) DataError!*TwelveData {
        if (self.td) |*td| return td;
        const key = self.config.twelvedata_key orelse return DataError.NoApiKey;
        self.td = TwelveData.init(self.allocator, key);
        return &self.td.?;
    }

    fn getPolygon(self: *DataService) DataError!*Polygon {
        if (self.pg) |*pg| return pg;
        const key = self.config.polygon_key orelse return DataError.NoApiKey;
        self.pg = Polygon.init(self.allocator, key);
        return &self.pg.?;
    }

    fn getFinnhub(self: *DataService) DataError!*Finnhub {
        if (self.fh) |*fh| return fh;
        const key = self.config.finnhub_key orelse return DataError.NoApiKey;
        self.fh = Finnhub.init(self.allocator, key);
        return &self.fh.?;
    }

    // CBOE needs no API key, so this accessor cannot fail.
    fn getCboe(self: *DataService) *Cboe {
        if (self.cboe) |*c| return c;
        self.cboe = Cboe.init(self.allocator);
        return &self.cboe.?;
    }

    fn getAlphaVantage(self: *DataService) DataError!*AlphaVantage {
        if (self.av) |*av| return av;
        const key = self.config.alphavantage_key orelse return DataError.NoApiKey;
        self.av = AlphaVantage.init(self.allocator, key);
        return &self.av.?;
    }

    // Cache helper
    // Stores are constructed per-call; presumably Store.init is cheap
    // (just captures allocator + dir) — TODO confirm against cache/store.zig.
    fn store(self: *DataService) cache.Store {
        return cache.Store.init(self.allocator, self.config.cache_dir);
    }

    /// Invalidate cached data for a symbol so the next get* call forces a fresh fetch.
    pub fn invalidate(self: *DataService, symbol: []const u8, data_type: cache.DataType) void {
        var s = self.store();
        s.clearData(symbol, data_type);
    }

    // Public data methods
    // All get* methods follow the same shape: read raw cache bytes, and
    // if present AND fresh AND deserializable, return them as .cached
    // with the file mtime as timestamp; otherwise fetch from the
    // provider, best-effort write-back to cache (serialization/write
    // errors are swallowed), and return as .fetched.
    // NOTE(review): returned slices appear to be caller-owned (cache
    // copies are freed via defer here) — confirm against provider API.

    /// Fetch daily candles for a symbol (10+ years for trailing returns).
    /// Checks cache first; fetches from TwelveData if stale/missing.
    pub fn getCandles(self: *DataService, symbol: []const u8) DataError!struct { data: []Candle, source: Source, timestamp: i64 } {
        var s = self.store();
        // Try cache
        const cached_raw = s.readRaw(symbol, .candles_daily) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .candles_daily, cache.Ttl.candles_latest) catch false;
            if (fresh) {
                // Deserialization failure is treated as a cache miss, not an error.
                const candles = cache.Store.deserializeCandles(self.allocator, data) catch null;
                if (candles) |c| return .{ .data = c, .source = .cached, .timestamp = s.getMtime(symbol, .candles_daily) orelse std.time.timestamp() };
            }
        }
        // Fetch from provider
        var td = try self.getTwelveData();
        const today = todayDate();
        // ~10 years plus a 60-day pad so the oldest trailing window has data.
        const from = today.addDays(-365 * 10 - 60);
        const fetched = td.fetchCandles(self.allocator, symbol, from, today) catch
            return DataError.FetchFailed;
        // Cache the result (only when non-empty, so a bad symbol isn't pinned).
        if (fetched.len > 0) {
            if (cache.Store.serializeCandles(self.allocator, fetched)) |srf_data| {
                defer self.allocator.free(srf_data);
                s.writeRaw(symbol, .candles_daily, srf_data) catch {};
            } else |_| {}
        }
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch dividend history for a symbol.
    /// Checks cache first; fetches from Polygon if stale/missing.
    pub fn getDividends(self: *DataService, symbol: []const u8) DataError!struct { data: []Dividend, source: Source, timestamp: i64 } {
        var s = self.store();
        const cached_raw = s.readRaw(symbol, .dividends) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .dividends, cache.Ttl.dividends) catch false;
            if (fresh) {
                const divs = cache.Store.deserializeDividends(self.allocator, data) catch null;
                if (divs) |d| return .{ .data = d, .source = .cached, .timestamp = s.getMtime(symbol, .dividends) orelse std.time.timestamp() };
            }
        }
        var pg = try self.getPolygon();
        const fetched = pg.fetchDividends(self.allocator, symbol, null, null) catch
            return DataError.FetchFailed;
        if (fetched.len > 0) {
            if (cache.Store.serializeDividends(self.allocator, fetched)) |srf_data| {
                defer self.allocator.free(srf_data);
                s.writeRaw(symbol, .dividends, srf_data) catch {};
            } else |_| {}
        }
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch split history for a symbol.
    /// Checks cache first; fetches from Polygon if stale/missing.
    pub fn getSplits(self: *DataService, symbol: []const u8) DataError!struct { data: []Split, source: Source, timestamp: i64 } {
        var s = self.store();
        const cached_raw = s.readRaw(symbol, .splits) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .splits, cache.Ttl.splits) catch false;
            if (fresh) {
                const splits = cache.Store.deserializeSplits(self.allocator, data) catch null;
                if (splits) |sp| return .{ .data = sp, .source = .cached, .timestamp = s.getMtime(symbol, .splits) orelse std.time.timestamp() };
            }
        }
        var pg = try self.getPolygon();
        const fetched = pg.fetchSplits(self.allocator, symbol) catch
            return DataError.FetchFailed;
        // NOTE(review): unlike getCandles/getDividends, an EMPTY result is
        // cached here (no fetched.len > 0 guard) — presumably deliberate,
        // since many symbols legitimately have no splits; confirm intent.
        if (cache.Store.serializeSplits(self.allocator, fetched)) |srf_data| {
            defer self.allocator.free(srf_data);
            s.writeRaw(symbol, .splits, srf_data) catch {};
        } else |_| {}
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch options chain for a symbol (all expirations).
    /// Checks cache first; fetches from CBOE if stale/missing (no API key needed).
    pub fn getOptions(self: *DataService, symbol: []const u8) DataError!struct { data: []OptionsChain, source: Source, timestamp: i64 } {
        var s = self.store();
        const cached_raw = s.readRaw(symbol, .options) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .options, cache.Ttl.options) catch false;
            if (fresh) {
                const chains = cache.Store.deserializeOptions(self.allocator, data) catch null;
                if (chains) |c| return .{ .data = c, .source = .cached, .timestamp = s.getMtime(symbol, .options) orelse std.time.timestamp() };
            }
        }
        var cboe = self.getCboe();
        const fetched = cboe.fetchOptionsChain(self.allocator, symbol) catch
            return DataError.FetchFailed;
        if (fetched.len > 0) {
            if (cache.Store.serializeOptions(self.allocator, fetched)) |srf_data| {
                defer self.allocator.free(srf_data);
                s.writeRaw(symbol, .options, srf_data) catch {};
            } else |_| {}
        }
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch earnings history for a symbol (5 years back, 1 year forward).
    /// Checks cache first; fetches from Finnhub if stale/missing.
    pub fn getEarnings(self: *DataService, symbol: []const u8) DataError!struct { data: []EarningsEvent, source: Source, timestamp: i64 } {
        var s = self.store();
        const cached_raw = s.readRaw(symbol, .earnings) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .earnings, cache.Ttl.earnings) catch false;
            if (fresh) {
                const events = cache.Store.deserializeEarnings(self.allocator, data) catch null;
                if (events) |e| return .{ .data = e, .source = .cached, .timestamp = s.getMtime(symbol, .earnings) orelse std.time.timestamp() };
            }
        }
        var fh = try self.getFinnhub();
        const today = todayDate();
        const from = today.subtractYears(5);
        const to = today.addDays(365);
        const fetched = fh.fetchEarnings(self.allocator, symbol, from, to) catch
            return DataError.FetchFailed;
        if (fetched.len > 0) {
            if (cache.Store.serializeEarnings(self.allocator, fetched)) |srf_data| {
                defer self.allocator.free(srf_data);
                s.writeRaw(symbol, .earnings, srf_data) catch {};
            } else |_| {}
        }
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch ETF profile for a symbol.
    /// Checks cache first; fetches from Alpha Vantage if stale/missing.
    pub fn getEtfProfile(self: *DataService, symbol: []const u8) DataError!struct { data: EtfProfile, source: Source, timestamp: i64 } {
        var s = self.store();
        const cached_raw = s.readRaw(symbol, .etf_profile) catch return DataError.CacheError;
        if (cached_raw) |data| {
            defer self.allocator.free(data);
            const fresh = s.isFresh(symbol, .etf_profile, cache.Ttl.etf_profile) catch false;
            if (fresh) {
                const profile = cache.Store.deserializeEtfProfile(self.allocator, data) catch null;
                if (profile) |p| return .{ .data = p, .source = .cached, .timestamp = s.getMtime(symbol, .etf_profile) orelse std.time.timestamp() };
            }
        }
        var av = try self.getAlphaVantage();
        const fetched = av.fetchEtfProfile(self.allocator, symbol) catch
            return DataError.FetchFailed;
        // Single-value result: always written back (no emptiness check applies).
        if (cache.Store.serializeEtfProfile(self.allocator, fetched)) |srf_data| {
            defer self.allocator.free(srf_data);
            s.writeRaw(symbol, .etf_profile, srf_data) catch {};
        } else |_| {}
        return .{ .data = fetched, .source = .fetched, .timestamp = std.time.timestamp() };
    }

    /// Fetch a real-time (or 15-min delayed) quote for a symbol.
    /// No cache -- always fetches fresh from TwelveData.
    /// The returned Quote borrows `symbol` for its string fields; the
    /// numeric fields are copied out before the response is freed.
    pub fn getQuote(self: *DataService, symbol: []const u8) DataError!Quote {
        var td = try self.getTwelveData();
        var resp = td.fetchQuote(self.allocator, symbol) catch
            return DataError.FetchFailed;
        defer resp.deinit();
        var parsed = resp.parse(self.allocator) catch
            return DataError.ParseError;
        defer parsed.deinit();
        return .{
            .symbol = symbol,
            .name = symbol, // name is in parsed JSON but lifetime is tricky; use symbol
            .exchange = "",
            .datetime = "",
            .close = parsed.close(),
            .open = parsed.open(),
            .high = parsed.high(),
            .low = parsed.low(),
            .volume = parsed.volume(),
            .previous_close = parsed.previous_close(),
            .change = parsed.change(),
            .percent_change = parsed.percent_change(),
            .average_volume = parsed.average_volume(),
            .fifty_two_week_low = parsed.fifty_two_week_low(),
            .fifty_two_week_high = parsed.fifty_two_week_high(),
        };
    }

    /// Compute trailing returns for a symbol (fetches candles + dividends).
    /// Returns both as-of-date and month-end trailing returns.
    /// As-of-date: end = latest close. Matches Morningstar "Trailing Returns" page.
    /// Month-end: end = last business day of prior month. Matches Morningstar "Performance" page.
    pub fn getTrailingReturns(self: *DataService, symbol: []const u8) DataError!struct {
        asof_price: performance.TrailingReturns,
        asof_total: ?performance.TrailingReturns,
        me_price: performance.TrailingReturns,
        me_total: ?performance.TrailingReturns,
        candles: []Candle,
        dividends: ?[]Dividend,
        source: Source,
        timestamp: i64,
    } {
        const candle_result = try self.getCandles(symbol);
        const c = candle_result.data;
        if (c.len == 0) return DataError.FetchFailed;
        const today = todayDate();
        // As-of-date (end = last candle)
        const asof_price = performance.trailingReturns(c);
        // Month-end (end = last business day of prior month)
        const me_price = performance.trailingReturnsMonthEnd(c, today);
        // Try to get dividends (non-fatal if unavailable): total-return
        // fields stay null and callers show price-return only.
        var divs: ?[]Dividend = null;
        var asof_total: ?performance.TrailingReturns = null;
        var me_total: ?performance.TrailingReturns = null;
        if (self.getDividends(symbol)) |div_result| {
            divs = div_result.data;
            asof_total = performance.trailingReturnsWithDividends(c, div_result.data);
            me_total = performance.trailingReturnsMonthEndWithDividends(c, div_result.data, today);
        } else |_| {}
        return .{
            .asof_price = asof_price,
            .asof_total = asof_total,
            .me_price = me_price,
            .me_total = me_total,
            .candles = c,
            .dividends = divs,
            .source = candle_result.source,
            .timestamp = candle_result.timestamp,
        };
    }

    /// Read candles from cache only (no network fetch). Used by TUI for display.
    /// Returns null if no cached data exists.
    pub fn getCachedCandles(self: *DataService, symbol: []const u8) ?[]Candle {
        var s = self.store();
        const data = s.readRaw(symbol, .candles_daily) catch return null;
        if (data) |d| {
            defer self.allocator.free(d);
            return cache.Store.deserializeCandles(self.allocator, d) catch null;
        }
        return null;
    }

    /// Read dividends from cache only (no network fetch).
    pub fn getCachedDividends(self: *DataService, symbol: []const u8) ?[]Dividend {
        var s = self.store();
        const data = s.readRaw(symbol, .dividends) catch return null;
        if (data) |d| {
            defer self.allocator.free(d);
            return cache.Store.deserializeDividends(self.allocator, d) catch null;
        }
        return null;
    }

    /// Read earnings from cache only (no network fetch).
    pub fn getCachedEarnings(self: *DataService, symbol: []const u8) ?[]EarningsEvent {
        var s = self.store();
        const data = s.readRaw(symbol, .earnings) catch return null;
        if (data) |d| {
            defer self.allocator.free(d);
            return cache.Store.deserializeEarnings(self.allocator, d) catch null;
        }
        return null;
    }

    /// Read options from cache only (no network fetch).
    pub fn getCachedOptions(self: *DataService, symbol: []const u8) ?[]OptionsChain {
        var s = self.store();
        const data = s.readRaw(symbol, .options) catch return null;
        if (data) |d| {
            defer self.allocator.free(d);
            return cache.Store.deserializeOptions(self.allocator, d) catch null;
        }
        return null;
    }

    // Utility
    // Today's date as days since the Unix epoch (UTC; no timezone
    // adjustment is applied here).
    fn todayDate() Date {
        const ts = std.time.timestamp();
        const days: i32 = @intCast(@divFloor(ts, 86400));
        return .{ .days = days };
    }
};

405
src/tui/keybinds.zig Normal file
View file

@ -0,0 +1,405 @@
const std = @import("std");
const vaxis = @import("vaxis");
const srf = @import("srf");
/// Every user-triggerable TUI action a key can be bound to.
/// SRF keybinding files reference these by tag name (see parseAction /
/// printDefaults below), so renaming a variant changes the file format.
pub const Action = enum {
    quit,
    refresh,
    // Tab navigation
    prev_tab,
    next_tab,
    tab_1,
    tab_2,
    tab_3,
    tab_4,
    tab_5,
    // Scrolling / selection
    scroll_down,
    scroll_up,
    scroll_top,
    scroll_bottom,
    page_down,
    page_up,
    select_next,
    select_prev,
    expand_collapse,
    // Symbol entry / misc
    select_symbol,
    symbol_input,
    help,
    edit,
    // Options view
    collapse_all_calls,
    collapse_all_puts,
    options_filter_1,
    options_filter_2,
    options_filter_3,
    options_filter_4,
    options_filter_5,
    options_filter_6,
    options_filter_7,
    options_filter_8,
    options_filter_9,
};
/// A key press: a Unicode codepoint (or vaxis special-key constant)
/// plus modifier flags. Defaults to no modifiers.
pub const KeyCombo = struct {
    codepoint: u21,
    mods: vaxis.Key.Modifiers = .{},
};
/// One action-to-key binding. Several Bindings may share an action
/// (multiple keys for the same command).
pub const Binding = struct {
    action: Action,
    key: KeyCombo,
};
pub const KeyMap = struct {
    bindings: []const Binding,
    // Non-null only for keymaps loaded from a file; the arena owns the
    // bindings slice and all parsed strings.
    arena: ?*std.heap.ArenaAllocator = null,

    /// Release the arena backing a file-loaded keymap.
    /// A no-op for the static default keymap (arena == null).
    pub fn deinit(self: *KeyMap) void {
        const a = self.arena orelse return;
        const backing = a.child_allocator;
        a.deinit();
        backing.destroy(a);
    }

    /// Return the first action bound to `key`, or null if unbound.
    pub fn matchAction(self: KeyMap, key: vaxis.Key) ?Action {
        for (self.bindings) |binding| {
            if (key.matches(binding.key.codepoint, binding.key.mods))
                return binding.action;
        }
        return null;
    }
};
// Defaults
/// Built-in bindings used when no ~/.config/zfin/keys.srf is present.
/// Duplicate actions are intentional (alternate keys for one command).
const default_bindings = [_]Binding{
    // Quit / refresh
    .{ .action = .quit, .key = .{ .codepoint = 'q' } },
    .{ .action = .quit, .key = .{ .codepoint = 'c', .mods = .{ .ctrl = true } } },
    .{ .action = .refresh, .key = .{ .codepoint = 'r' } },
    .{ .action = .refresh, .key = .{ .codepoint = vaxis.Key.f5 } },
    // Tab navigation (vim-style h/l plus arrows/tab)
    .{ .action = .prev_tab, .key = .{ .codepoint = 'h' } },
    .{ .action = .prev_tab, .key = .{ .codepoint = vaxis.Key.left } },
    .{ .action = .next_tab, .key = .{ .codepoint = 'l' } },
    .{ .action = .next_tab, .key = .{ .codepoint = vaxis.Key.right } },
    .{ .action = .next_tab, .key = .{ .codepoint = vaxis.Key.tab } },
    .{ .action = .tab_1, .key = .{ .codepoint = '1' } },
    .{ .action = .tab_2, .key = .{ .codepoint = '2' } },
    .{ .action = .tab_3, .key = .{ .codepoint = '3' } },
    .{ .action = .tab_4, .key = .{ .codepoint = '4' } },
    .{ .action = .tab_5, .key = .{ .codepoint = '5' } },
    // Scrolling / selection (vim-style)
    .{ .action = .scroll_down, .key = .{ .codepoint = 'd', .mods = .{ .ctrl = true } } },
    .{ .action = .scroll_up, .key = .{ .codepoint = 'u', .mods = .{ .ctrl = true } } },
    .{ .action = .scroll_top, .key = .{ .codepoint = 'g' } },
    .{ .action = .scroll_bottom, .key = .{ .codepoint = 'G' } },
    .{ .action = .page_down, .key = .{ .codepoint = vaxis.Key.page_down } },
    .{ .action = .page_up, .key = .{ .codepoint = vaxis.Key.page_up } },
    .{ .action = .select_next, .key = .{ .codepoint = 'j' } },
    .{ .action = .select_next, .key = .{ .codepoint = vaxis.Key.down } },
    .{ .action = .select_prev, .key = .{ .codepoint = 'k' } },
    .{ .action = .select_prev, .key = .{ .codepoint = vaxis.Key.up } },
    .{ .action = .expand_collapse, .key = .{ .codepoint = vaxis.Key.enter } },
    // Symbol entry / misc
    .{ .action = .select_symbol, .key = .{ .codepoint = 's' } },
    .{ .action = .symbol_input, .key = .{ .codepoint = '/' } },
    .{ .action = .help, .key = .{ .codepoint = '?' } },
    .{ .action = .edit, .key = .{ .codepoint = 'e' } },
    // Options view
    .{ .action = .collapse_all_calls, .key = .{ .codepoint = 'c' } },
    .{ .action = .collapse_all_puts, .key = .{ .codepoint = 'p' } },
    .{ .action = .options_filter_1, .key = .{ .codepoint = '1', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_2, .key = .{ .codepoint = '2', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_3, .key = .{ .codepoint = '3', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_4, .key = .{ .codepoint = '4', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_5, .key = .{ .codepoint = '5', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_6, .key = .{ .codepoint = '6', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_7, .key = .{ .codepoint = '7', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_8, .key = .{ .codepoint = '8', .mods = .{ .ctrl = true } } },
    .{ .action = .options_filter_9, .key = .{ .codepoint = '9', .mods = .{ .ctrl = true } } },
};
/// Return the built-in keymap. Backed by static data (arena stays null),
/// so calling deinit() on the result is a safe no-op.
pub fn defaults() KeyMap {
    return .{ .bindings = &default_bindings };
}
// SRF serialization
// Bidirectional table mapping SRF key-file names to vaxis codepoints.
// Used by both formatKeyCombo (cp -> name) and nameToCodepoint
// (name -> cp, case-insensitive); names here are the canonical
// spellings emitted by printDefaults.
const special_key_names = [_]struct { name: []const u8, cp: u21 }{
    .{ .name = "tab", .cp = vaxis.Key.tab },
    .{ .name = "enter", .cp = vaxis.Key.enter },
    .{ .name = "escape", .cp = vaxis.Key.escape },
    .{ .name = "space", .cp = vaxis.Key.space },
    .{ .name = "backspace", .cp = vaxis.Key.backspace },
    .{ .name = "insert", .cp = vaxis.Key.insert },
    .{ .name = "delete", .cp = vaxis.Key.delete },
    .{ .name = "left", .cp = vaxis.Key.left },
    .{ .name = "right", .cp = vaxis.Key.right },
    .{ .name = "up", .cp = vaxis.Key.up },
    .{ .name = "down", .cp = vaxis.Key.down },
    .{ .name = "page_up", .cp = vaxis.Key.page_up },
    .{ .name = "page_down", .cp = vaxis.Key.page_down },
    .{ .name = "home", .cp = vaxis.Key.home },
    .{ .name = "end", .cp = vaxis.Key.end },
    .{ .name = "F1", .cp = vaxis.Key.f1 },
    .{ .name = "F2", .cp = vaxis.Key.f2 },
    .{ .name = "F3", .cp = vaxis.Key.f3 },
    .{ .name = "F4", .cp = vaxis.Key.f4 },
    .{ .name = "F5", .cp = vaxis.Key.f5 },
    .{ .name = "F6", .cp = vaxis.Key.f6 },
    .{ .name = "F7", .cp = vaxis.Key.f7 },
    .{ .name = "F8", .cp = vaxis.Key.f8 },
    .{ .name = "F9", .cp = vaxis.Key.f9 },
    .{ .name = "F10", .cp = vaxis.Key.f10 },
    .{ .name = "F11", .cp = vaxis.Key.f11 },
    .{ .name = "F12", .cp = vaxis.Key.f12 },
};
/// Reverse-map a special-key codepoint to its canonical SRF name.
/// Returns null for ordinary printable characters (formatted directly).
fn codepointToName(cp: u21) ?[]const u8 {
    for (special_key_names) |e| {
        if (e.cp == cp) return e.name;
    }
    return null;
}
/// Resolve a key name ("tab", "F5", ...) to a codepoint.
/// Our own table is consulted first, case-insensitively (so "f5" and
/// "F5" both work); unknown names fall back to vaxis's name_map, which
/// expects lowercase. Returns null when the name is too long or unknown.
fn nameToCodepoint(name: []const u8) ?u21 {
    // Check our table first (case-insensitive for F-keys)
    for (special_key_names) |entry| {
        if (std.ascii.eqlIgnoreCase(entry.name, name)) return entry.cp;
    }
    // Fall back to vaxis name_map (lowercase)
    var lowered_buf: [32]u8 = undefined;
    const lowered = toLower(name, &lowered_buf) orelse return null;
    return vaxis.Key.name_map.get(lowered);
}
/// ASCII-lowercase `s` into `buf`; null if `buf` is too small.
fn toLower(s: []const u8, buf: []u8) ?[]const u8 {
    if (s.len > buf.len) return null;
    // std.ascii.lowerString copies while lowercasing and returns the
    // written slice; the length check above satisfies its precondition.
    return std.ascii.lowerString(buf[0..s.len], s);
}
/// Append `s` to `buf` at offset `pos`; returns the new offset, or null
/// if `s` does not fit. (Shared by the modifier/name stanzas below.)
fn appendStr(buf: []u8, pos: usize, s: []const u8) ?usize {
    if (pos + s.len > buf.len) return null;
    @memcpy(buf[pos..][0..s.len], s);
    return pos + s.len;
}

/// Format a KeyCombo as an SRF key string (e.g. "ctrl+c", "F5").
/// Modifiers are emitted in the fixed order ctrl, alt, shift, followed
/// by either the special-key name or the printable ASCII character.
/// Returns null when `buf` is too small or the codepoint is neither a
/// named special key nor printable ASCII.
pub fn formatKeyCombo(combo: KeyCombo, buf: []u8) ?[]const u8 {
    var pos: usize = 0;
    // The three prefix stanzas previously duplicated the bounds-check +
    // memcpy; appendStr centralizes that logic.
    if (combo.mods.ctrl) pos = appendStr(buf, pos, "ctrl+") orelse return null;
    if (combo.mods.alt) pos = appendStr(buf, pos, "alt+") orelse return null;
    if (combo.mods.shift) pos = appendStr(buf, pos, "shift+") orelse return null;
    if (codepointToName(combo.codepoint)) |name| {
        pos = appendStr(buf, pos, name) orelse return null;
    } else if (combo.codepoint >= 0x20 and combo.codepoint < 0x7f) {
        // Printable ASCII: emit the character itself.
        if (pos + 1 > buf.len) return null;
        buf[pos] = @intCast(combo.codepoint);
        pos += 1;
    } else {
        // Non-printable codepoint with no known name: unrepresentable.
        return null;
    }
    return buf[0..pos];
}
/// If `s` starts with `prefix` (case-insensitive) AND has content after
/// it, return the remainder; otherwise null. The strict `>` keeps a
/// bare "ctrl+" from being consumed as an empty binding.
fn stripModPrefix(s: []const u8, prefix: []const u8) ?[]const u8 {
    if (s.len > prefix.len and std.ascii.eqlIgnoreCase(s[0..prefix.len], prefix))
        return s[prefix.len..];
    return null;
}

/// Parse an SRF key string ("ctrl+c", "F5", "shift+tab") into a KeyCombo.
/// Returns null on empty input or an unrecognized key name.
fn parseKeyCombo(key_str: []const u8) ?KeyCombo {
    var mods: vaxis.Key.Modifiers = .{};
    var rest = key_str;
    // Strip modifier prefixes in any order, repeatedly, until none match.
    var progressed = true;
    while (progressed) {
        progressed = false;
        if (stripModPrefix(rest, "ctrl+")) |r| {
            mods.ctrl = true;
            rest = r;
            progressed = true;
        } else if (stripModPrefix(rest, "alt+")) |r| {
            mods.alt = true;
            rest = r;
            progressed = true;
        } else if (stripModPrefix(rest, "shift+")) |r| {
            mods.shift = true;
            rest = r;
            progressed = true;
        }
    }
    if (rest.len == 0) return null;
    // Single printable ASCII character
    if (rest.len == 1 and rest[0] >= 0x20 and rest[0] < 0x7f)
        return .{ .codepoint = rest[0], .mods = mods };
    // Named key ("tab", "F5", ...)
    if (nameToCodepoint(rest)) |cp|
        return .{ .codepoint = cp, .mods = mods };
    return null;
}
/// Print default keybindings in SRF format to stdout.
/// Output is a commented header followed by one `action::X,key::Y` line
/// per default binding (bindings whose key cannot be formatted are
/// silently skipped, mirroring loadFromData's tolerance).
pub fn printDefaults() !void {
    var buf: [4096]u8 = undefined;
    var writer = std.fs.File.stdout().writer(&buf);
    const out = &writer.interface;
    // Header assembled at comptime into a single literal; the emitted
    // bytes are identical to writing each line separately.
    const header =
        "#!srfv1\n" ++
        "# zfin TUI keybindings\n" ++
        "# This file is the sole source of keybindings when present.\n" ++
        "# If this file is removed, built-in defaults are used.\n" ++
        "# Regenerate: zfin interactive --default-keys > ~/.config/zfin/keys.srf\n" ++
        "#\n" ++
        "# Format: action::ACTION_NAME,key::KEY_STRING\n" ++
        "# Modifiers: ctrl+, alt+, shift+ (e.g. ctrl+c)\n" ++
        "# Special keys: tab, enter, escape, space, backspace,\n" ++
        "#   left, right, up, down, page_up, page_down, home, end,\n" ++
        "#   F1-F12, insert, delete\n" ++
        "# Multiple lines with the same action = multiple bindings.\n";
    try out.writeAll(header);
    for (default_bindings) |binding| {
        var key_buf: [32]u8 = undefined;
        if (formatKeyCombo(binding.key, &key_buf)) |key_str| {
            try out.print("action::{s},key::{s}\n", .{ @tagName(binding.action), key_str });
        }
    }
    try out.flush();
}
// SRF loading
/// Map an action name (e.g. "quit") to its Action variant, or null if
/// unknown. Exact (case-sensitive) match against the enum tag names.
fn parseAction(name: []const u8) ?Action {
    // std.meta.stringToEnum is the stdlib equivalent of the previous
    // hand-rolled inline-for over std.meta.fields(Action).
    return std.meta.stringToEnum(Action, name);
}
/// Load keybindings from an SRF file. Returns null if the file doesn't exist
/// or can't be parsed. On success, the caller owns the returned KeyMap and
/// must call deinit().
pub fn loadFromFile(allocator: std.mem.Allocator, path: []const u8) ?KeyMap {
    // 64 KiB cap on the keybinding file; any read error (missing file,
    // too large, permissions) is treated uniformly as "no keymap".
    const data = std.fs.cwd().readFileAlloc(allocator, path, 64 * 1024) catch return null;
    defer allocator.free(data);
    return loadFromData(allocator, data);
}
/// Parse SRF keybinding data into a KeyMap backed by its own arena.
/// Returns null on any allocation or parse failure. On success the
/// caller owns the KeyMap and must call deinit().
///
/// BUG FIX: the previous version used `errdefer` to clean up the arena,
/// but errdefer only runs when a scope exits via an *error* return --
/// this function returns ?KeyMap, so every `catch return null` after the
/// arena was created leaked both the ArenaAllocator struct and its
/// contents. Cleanup is now explicit on the null path.
pub fn loadFromData(allocator: std.mem.Allocator, data: []const u8) ?KeyMap {
    const arena = allocator.create(std.heap.ArenaAllocator) catch return null;
    arena.* = std.heap.ArenaAllocator.init(allocator);
    const bindings = parseBindingsIntoArena(arena.allocator(), data) orelse {
        // Everything parseBindingsIntoArena allocated came from the
        // arena, so tearing it down releases all partial state.
        arena.deinit();
        allocator.destroy(arena);
        return null;
    };
    return .{
        .bindings = bindings,
        .arena = arena,
    };
}

/// Parse SRF records into a Binding slice allocated from `aa` (an arena
/// allocator). Records missing either a valid `action` or `key` field
/// are skipped; returns null on parse/allocation failure.
fn parseBindingsIntoArena(aa: std.mem.Allocator, data: []const u8) ?[]const Binding {
    var reader = std.Io.Reader.fixed(data);
    const parsed = srf.parse(&reader, aa, .{}) catch return null;
    // No parsed.deinit() -- the arena owns everything it allocated.
    var bindings = std.ArrayList(Binding).empty;
    for (parsed.records.items) |record| {
        var action: ?Action = null;
        var key: ?KeyCombo = null;
        for (record.fields) |field| {
            if (std.mem.eql(u8, field.key, "action")) {
                if (field.value) |v| {
                    switch (v) {
                        .string => |s| action = parseAction(s),
                        else => {},
                    }
                }
            } else if (std.mem.eql(u8, field.key, "key")) {
                if (field.value) |v| {
                    switch (v) {
                        .string => |s| key = parseKeyCombo(s),
                        else => {},
                    }
                }
            }
        }
        if (action != null and key != null) {
            bindings.append(aa, .{ .action = action.?, .key = key.? }) catch return null;
        }
    }
    return bindings.toOwnedSlice(aa) catch return null;
}
// Tests
test "parseKeyCombo single char" {
    const combo = parseKeyCombo("q").?;
    try std.testing.expectEqual(@as(u21, 'q'), combo.codepoint);
    try std.testing.expect(!combo.mods.ctrl);
}
test "parseKeyCombo ctrl modifier" {
    const combo = parseKeyCombo("ctrl+c").?;
    try std.testing.expectEqual(@as(u21, 'c'), combo.codepoint);
    try std.testing.expect(combo.mods.ctrl);
}
test "parseKeyCombo special key" {
    const combo = parseKeyCombo("F5").?;
    try std.testing.expectEqual(vaxis.Key.f5, combo.codepoint);
}
test "parseKeyCombo named key" {
    const combo = parseKeyCombo("tab").?;
    try std.testing.expectEqual(vaxis.Key.tab, combo.codepoint);
}
// format -> parse must be lossless for any representable combo.
test "formatKeyCombo roundtrip" {
    var buf: [32]u8 = undefined;
    const combo = KeyCombo{ .codepoint = 'c', .mods = .{ .ctrl = true } };
    const str = formatKeyCombo(combo, &buf).?;
    try std.testing.expectEqualStrings("ctrl+c", str);
    const parsed = parseKeyCombo(str).?;
    try std.testing.expectEqual(combo.codepoint, parsed.codepoint);
    try std.testing.expect(parsed.mods.ctrl);
}
test "parseAction" {
    try std.testing.expectEqual(Action.quit, parseAction("quit").?);
    try std.testing.expectEqual(Action.refresh, parseAction("refresh").?);
    try std.testing.expect(parseAction("nonexistent") == null);
}
test "loadFromData basic" {
    const data =
        \\#!srfv1
        \\action::quit,key::q
        \\action::quit,key::ctrl+c
        \\action::refresh,key::F5
    ;
    var km = loadFromData(std.testing.allocator, data) orelse return error.ParseFailed;
    defer km.deinit();
    try std.testing.expectEqual(@as(usize, 3), km.bindings.len);
    try std.testing.expectEqual(Action.quit, km.bindings[0].action);
    try std.testing.expectEqual(Action.refresh, km.bindings[2].action);
}
test "defaults returns valid keymap" {
    const km = defaults();
    try std.testing.expect(km.bindings.len > 0);
    // Verify quit is in there
    var found_quit = false;
    for (km.bindings) |b| {
        if (b.action == .quit) found_quit = true;
    }
    try std.testing.expect(found_quit);
}

2361
src/tui/main.zig Normal file

File diff suppressed because it is too large Load diff

308
src/tui/theme.zig Normal file
View file

@ -0,0 +1,308 @@
const std = @import("std");
const vaxis = @import("vaxis");
const srf = @import("srf");
/// RGB color triple: { red, green, blue }, each 0-255.
pub const Color = [3]u8;
/// Color palette plus style accessors for the TUI.
/// Fields hold raw RGB values; the *Style() accessors pair them into
/// vaxis styles. Non-bold accessors are routed through the existing
/// style() helper so each fg/bg pairing is built in exactly one place
/// (previously every accessor duplicated the struct literal).
pub const Theme = struct {
    // Backgrounds
    bg: Color,
    bg_panel: Color,
    bg_element: Color,
    // Tab bar
    tab_bg: Color,
    tab_fg: Color,
    tab_active_bg: Color,
    tab_active_fg: Color,
    // Content
    text: Color,
    text_muted: Color,
    text_dim: Color,
    // Status bar
    status_bg: Color,
    status_fg: Color,
    // Input prompt
    input_bg: Color,
    input_fg: Color,
    input_hint: Color,
    // Semantic
    accent: Color,
    positive: Color,
    negative: Color,
    warning: Color,
    info: Color,
    // Selection / cursor highlight
    select_bg: Color,
    select_fg: Color,
    // Border
    border: Color,

    /// Wrap an RGB triple in vaxis's cell color type.
    pub fn vcolor(c: Color) vaxis.Cell.Color {
        return .{ .rgb = c };
    }

    /// Build a plain (non-bold) style from explicit fg/bg colors.
    pub fn style(_: Theme, fg_color: Color, bg_color: Color) vaxis.Style {
        return .{ .fg = vcolor(fg_color), .bg = vcolor(bg_color) };
    }

    pub fn contentStyle(self: Theme) vaxis.Style {
        return self.style(self.text, self.bg);
    }
    pub fn mutedStyle(self: Theme) vaxis.Style {
        return self.style(self.text_muted, self.bg);
    }
    pub fn dimStyle(self: Theme) vaxis.Style {
        return self.style(self.text_dim, self.bg);
    }
    pub fn statusStyle(self: Theme) vaxis.Style {
        return self.style(self.status_fg, self.status_bg);
    }
    pub fn tabStyle(self: Theme) vaxis.Style {
        return self.style(self.tab_fg, self.tab_bg);
    }
    // Active tab is bold; can't go through style().
    pub fn tabActiveStyle(self: Theme) vaxis.Style {
        return .{ .fg = vcolor(self.tab_active_fg), .bg = vcolor(self.tab_active_bg), .bold = true };
    }
    pub fn tabDisabledStyle(self: Theme) vaxis.Style {
        return self.style(self.text_dim, self.tab_bg);
    }
    pub fn inputStyle(self: Theme) vaxis.Style {
        return .{ .fg = vcolor(self.input_fg), .bg = vcolor(self.input_bg), .bold = true };
    }
    pub fn inputHintStyle(self: Theme) vaxis.Style {
        return self.style(self.input_hint, self.input_bg);
    }
    pub fn selectStyle(self: Theme) vaxis.Style {
        return .{ .fg = vcolor(self.select_fg), .bg = vcolor(self.select_bg), .bold = true };
    }
    pub fn positiveStyle(self: Theme) vaxis.Style {
        return self.style(self.positive, self.bg);
    }
    pub fn negativeStyle(self: Theme) vaxis.Style {
        return self.style(self.negative, self.bg);
    }
    pub fn borderStyle(self: Theme) vaxis.Style {
        return self.style(self.border, self.bg);
    }
    pub fn headerStyle(self: Theme) vaxis.Style {
        return .{ .fg = vcolor(self.accent), .bg = vcolor(self.bg), .bold = true };
    }
    pub fn watchlistStyle(self: Theme) vaxis.Style {
        return self.style(self.text_dim, self.bg);
    }
};
// Monokai-inspired dark theme, influenced by opencode color system.
// Backgrounds are near-black for transparent terminal compatibility.
// Accent colors draw from Monokai's iconic palette: orange, purple, green, pink, yellow, cyan.
pub const default_theme = Theme{
    .bg = .{ 0x0a, 0x0a, 0x0a }, // near-black (opencode darkStep1)
    .bg_panel = .{ 0x14, 0x14, 0x14 }, // slightly lighter (opencode darkStep2)
    .bg_element = .{ 0x1e, 0x1e, 0x1e }, // element bg (opencode darkStep3)
    .tab_bg = .{ 0x14, 0x14, 0x14 }, // panel bg
    .tab_fg = .{ 0x80, 0x80, 0x80 }, // muted gray
    .tab_active_bg = .{ 0xfa, 0xb2, 0x83 }, // warm orange (opencode primary/darkStep9)
    .tab_active_fg = .{ 0x0a, 0x0a, 0x0a }, // dark on orange
    .text = .{ 0xee, 0xee, 0xee }, // bright text (opencode darkStep12)
    .text_muted = .{ 0x80, 0x80, 0x80 }, // muted (opencode darkStep11)
    .text_dim = .{ 0x48, 0x48, 0x48 }, // dim (opencode darkStep7)
    .status_bg = .{ 0x14, 0x14, 0x14 }, // panel bg
    .status_fg = .{ 0x80, 0x80, 0x80 }, // muted gray
    .input_bg = .{ 0x28, 0x28, 0x28 }, // subtle element bg
    .input_fg = .{ 0xfa, 0xb2, 0x83 }, // warm orange prompt
    .input_hint = .{ 0x60, 0x60, 0x60 }, // dim hint
    .accent = .{ 0x9d, 0x7c, 0xd8 }, // purple (opencode darkAccent)
    .positive = .{ 0x7f, 0xd8, 0x8f }, // green (opencode darkGreen)
    .negative = .{ 0xe0, 0x6c, 0x75 }, // red (opencode darkRed)
    .warning = .{ 0xe5, 0xc0, 0x7b }, // yellow (opencode darkYellow)
    .info = .{ 0x56, 0xb6, 0xc2 }, // cyan (opencode darkCyan)
    .select_bg = .{ 0x32, 0x32, 0x32 }, // subtle highlight (opencode darkStep5)
    .select_fg = .{ 0xff, 0xc0, 0x9f }, // bright orange (opencode darkStep10)
    .border = .{ 0x3c, 0x3c, 0x3c }, // subtle border (opencode darkStep6)
};
// SRF serialization
// Key/offset table mapping SRF record keys to the Theme color fields they
// populate, used by printDefaults (serialize) and loadFromData (deserialize).
// NOTE(review): maintained by hand -- must be kept in sync with Theme's Color
// fields. A Theme field missing here is silently skipped on load and omitted
// from the generated theme file; an unknown key in a theme file is ignored.
const field_names = [_]struct { name: []const u8, offset: usize }{
    .{ .name = "bg", .offset = @offsetOf(Theme, "bg") },
    .{ .name = "bg_panel", .offset = @offsetOf(Theme, "bg_panel") },
    .{ .name = "bg_element", .offset = @offsetOf(Theme, "bg_element") },
    .{ .name = "tab_bg", .offset = @offsetOf(Theme, "tab_bg") },
    .{ .name = "tab_fg", .offset = @offsetOf(Theme, "tab_fg") },
    .{ .name = "tab_active_bg", .offset = @offsetOf(Theme, "tab_active_bg") },
    .{ .name = "tab_active_fg", .offset = @offsetOf(Theme, "tab_active_fg") },
    .{ .name = "text", .offset = @offsetOf(Theme, "text") },
    .{ .name = "text_muted", .offset = @offsetOf(Theme, "text_muted") },
    .{ .name = "text_dim", .offset = @offsetOf(Theme, "text_dim") },
    .{ .name = "status_bg", .offset = @offsetOf(Theme, "status_bg") },
    .{ .name = "status_fg", .offset = @offsetOf(Theme, "status_fg") },
    .{ .name = "input_bg", .offset = @offsetOf(Theme, "input_bg") },
    .{ .name = "input_fg", .offset = @offsetOf(Theme, "input_fg") },
    .{ .name = "input_hint", .offset = @offsetOf(Theme, "input_hint") },
    .{ .name = "accent", .offset = @offsetOf(Theme, "accent") },
    .{ .name = "positive", .offset = @offsetOf(Theme, "positive") },
    .{ .name = "negative", .offset = @offsetOf(Theme, "negative") },
    .{ .name = "warning", .offset = @offsetOf(Theme, "warning") },
    .{ .name = "info", .offset = @offsetOf(Theme, "info") },
    .{ .name = "select_bg", .offset = @offsetOf(Theme, "select_bg") },
    .{ .name = "select_fg", .offset = @offsetOf(Theme, "select_fg") },
    .{ .name = "border", .offset = @offsetOf(Theme, "border") },
};
/// Resolve a byte offset (from field_names) within a Theme to a mutable
/// Color pointer.
fn colorPtr(theme: *Theme, offset: usize) *Color {
    const base: [*]u8 = @ptrCast(theme);
    const field_bytes = base + offset;
    return @ptrCast(@alignCast(field_bytes));
}
/// Const counterpart of colorPtr: resolve a byte offset within a Theme to
/// a read-only Color pointer.
fn colorPtrConst(theme: *const Theme, offset: usize) *const Color {
    const base: [*]const u8 = @ptrCast(theme);
    const field_bytes = base + offset;
    return @ptrCast(@alignCast(field_bytes));
}
/// Render a color as "#rrggbb". The output buffer is exactly 7 bytes
/// ('#' plus six hex digits), so formatting cannot overflow.
fn formatHex(c: Color) [7]u8 {
    var buf: [7]u8 = undefined;
    // The format output is always exactly 7 bytes; treat failure as a bug
    // rather than silently returning an undefined buffer (the previous
    // `catch {}` would have done exactly that on error).
    _ = std.fmt.bufPrint(&buf, "#{x:0>2}{x:0>2}{x:0>2}", .{ c[0], c[1], c[2] }) catch unreachable;
    return buf;
}
/// Parse "#rrggbb" or "rrggbb" into a Color; returns null on malformed
/// input (wrong length or non-hex digits). Hex digits may be either case.
fn parseHex(s: []const u8) ?Color {
    const digits = if (std.mem.startsWith(u8, s, "#")) s[1..] else s;
    if (digits.len != 6) return null;
    var rgb: Color = undefined;
    inline for (0..3) |channel| {
        const pair = digits[channel * 2 ..][0..2];
        rgb[channel] = std.fmt.parseUnsigned(u8, pair, 16) catch return null;
    }
    return rgb;
}
/// Write the built-in default theme to stdout in SRF format, suitable for
/// redirecting into ~/.config/zfin/theme.srf. Errors from stdout propagate.
pub fn printDefaults() !void {
    var buf: [4096]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&buf);
    const w = &stdout_writer.interface;

    // Comptime-concatenated header; emits the exact same bytes as writing
    // each line separately.
    try w.writeAll("#!srfv1\n" ++
        "# zfin TUI theme\n" ++
        "# This file is the sole source of colors when present.\n" ++
        "# If removed, built-in defaults (monokai/opencode) are used.\n" ++
        "# Regenerate: zfin interactive --default-theme > ~/.config/zfin/theme.srf\n" ++
        "#\n" ++
        "# All values are hex RGB: #rrggbb\n");

    // One "key::#rrggbb" record per themed color field.
    for (field_names) |entry| {
        const hex = formatHex(colorPtrConst(&default_theme, entry.offset).*);
        try w.print("{s}::{s}\n", .{ entry.name, hex });
    }
    try w.flush();
}
/// Load a theme from `path` (relative to the cwd). Returns null if the
/// file is missing, unreadable, larger than 64 KiB, or unparsable --
/// callers fall back to the built-in defaults in that case.
pub fn loadFromFile(allocator: std.mem.Allocator, path: []const u8) ?Theme {
    const contents = std.fs.cwd().readFileAlloc(allocator, path, 64 * 1024) catch return null;
    defer allocator.free(contents);
    return loadFromData(contents);
}
/// Parse SRF theme data and overlay recognized color keys onto the built-in
/// default theme. Returns null only if the SRF document itself fails to
/// parse; unknown keys, non-string values, and malformed hex are skipped.
pub fn loadFromData(data: []const u8) ?Theme {
    // Parse into a fixed stack buffer: the parsed records only need to
    // live for the duration of this function, so nothing is deinited --
    // the FixedBufferAllocator's memory simply goes out of scope.
    var arena_buf: [32 * 1024]u8 = undefined;
    var fba = std.heap.FixedBufferAllocator.init(&arena_buf);
    const alloc = fba.allocator();

    var reader = std.Io.Reader.fixed(data);
    // alloc_strings = false: values slice directly into `data`, which
    // outlives this function's use of them.
    const parsed = srf.parse(&reader, alloc, .{ .alloc_strings = false }) catch return null;

    var theme = default_theme;
    for (parsed.records.items) |record| {
        for (record.fields) |field| {
            // Skip valueless fields, non-string values, and bad hex.
            const value = field.value orelse continue;
            const str = switch (value) {
                .string => |s| s,
                else => continue,
            };
            const color = parseHex(str) orelse continue;
            // Linear scan of the (small, fixed) key table.
            for (field_names) |f| {
                if (std.mem.eql(u8, field.key, f.name)) {
                    colorPtr(&theme, f.offset).* = color;
                    break;
                }
            }
        }
    }
    return theme;
}
// Tests
test "parseHex" {
    // "#"-prefixed lowercase hex parses channel-by-channel.
    const got = parseHex("#f8f8f2").?;
    const want = [_]u8{ 0xf8, 0xf8, 0xf2 };
    try std.testing.expectEqual(want[0], got[0]);
    try std.testing.expectEqual(want[1], got[1]);
    try std.testing.expectEqual(want[2], got[2]);
}
test "parseHex no hash" {
    // The leading '#' is optional.
    const bare = parseHex("272822").?;
    try std.testing.expectEqual(@as(u8, 0x27), bare[0]);
}
test "formatHex roundtrip" {
    // format -> parse must reproduce the original channels exactly.
    const original = Color{ 0xae, 0x81, 0xff };
    const hex_text = formatHex(original);
    const roundtripped = parseHex(&hex_text).?;
    try std.testing.expectEqual(original[0], roundtripped[0]);
    try std.testing.expectEqual(original[1], roundtripped[1]);
    try std.testing.expectEqual(original[2], roundtripped[2]);
}
test "loadFromData" {
    const input =
        \\#!srfv1
        \\bg::#ff0000
        \\text::#00ff00
    ;
    const theme = loadFromData(input).?;
    // bg overridden to pure red, text to pure green; other fields keep defaults.
    try std.testing.expectEqual(@as(u8, 0xff), theme.bg[0]);
    try std.testing.expectEqual(@as(u8, 0x00), theme.bg[1]);
    try std.testing.expectEqual(@as(u8, 0x00), theme.text[0]);
    try std.testing.expectEqual(@as(u8, 0xff), theme.text[1]);
}
test "default theme has valid colors" {
    // Sanity-check contrast: dark background, bright foreground text.
    try std.testing.expect(default_theme.bg[0] < 0x20);
    try std.testing.expect(default_theme.text[0] > 0xc0);
}