Compare commits

...

10 commits

Author SHA1 Message Date
45b1cd58d9
add gitea/forgejo ci
Some checks failed
Build and Release / build (push) Failing after 2m27s
Build and Release / sign (push) Has been skipped
2025-07-19 13:54:12 -07:00
d2ecef20b1
replace (now revoked) sourcehut token with placeholder 2025-07-19 13:45:45 -07:00
825a8420bc
proper support for tags 2025-07-19 13:30:21 -07:00
292b3ce535
clean up logging in GH provider 2025-07-19 13:19:22 -07:00
fa061e7353
actually run the generated tests 2025-07-19 13:17:29 -07:00
39f872b4f2
use threadsafeallocator 2025-07-19 13:14:51 -07:00
7659ad6431
remove comments/change tag format 2025-07-19 13:14:20 -07:00
6f68d94fa7
pencil out tags/commits we will need later 2025-07-18 07:52:45 -07:00
68297f4389
basic infra for tag support 2025-07-18 07:26:08 -07:00
f25ce7e510
remove extraneous debugging info 2025-07-17 17:38:56 -07:00
11 changed files with 627 additions and 51 deletions

View file

@ -0,0 +1,89 @@
name: Build and Release
on:
  push:
  workflow_dispatch:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository code
        uses: actions/checkout@v4
      - name: Setup Zig
        uses: https://github.com/mlugg/setup-zig@v2.0.1
        with:
          version: 0.14.1
      - name: Restore Zig caches
        uses: https://github.com/Hanaasagi/zig-action-cache@3954aae427f8b05914e08dfd79f15e1f2e435929
      - name: Run tests
        run: zig build test --verbose --summary all
      - name: Run tests (release mode)
        run: zig build test -Dtarget=x86_64-linux-musl -Doptimize=ReleaseSafe --verbose
      - name: Build release binary
        run: |
          zig build -Dtarget=x86_64-linux-musl -Doptimize=ReleaseSafe --verbose
      - name: Create release archive (linux x86_64)
        run: |
          # Directory name must match everywhere below. Previously mkdir
          # created release-tracker-x86_64-linux while cp/tar targeted
          # release-tracker-linux-x86_64-linux/, so the copies failed.
          mkdir -p release-tracker-x86_64-linux
          cp zig-out/bin/release-tracker release-tracker-x86_64-linux/
          cp config.example.json release-tracker-x86_64-linux/
          cp README.md release-tracker-x86_64-linux/
          tar -czf release-tracker-x86_64-linux.tar.gz release-tracker-x86_64-linux/
      - name: Publish release archive
        run: |
          # Upload the archive built in the previous step. The earlier
          # version referenced ${{ runner.temp }}/<sha>-with-models.tar.gz
          # and an aws-sdk-with-models package URL, both copy-paste
          # leftovers from another project's workflow.
          curl -s --user ${{ github.actor }}:${{ secrets.PACKAGE_PUSH }} \
            --upload-file release-tracker-x86_64-linux.tar.gz \
            https://git.lerch.org/api/packages/lobo/generic/release-tracker/${{ github.sha }}/release-tracker-x86_64-linux.tar.gz
      - name: Upload
        uses: actions/upload-artifact@v3
        with:
          name: release-tracker-linux-x86_64
          # fixed typo: was 'zig-out/bin/relase-tracker'
          path: 'zig-out/bin/release-tracker'
      - name: Notify
        uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
        if: always()
        with:
          host: ${{ secrets.NTFY_HOST }}
          topic: ${{ secrets.NTFY_TOPIC }}
          user: ${{ secrets.NTFY_USER }}
          password: ${{ secrets.NTFY_PASSWORD }}
  sign:
    runs-on: ubuntu-latest-with-hsm
    needs: build
    steps:
      - name: Download Artifact
        uses: actions/download-artifact@v3
        with:
          name: release-tracker-linux-x86_64
      - name: "Make executable actually executable"
        run: "chmod 755 release-tracker-linux-x86_64"
      - name: Sign
        id: sign
        uses: https://git.lerch.org/lobo/action-hsm-sign@v1
        with:
          pin: ${{ secrets.HSM_USER_PIN }}
          # Sign the binary downloaded above. Was 'flexilib', a leftover
          # from another project's workflow.
          files: release-tracker-linux-x86_64
          public_key: 'https://emil.lerch.org/serverpublic.pem'
      - name: Output signature URL
        run: echo "Signature URL is ${{ steps.sign.outputs.URL_0 }}"
      - name: Upload Artifact
        uses: actions/upload-artifact@v3
        with:
          name: signature
          path: ${{ steps.sign.outputs.SIG_0 }}
      - name: Notify
        uses: https://git.lerch.org/lobo/action-notify-ntfy@v2
        if: always()
        with:
          host: ${{ secrets.NTFY_HOST }}
          topic: ${{ secrets.NTFY_TOPIC }}
          user: ${{ secrets.NTFY_USER }}
          password: ${{ secrets.NTFY_PASSWORD }}

View file

@ -3,7 +3,7 @@
"gitlab_token": "glpat-your_gitlab_personal_access_token_here",
"codeberg_token": "your_codeberg_access_token_here",
"sourcehut": {
"token": "AFRfVWoAAAAAAAAGZWxlcmNoMXjCv+4TPV+Qq1CMiUWDAZ/RNZzykaxJVZttjjCa1BU",
"token": "your_sourcehut_token_here",
"repositories": [
"~sircmpwn/aerc",
"~emersion/gamja"

View file

@ -190,6 +190,9 @@ pub fn generateFeed(allocator: Allocator, releases: []const Release) ![]u8 {
try escapeXml(writer, release.repo_name);
try writer.writeAll(" - ");
try escapeXml(writer, release.tag_name);
if (release.is_tag) {
try writer.writeAll(":tag");
}
try writer.writeAll("</title>\n");
try writer.writeAll(" <link href=\"");
@ -205,8 +208,6 @@ pub fn generateFeed(allocator: Allocator, releases: []const Release) ![]u8 {
.timestamp = release.published_at * std.time.ns_per_s,
.timezone = &zeit.utc,
};
// try escapeXml(writer, release.published_at);
// try std.testing.expect(std.mem.indexOf(u8, atom_content, "<updated>2024-01-01T00:00:00Z</updated>") != null);
try published.time().strftime(writer, "%Y-%m-%dT%H:%M:%SZ");
try writer.writeAll("</updated>\n");
@ -289,6 +290,7 @@ test "Atom feed generation with markdown" {
.html_url = "https://github.com/test/repo/releases/tag/v1.0.0",
.description = "## What's Changed\n* Fixed bug\n* Added feature",
.provider = "github",
.is_tag = false,
},
};
@ -317,6 +319,7 @@ test "Atom feed with fenced code blocks" {
.html_url = "https://github.com/test/repo/releases/tag/v1.0.0",
.description = "Here's some code:\n```javascript\nconst greeting = 'Hello World';\nconsole.log(greeting);\n```\nEnd of example.",
.provider = "github",
.is_tag = false,
},
};
@ -348,6 +351,7 @@ test "Atom feed with fallback markdown" {
.html_url = "https://github.com/test/repo/releases/tag/v1.0.0",
.description = "| Column 1 | Column 2 |\n|----------|----------|\n| Value 1 | Value 2 |",
.provider = "github",
.is_tag = false,
},
};
@ -373,6 +377,7 @@ test "Atom feed with special characters" {
.html_url = "https://github.com/test/repo/releases/tag/v1.0.0",
.description = "Test \"release\" with <special> chars & symbols",
.provider = "github",
.is_tag = false,
},
};

View file

@ -28,6 +28,7 @@ test "Atom feed validates against W3C validator" {
.html_url = "https://github.com/ziglang/zig/releases/tag/0.14.0",
.description = "Zig 0.14.0 release with many improvements",
.provider = "github",
.is_tag = false,
},
Release{
.repo_name = "example/test",
@ -36,6 +37,7 @@ test "Atom feed validates against W3C validator" {
.html_url = "https://github.com/example/test/releases/tag/v1.2.3",
.description = "Bug fixes and performance improvements",
.provider = "github",
.is_tag = false,
},
};

View file

@ -66,6 +66,7 @@ pub const Release = struct {
html_url: []const u8,
description: []const u8,
provider: []const u8,
is_tag: bool = false,
pub fn deinit(self: Release, allocator: Allocator) void {
allocator.free(self.repo_name);
@ -102,7 +103,8 @@ pub fn main() !u8 {
defer if (is_debug) {
_ = debug_allocator.deinit();
};
const allocator = gpa;
var tsa = std.heap.ThreadSafeAllocator{ .child_allocator = gpa };
const allocator = tsa.allocator();
const args = try std.process.argsAlloc(allocator);
defer std.process.argsFree(allocator, args);
@ -197,8 +199,6 @@ pub fn main() !u8 {
last_index += 1;
} else break;
}
// Total releases in feed: 1170 of 3591 total in last 365 days
std.log.debug("last_index: {} : {s}", .{ last_index, result.provider_name });
try all_releases.appendSlice(result.releases.items[0..last_index]);
}
@ -365,6 +365,7 @@ test "atom feed generation" {
.html_url = "https://github.com/test/repo/releases/tag/v1.0.0",
.description = "Test release",
.provider = "github",
.is_tag = false,
},
};
@ -429,6 +430,7 @@ test "Age-based release filtering" {
.html_url = "https://github.com/test/recent/releases/tag/v1.0.0",
.description = "Recent release",
.provider = "github",
.is_tag = false,
};
const old_release = Release{
@ -438,6 +440,7 @@ test "Age-based release filtering" {
.html_url = "https://github.com/test/old/releases/tag/v0.1.0",
.description = "Old release",
.provider = "github",
.is_tag = false,
};
const borderline_release = Release{
@ -447,6 +450,7 @@ test "Age-based release filtering" {
.html_url = "https://github.com/test/borderline/releases/tag/v0.5.0",
.description = "Borderline release",
.provider = "github",
.is_tag = false,
};
const releases = [_]Release{ recent_release, old_release, borderline_release };
@ -491,4 +495,9 @@ test "Age-based release filtering" {
test {
std.testing.refAllDecls(@import("timestamp_tests.zig"));
std.testing.refAllDecls(@import("atom.zig"));
std.testing.refAllDecls(@import("utils.zig"));
std.testing.refAllDecls(@import("providers/GitHub.zig"));
std.testing.refAllDecls(@import("providers/GitLab.zig"));
std.testing.refAllDecls(@import("providers/SourceHut.zig"));
std.testing.refAllDecls(@import("providers/Codeberg.zig"));
}

View file

@ -37,6 +37,7 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
// Get releases for each repo
for (starred_repos.items) |repo| {
// TODO: Investigate the tags/releases situation similar to GitHub
const repo_releases = getRepoReleases(allocator, &client, self.token, repo) catch |err| {
const stderr = std.io.getStdErr().writer();
stderr.print("Error fetching Codeberg releases for {s}: {}\n", .{ repo, err }) catch {};
@ -239,6 +240,7 @@ fn getRepoReleases(allocator: Allocator, client: *http.Client, token: []const u8
.html_url = try allocator.dupe(u8, html_url_value.string),
.description = try allocator.dupe(u8, body_str),
.provider = try allocator.dupe(u8, "codeberg"),
.is_tag = false,
};
releases.append(release) catch |err| {
@ -313,10 +315,11 @@ test "codeberg release parsing with live data snapshot" {
const release = Release{
.repo_name = try allocator.dupe(u8, "example/project"),
.tag_name = try allocator.dupe(u8, tag_name_value.string),
.published_at = try allocator.dupe(u8, published_at_value.string),
.published_at = try utils.parseReleaseTimestamp(published_at_value.string),
.html_url = try allocator.dupe(u8, html_url_value.string),
.description = try allocator.dupe(u8, body_str),
.provider = try allocator.dupe(u8, "codeberg"),
.is_tag = false,
};
try releases.append(release);
@ -330,6 +333,12 @@ test "codeberg release parsing with live data snapshot" {
try std.testing.expectEqualStrings("v3.0.1", releases.items[0].tag_name);
try std.testing.expectEqualStrings("v3.0.0", releases.items[1].tag_name);
try std.testing.expectEqualStrings("v2.9.5", releases.items[2].tag_name);
try std.testing.expectEqualStrings("2024-01-25T11:20:30Z", releases.items[0].published_at);
try std.testing.expectEqual(
@as(i64, @intCast(@divTrunc(
(try @import("zeit").instant(.{ .source = .{ .iso8601 = "2024-01-25T11:20:30Z" } })).timestamp,
std.time.ns_per_s,
))),
releases.items[0].published_at,
);
try std.testing.expectEqualStrings("codeberg", releases.items[0].provider);
}

View file

@ -13,6 +13,8 @@ token: []const u8,
const Self = @This();
const log = std.log.scoped(.@"");
const RepoFetchTask = struct {
allocator: Allocator,
token: []const u8,
@ -21,6 +23,14 @@ const RepoFetchTask = struct {
error_msg: ?[]const u8 = null,
};
const RepoTagsTask = struct {
allocator: Allocator,
token: []const u8,
repo: []const u8,
result: ?ArrayList(Release) = null,
error_msg: ?[]const u8 = null,
};
pub fn init(token: []const u8) Self {
return Self{ .token = token };
}
@ -48,27 +58,22 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
}
const starred_end_time = std.time.milliTimestamp();
if (starred_repos.items.len == 0) {
return releases;
}
if (starred_repos.items.len == 0) return releases;
const starred_duration: u64 = @intCast(starred_end_time - starred_start_time);
std.log.debug("GitHub: Found {} starred repositories in {}ms", .{ starred_repos.items.len, starred_duration });
log.debug("Found {} starred repositories in {}ms", .{ starred_repos.items.len, starred_duration });
// Check for potentially inaccessible repositories due to enterprise policies
// try checkForInaccessibleRepos(allocator, &client, self.token, starred_repos.items);
std.log.debug("GitHub: Processing {} starred repositories with thread pool...", .{starred_repos.items.len});
const thread_start_time = std.time.milliTimestamp();
// Create thread pool - use reasonable number of threads for API calls
const thread_count = @min(@max(std.Thread.getCpuCount() catch 4, 8), 20);
var thread_pool: Thread.Pool = undefined;
try thread_pool.init(.{ .allocator = allocator, .n_jobs = thread_count });
try thread_pool.init(.{ .allocator = allocator });
defer thread_pool.deinit();
// Create tasks for each repository
// Create tasks for each repository: fetch releases
var tasks = try allocator.alloc(RepoFetchTask, starred_repos.items.len);
defer allocator.free(tasks);
@ -83,16 +88,32 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
// Submit all tasks to the thread pool
var wait_group: Thread.WaitGroup = .{};
for (tasks) |*task| {
for (tasks) |*task|
thread_pool.spawnWg(&wait_group, fetchRepoReleasesTask, .{task});
// Create tasks for each repository: fetch tags
var tag_tasks = try allocator.alloc(RepoTagsTask, starred_repos.items.len);
defer allocator.free(tag_tasks);
// Initialize tag tasks
for (starred_repos.items, 0..) |repo, i| {
tag_tasks[i] = RepoTagsTask{
.allocator = allocator,
.token = self.token,
.repo = repo,
};
}
// Wait for all tasks to complete
// Submit all tag tasks to the thread pool
var tag_wait_group: Thread.WaitGroup = .{};
for (tag_tasks) |*task|
thread_pool.spawnWg(&tag_wait_group, fetchRepoTagsTask, .{task});
// Wait for all tasks to complete: releases
thread_pool.waitAndWork(&wait_group);
const releases_end_time = std.time.milliTimestamp();
const thread_end_time = std.time.milliTimestamp();
// Collect results from all tasks
// Collect results from releases
var successful_repos: usize = 0;
var failed_repos: usize = 0;
@ -114,11 +135,62 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
}
}
// Wait for all tasks to complete: tags
thread_pool.waitAndWork(&tag_wait_group);
const tags_end_time = std.time.milliTimestamp();
// Process tag results with filtering
var total_tags_found: usize = 0;
for (tag_tasks) |*tag_task| {
if (tag_task.result) |task_tags| {
defer task_tags.deinit();
const debug = std.mem.eql(u8, tag_task.repo, "DonIsaac/zlint");
if (debug)
log.debug("Processing target repo for debugging {s}", .{tag_task.repo});
total_tags_found += task_tags.items.len;
if (debug)
log.debug("Found {} tags for {s}", .{ task_tags.items.len, tag_task.repo });
// Filter out tags that already have corresponding releases
// Tags filtered will be deinitted here
const added_tags = try addNonReleaseTags(
allocator,
&releases,
task_tags.items,
);
if (debug)
log.debug("Added {d} tags out of {d} to release list for {s} ({d} filtered)", .{
added_tags,
task_tags.items.len,
tag_task.repo,
task_tags.items.len - added_tags,
});
} else if (tag_task.error_msg) |err_msg| {
const is_test = @import("builtin").is_test;
if (!is_test) {
const stderr = std.io.getStdErr().writer();
stderr.print("Error fetching tags for {s}: {s}\n", .{ tag_task.repo, err_msg }) catch {};
}
allocator.free(err_msg);
}
}
log.debug("Total tags found across all repositories: {}", .{total_tags_found});
const total_end_time = std.time.milliTimestamp();
const thread_duration: u64 = @intCast(thread_end_time - thread_start_time);
const releases_duration: u64 = @intCast(releases_end_time - thread_start_time);
const tags_duration: u64 = @intCast(tags_end_time - thread_start_time);
const total_duration: u64 = @intCast(total_end_time - total_start_time);
std.log.debug("GitHub: Thread pool completed in {}ms using {} threads ({} successful, {} failed)\n", .{ thread_duration, thread_count, successful_repos, failed_repos });
std.log.debug("GitHub: Total time (including pagination): {}ms\n", .{total_duration});
log.debug("Fetched releases {}ms, tags {}ms ({} successful, {} failed)\n", .{
releases_duration,
tags_duration,
successful_repos,
failed_repos,
});
log.debug("Total processing time: {}ms\n", .{total_duration});
// Sort releases by date (most recent first)
std.mem.sort(Release, releases.items, {}, compareReleasesByDate);
@ -143,6 +215,18 @@ fn fetchRepoReleasesTask(task: *RepoFetchTask) void {
task.result = repo_releases;
}
/// Thread-pool worker: fetches all tags for `task.repo` and stores the
/// resulting list in `task.result`. On failure, stores a formatted message
/// in `task.error_msg` instead.
/// NOTE(review): if allocPrint itself fails, the static string literal
/// "Unknown error" is stored in error_msg, but the consumer frees error_msg
/// with allocator.free — freeing a literal would be invalid. TODO confirm
/// and prefer leaving error_msg null on that path.
fn fetchRepoTagsTask(task: *RepoTagsTask) void {
    // Each worker thread gets its own HTTP client; std.http.Client is not
    // shared across tasks here.
    var client = http.Client{ .allocator = task.allocator };
    defer client.deinit();

    const repo_tags = getRepoTags(task.allocator, &client, task.token, task.repo) catch |err| {
        task.error_msg = std.fmt.allocPrint(task.allocator, "{s}: {}", .{ task.repo, err }) catch "Unknown error";
        return;
    };
    task.result = repo_tags;
}
fn getStarredRepos(allocator: Allocator, client: *http.Client, token: []const u8) !ArrayList([]const u8) {
var repos = ArrayList([]const u8).init(allocator);
@ -414,6 +498,7 @@ fn getRepoReleases(allocator: Allocator, client: *http.Client, token: []const u8
.html_url = try allocator.dupe(u8, obj.get("html_url").?.string),
.description = try allocator.dupe(u8, body_str),
.provider = try allocator.dupe(u8, "github"),
.is_tag = false,
};
try releases.append(release);
@ -422,6 +507,280 @@ fn getRepoReleases(allocator: Allocator, client: *http.Client, token: []const u8
return releases;
}
/// Reports whether `tag_name` is a "moving" tag (latest/nightly/branch-style
/// names) or a pre-release/development tag that should be excluded from the
/// release feed. Comparison is case-insensitive; if lowercasing the name
/// fails to allocate, the tag is conservatively kept (returns false).
fn shouldSkipTag(allocator: std.mem.Allocator, tag_name: []const u8) bool {
    // Lowercase once so every comparison below is case-insensitive.
    const lowered = std.ascii.allocLowerString(allocator, tag_name) catch return false;
    defer allocator.free(lowered);

    // Exact names of tags that track a branch or channel rather than a
    // fixed release, grouped roughly by how fast they move.
    const exact_matches = [_][]const u8{
        // common "latest commit tags"
        "latest",
        "tip",
        "continuous",
        "head",
        // common branch tags
        "main",
        "master",
        "trunk",
        "develop",
        "development",
        "dev",
        // common fast moving channel names
        "nightly",
        "edge",
        "canary",
        "alpha",
        // common slower channels, but without version information
        // they probably are not something we're interested in
        "beta",
        "rc",
        "release",
        "snapshot",
        "unstable",
        "experimental",
        "prerelease",
        "preview",
    };
    for (exact_matches) |candidate| {
        if (std.mem.eql(u8, lowered, candidate)) return true;
    }

    // Prefixes marking pre-release, development, or test/debug tags.
    const skip_prefixes = [_][]const u8{ "pre-", "dev-", "test-", "debug-" };
    for (skip_prefixes) |prefix| {
        if (std.mem.startsWith(u8, lowered, prefix)) return true;
    }

    return false;
}
/// Fetches every tag of `repo` (format "owner/name") via the GitHub GraphQL
/// API, 100 refs per page ordered by tag commit date descending, returning
/// them as Release values. Caller owns the returned list and every Release
/// in it.
/// NOTE(review): if a request or parseGraphQL fails mid-pagination, the
/// current `cursor` string and the tags collected so far leak — consider
/// errdefer cleanup. TODO confirm.
fn getRepoTags(allocator: Allocator, client: *http.Client, token: []const u8, repo: []const u8) !ArrayList(Release) {
    var tags = ArrayList(Release).init(allocator);

    // Split repo into owner and name
    const slash_pos = std.mem.indexOf(u8, repo, "/") orelse return error.InvalidRepoFormat;
    const owner = repo[0..slash_pos];
    const repo_name = repo[slash_pos + 1 ..];

    var has_next_page = true;
    var cursor: ?[]const u8 = null;

    while (has_next_page) {
        // Build GraphQL query for tags with commit info. The inline
        // fragments cover both lightweight tags (ref -> Commit) and
        // annotated tags (ref -> Tag -> Commit).
        const query = if (cursor) |c|
            try std.fmt.allocPrint(allocator,
                \\{{"query": "query {{ repository(owner: \"{s}\", name: \"{s}\") {{ refs(refPrefix: \"refs/tags/\", first: 100, after: \"{s}\", orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{ pageInfo {{ hasNextPage endCursor }} nodes {{ name target {{ ... on Commit {{ message committedDate }} ... on Tag {{ message target {{ ... on Commit {{ message committedDate }} }} }} }} }} }} }} }}"}}
            , .{ owner, repo_name, c })
        else
            try std.fmt.allocPrint(allocator,
                \\{{"query": "query {{ repository(owner: \"{s}\", name: \"{s}\") {{ refs(refPrefix: \"refs/tags/\", first: 100, orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{ pageInfo {{ hasNextPage endCursor }} nodes {{ name target {{ ... on Commit {{ message committedDate }} ... on Tag {{ message target {{ ... on Commit {{ message committedDate }} }} }} }} }} }} }} }}"}}
            , .{ owner, repo_name });
        defer allocator.free(query);

        const uri = try std.Uri.parse("https://api.github.com/graphql");
        const auth_header = try std.fmt.allocPrint(allocator, "Bearer {s}", .{token});
        defer allocator.free(auth_header);

        var server_header_buffer: [16 * 1024]u8 = undefined;
        var req = try client.open(.POST, uri, .{
            .server_header_buffer = &server_header_buffer,
            .extra_headers = &.{
                .{ .name = "Authorization", .value = auth_header },
                .{ .name = "Content-Type", .value = "application/json" },
                .{ .name = "User-Agent", .value = "release-tracker/1.0" },
            },
        });
        defer req.deinit();

        req.transfer_encoding = .{ .content_length = query.len };
        try req.send();
        _ = try req.writeAll(query);
        try req.finish();
        try req.wait();

        if (req.response.status != .ok) {
            // Try to read the error response body for more details
            const error_body = req.reader().readAllAlloc(allocator, 4096) catch "";
            defer if (error_body.len > 0) allocator.free(error_body);

            // Stay quiet while running under `zig build test` so expected
            // failures don't pollute test output.
            const is_test = @import("builtin").is_test;
            if (!is_test) {
                const stderr = std.io.getStdErr().writer();
                stderr.print("GitHub GraphQL: Failed to fetch tags for {s}: HTTP {} - {s}\n", .{ repo, @intFromEnum(req.response.status), error_body }) catch {};
            }
            return error.HttpRequestFailed;
        }

        const body = try req.reader().readAllAlloc(allocator, 10 * 1024 * 1024);
        defer allocator.free(body);

        // parseGraphQL appends into `tags`, replaces `cursor` with the
        // page's endCursor, and reports whether another page remains.
        has_next_page = try parseGraphQL(allocator, repo, body, &cursor, &tags);
    }
    // Clean up cursor if allocated
    if (cursor) |c| allocator.free(c);

    return tags;
}
/// Parses one page of the GitHub GraphQL tags response for `repo`, appending
/// one Release (is_tag = true) per usable tag to `releases`.
/// - `cursor` is replaced in place with the page's endCursor when more
///   pages remain (the previous cursor string is freed).
/// - Returns hasNextPage so the caller can continue paginating.
/// Tags matched by shouldSkipTag, or lacking a resolvable commit date, are
/// silently skipped.
fn parseGraphQL(allocator: std.mem.Allocator, repo: []const u8, body: []const u8, cursor: *?[]const u8, releases: *ArrayList(Release)) !bool {
    const parsed = try json.parseFromSlice(json.Value, allocator, body, .{});
    defer parsed.deinit();

    // Check for GraphQL errors: a 200 response can still carry an "errors"
    // array instead of usable data.
    if (parsed.value.object.get("errors")) |errors| {
        log.err("GraphQL errors in output for repository {s}: {}", .{ repo, errors });
        return error.GraphQLError;
    }

    const data = parsed.value.object.get("data") orelse return error.NoData;
    const repository = data.object.get("repository") orelse return error.NoRepository;
    const refs = repository.object.get("refs") orelse return error.NoRefs;
    const page_info = refs.object.get("pageInfo").?.object;
    const nodes = refs.object.get("nodes").?.array;

    // Update pagination info
    const has_next_page = page_info.get("hasNextPage").?.bool;
    if (has_next_page) {
        const end_cursor = page_info.get("endCursor").?.string;
        if (cursor.*) |old_cursor| allocator.free(old_cursor);
        cursor.* = try allocator.dupe(u8, end_cursor);
    }

    // Process each tag
    for (nodes.items) |node| {
        const node_obj = node.object;
        const tag_name = node_obj.get("name").?.string;

        // Skip common moving tags (latest, nightly, branch names, ...)
        if (shouldSkipTag(allocator, tag_name)) continue;

        const target = node_obj.get("target").?.object;
        var commit_date: i64 = 0;

        // Handle lightweight tags (point directly to commits)
        if (target.get("committedDate")) |date| {
            commit_date = utils.parseReleaseTimestamp(date.string) catch continue;
        }
        // Handle annotated tags (point to tag objects which point to commits)
        else if (target.get("target")) |nested_target| {
            if (nested_target.object.get("committedDate")) |date| {
                commit_date = utils.parseReleaseTimestamp(date.string) catch continue;
            } else {
                // Skip tags that don't have commit dates
                continue;
            }
        } else {
            // Skip tags that don't have commit dates
            continue;
        }

        // Create tag URL
        const tag_url = try std.fmt.allocPrint(allocator, "https://github.com/{s}/releases/tag/{s}", .{ repo, tag_name });

        // Message comes from the target itself when present (commit message
        // for lightweight tags, tag message for annotated tags); otherwise
        // fall back to the annotated tag's underlying commit message.
        var tag_message: []const u8 = "";
        if (target.get("message")) |m| {
            if (m == .string) tag_message = m.string;
        } else if (target.get("target")) |nested_target| {
            if (nested_target.object.get("message")) |nm| {
                if (nm == .string) tag_message = nm.string;
            }
        }

        const tag_release = Release{
            .repo_name = try allocator.dupe(u8, repo),
            .tag_name = try allocator.dupe(u8, tag_name),
            .published_at = commit_date,
            .html_url = tag_url,
            .description = try allocator.dupe(u8, tag_message),
            .provider = try allocator.dupe(u8, "github"),
            .is_tag = true,
        };

        try releases.append(tag_release);
    }
    return has_next_page;
}
/// Adds non-duplicate tags to the releases array.
///
/// Ownership of every Release in `all_tags` transfers to this function:
/// unique tags are moved into `releases`, while duplicates are freed
/// immediately via tag.deinit(allocator). The caller must NOT deinit any
/// entry of `all_tags` afterwards.
///
/// A tag counts as a duplicate when an existing entry matches both its
/// repo_name and its tag_name.
///
/// Returns the number of tags actually appended.
fn addNonReleaseTags(allocator: std.mem.Allocator, releases: *ArrayList(Release), all_tags: []const Release) !usize {
    var appended: usize = 0;
    outer: for (all_tags) |tag| {
        // Linear scan over the current releases (including tags appended in
        // earlier iterations) looking for a repo+tag match.
        for (releases.items) |existing| {
            if (std.mem.eql(u8, tag.repo_name, existing.repo_name) and
                std.mem.eql(u8, tag.tag_name, existing.tag_name))
            {
                // Duplicate: we own it, so release its memory now.
                tag.deinit(allocator);
                continue :outer;
            }
        }
        try releases.append(tag);
        appended += 1;
    }
    return appended;
}
/// Returns the committer date of `commit_sha` in `repo` ("owner/name") as a
/// Unix timestamp in seconds, via the GitHub REST commits endpoint.
/// NOTE(review): the `.?` chain at the bottom assumes the documented
/// response schema; a malformed body would panic rather than error.
fn getCommitDate(allocator: Allocator, client: *http.Client, token: []const u8, repo: []const u8, commit_sha: []const u8) !i64 {
    const url = try std.fmt.allocPrint(allocator, "https://api.github.com/repos/{s}/commits/{s}", .{ repo, commit_sha });
    defer allocator.free(url);

    const uri = try std.Uri.parse(url);
    const auth_header = try std.fmt.allocPrint(allocator, "Bearer {s}", .{token});
    defer allocator.free(auth_header);

    var server_header_buffer: [16 * 1024]u8 = undefined;
    var req = try client.open(.GET, uri, .{
        .server_header_buffer = &server_header_buffer,
        .extra_headers = &.{
            .{ .name = "Authorization", .value = auth_header },
            .{ .name = "Accept", .value = "application/vnd.github.v3+json" },
            .{ .name = "User-Agent", .value = "release-tracker/1.0" },
        },
    });
    defer req.deinit();

    try req.send();
    try req.wait();

    if (req.response.status != .ok) {
        return error.HttpRequestFailed;
    }

    const body = try req.reader().readAllAlloc(allocator, 1024 * 1024);
    defer allocator.free(body);

    const parsed = try json.parseFromSlice(json.Value, allocator, body, .{});
    defer parsed.deinit();

    // commit.committer.date is the time the commit was applied.
    const commit_date_str = parsed.value.object.get("commit").?.object.get("committer").?.object.get("date").?.string;
    return try utils.parseReleaseTimestamp(commit_date_str);
}
fn compareReleasesByDate(context: void, a: Release, b: Release) bool {
_ = context;
return a.published_at > b.published_at;
@ -564,6 +923,7 @@ test "github release parsing with live data snapshot" {
.html_url = try allocator.dupe(u8, obj.get("html_url").?.string),
.description = try allocator.dupe(u8, body_str),
.provider = try allocator.dupe(u8, "github"),
.is_tag = false,
};
try releases.append(release);
@ -577,6 +937,114 @@ test "github release parsing with live data snapshot" {
try std.testing.expectEqualStrings("v1.2.0", releases.items[0].tag_name);
try std.testing.expectEqualStrings("v1.1.0", releases.items[1].tag_name);
try std.testing.expectEqualStrings("v1.0.0", releases.items[2].tag_name);
try std.testing.expectEqual(try @import("zeit").instant(.{ .source = .{ .iso8601 = "2024-01-15T10:30:00Z" } }), releases.items[0].published_at);
try std.testing.expectEqual(
@as(i64, @intCast(@divTrunc(
(try @import("zeit").instant(.{ .source = .{ .iso8601 = "2024-01-15T10:30:00Z" } })).timestamp,
std.time.ns_per_s,
))),
releases.items[0].published_at,
);
try std.testing.expectEqualStrings("github", releases.items[0].provider);
}
// Verifies the ownership/dedup contract of addNonReleaseTags: a tag whose
// repo_name+tag_name match an existing release is freed (not appended),
// while a unique tag is moved into the releases list.
test "addNonReleaseTags should not add duplicate tags" {
    const allocator = std.testing.allocator;

    // Create initial releases array with one existing release
    var releases = ArrayList(Release).init(allocator);
    defer {
        for (releases.items) |release| release.deinit(allocator);
        releases.deinit();
    }

    const existing_release = Release{
        .repo_name = try allocator.dupe(u8, "pkgforge-dev/Cromite-AppImage"),
        .tag_name = try allocator.dupe(u8, "v138.0.7204.97@2025-07-19_1752905672"),
        .published_at = 1721404800,
        .html_url = try allocator.dupe(u8, "https://github.com/pkgforge-dev/Cromite-AppImage/releases/tag/v138.0.7204.97@2025-07-19_1752905672"),
        .description = try allocator.dupe(u8, ""),
        .provider = try allocator.dupe(u8, "github"),
        .is_tag = false,
    };
    try releases.append(existing_release);

    // Create a tag that duplicates the existing release (should NOT be added).
    // addNonReleaseTags takes ownership, so no separate deinit is needed here.
    const duplicate_tag = Release{
        .repo_name = try allocator.dupe(u8, "pkgforge-dev/Cromite-AppImage"),
        .tag_name = try allocator.dupe(u8, "v138.0.7204.97@2025-07-19_1752905672"),
        .published_at = 1721404800,
        .html_url = try allocator.dupe(u8, "https://github.com/pkgforge-dev/Cromite-AppImage/releases/tag/v138.0.7204.97@2025-07-19_1752905672"),
        .description = try allocator.dupe(u8, ""),
        .provider = try allocator.dupe(u8, "github"),
        .is_tag = true,
    };

    // Create a tag that should be added (unique)
    const unique_tag = Release{
        .repo_name = try allocator.dupe(u8, "pkgforge-dev/Cromite-AppImage"),
        .tag_name = try allocator.dupe(u8, "v137.0.7204.96@2025-07-18_1752905671"),
        .published_at = 1721318400,
        .html_url = try allocator.dupe(u8, "https://github.com/pkgforge-dev/Cromite-AppImage/releases/tag/v137.0.7204.96@2025-07-18_1752905671"),
        .description = try allocator.dupe(u8, ""),
        .provider = try allocator.dupe(u8, "github"),
        .is_tag = true,
    };

    // Array of tags to process
    const all_tags = [_]Release{ duplicate_tag, unique_tag };

    // Add non-duplicate tags to releases
    const added = try addNonReleaseTags(allocator, &releases, &all_tags);
    try std.testing.expectEqual(@as(usize, 1), added);

    // Should have 2 releases total: 1 original + 1 unique tag (duplicate should be ignored)
    try std.testing.expectEqual(@as(usize, 2), releases.items.len);

    // Verify the unique tag was added
    var found_unique = false;
    for (releases.items) |release| {
        if (std.mem.eql(u8, release.tag_name, "v137.0.7204.96@2025-07-18_1752905671")) {
            found_unique = true;
            try std.testing.expectEqual(true, release.is_tag);
            break;
        }
    }
    try std.testing.expect(found_unique);
}
// Exercises parseGraphQL against a captured single-page GraphQL response
// (hasNextPage=false) containing only lightweight tags, and checks the
// Release fields (name, repo, description, URL, provider, is_tag) that are
// derived from it.
test "parse tag graphQL output" {
    const result =
        \\{"data":{"repository":{"refs":{"pageInfo":{"hasNextPage":false,"endCursor":"MzY"},"nodes":[{"name":"v0.7.9","target":{"committedDate":"2025-07-16T06:14:23Z","message":"chore: bump version to v0.7.9"}},{"name":"v0.7.8","target":{"committedDate":"2025-07-15T23:01:11Z","message":"chore: bump version to v0.7.8"}},{"name":"v0.7.7","target":{"committedDate":"2025-04-16T02:32:43Z","message":"chore: bump version to v0.7.0"}},{"name":"v0.7.6","target":{"committedDate":"2025-04-13T18:00:14Z","message":"chore: bump version to v0.7.6"}},{"name":"v0.7.5","target":{"committedDate":"2025-04-12T20:31:13Z","message":"chore: bump version to v0.7.5"}},{"name":"v0.7.4","target":{"committedDate":"2025-04-06T02:08:45Z","message":"chore: bump version to v0.7.4"}},{"name":"v0.3.6","target":{"committedDate":"2024-12-20T07:25:36Z","message":"chore: bump version to v3.4.6"}},{"name":"v0.1.0","target":{"committedDate":"2024-11-16T23:19:14Z","message":"chore: bump version to v0.1.0"}}]}}}}
    ;

    const allocator = std.testing.allocator;
    var cursor: ?[]const u8 = null;
    var tags = ArrayList(Release).init(allocator);
    defer {
        for (tags.items) |tag| {
            tag.deinit(allocator);
        }
        tags.deinit();
    }
    const has_next_page = try parseGraphQL(allocator, "DonIsaac/zlint", result, &cursor, &tags);

    // Verify parsing results: no cursor update expected since
    // hasNextPage is false.
    try std.testing.expectEqual(false, has_next_page);
    try std.testing.expectEqual(@as(usize, 8), tags.items.len);

    // Check first tag (most recent)
    try std.testing.expectEqualStrings("v0.7.9", tags.items[0].tag_name);
    try std.testing.expectEqualStrings("DonIsaac/zlint", tags.items[0].repo_name);
    try std.testing.expectEqualStrings("chore: bump version to v0.7.9", tags.items[0].description);
    try std.testing.expectEqualStrings("https://github.com/DonIsaac/zlint/releases/tag/v0.7.9", tags.items[0].html_url);
    try std.testing.expectEqualStrings("github", tags.items[0].provider);
    try std.testing.expectEqual(true, tags.items[0].is_tag);

    // Check last tag
    try std.testing.expectEqualStrings("v0.1.0", tags.items[7].tag_name);
    try std.testing.expectEqualStrings("chore: bump version to v0.1.0", tags.items[7].description);

    // Verify that commit messages are properly extracted
    try std.testing.expectEqualStrings("chore: bump version to v0.7.8", tags.items[1].description);
    try std.testing.expectEqualStrings("chore: bump version to v3.4.6", tags.items[6].description); // Note: this one has a typo in the original data
}

View file

@ -36,6 +36,7 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
}
// Get releases for each project
// TODO: Investigate tags similar to GitHub
for (starred_projects.items) |project_id| {
const project_releases = getProjectReleases(allocator, &client, self.token, project_id) catch |err| {
const stderr = std.io.getStdErr().writer();
@ -226,6 +227,7 @@ fn getProjectReleases(allocator: Allocator, client: *http.Client, token: []const
.html_url = try allocator.dupe(u8, obj.get("_links").?.object.get("self").?.string),
.description = try allocator.dupe(u8, desc_str),
.provider = try allocator.dupe(u8, "gitlab"),
.is_tag = false,
};
releases.append(release) catch |err| {
@ -318,10 +320,11 @@ test "gitlab release parsing with live data snapshot" {
const release = Release{
.repo_name = try allocator.dupe(u8, obj.get("name").?.string),
.tag_name = try allocator.dupe(u8, obj.get("tag_name").?.string),
.published_at = try allocator.dupe(u8, obj.get("created_at").?.string),
.published_at = try utils.parseReleaseTimestamp(obj.get("created_at").?.string),
.html_url = try allocator.dupe(u8, obj.get("_links").?.object.get("self").?.string),
.description = try allocator.dupe(u8, desc_str),
.provider = try allocator.dupe(u8, "gitlab"),
.is_tag = false,
};
try releases.append(release);
@ -335,6 +338,12 @@ test "gitlab release parsing with live data snapshot" {
try std.testing.expectEqualStrings("v2.1.0", releases.items[0].tag_name);
try std.testing.expectEqualStrings("v2.0.0", releases.items[1].tag_name);
try std.testing.expectEqualStrings("v1.9.0", releases.items[2].tag_name);
try std.testing.expectEqualStrings("2024-01-20T14:45:30.123Z", releases.items[0].published_at);
try std.testing.expectEqual(
@as(i64, @intCast(@divTrunc(
(try @import("zeit").instant(.{ .source = .{ .iso8601 = "2024-01-20T14:45:30.123Z" } })).timestamp,
std.time.ns_per_s,
))),
releases.items[0].published_at,
);
try std.testing.expectEqualStrings("gitlab", releases.items[0].provider);
}

View file

@ -50,28 +50,6 @@ pub fn fetchReleasesForRepos(self: *Self, allocator: Allocator, repositories: []
return fetchReleasesMultiRepo(allocator, &client, auth_token, repositories);
}
/// Fetch releases for the given repositories, returning only those newer than
/// anything already present in `existing_releases` for the sourcehut provider.
/// Caller owns the returned list and its items.
pub fn fetchReleasesForReposFiltered(self: *Self, allocator: Allocator, repositories: [][]const u8, token: ?[]const u8, existing_releases: []const Release) !ArrayList(Release) {
    // Newest timestamp among the sourcehut releases we already know about;
    // unparseable timestamps are treated as the epoch (0).
    var newest_known: i64 = 0;
    for (existing_releases) |existing| {
        if (!std.mem.eql(u8, existing.provider, "sourcehut")) continue;
        const ts = utils.parseReleaseTimestamp(existing.published_at) catch 0;
        if (ts > newest_known) newest_known = ts;
    }

    // Fetch everything, then hand back only the entries newer than newest_known.
    // filterNewReleases copies what it keeps, so freeing the full list on scope
    // exit (after the return expression is evaluated) is safe.
    const fetched = try self.fetchReleasesForRepos(allocator, repositories, token);
    defer {
        for (fetched.items) |release| release.deinit(allocator);
        fetched.deinit();
    }

    return filterNewReleases(allocator, fetched.items, newest_known);
}
pub fn getName(self: *Self) []const u8 {
_ = self;
return "sourcehut";
@ -150,6 +128,7 @@ fn fetchReleasesMultiRepo(allocator: Allocator, client: *http.Client, token: []c
else
"1970-01-01T00:00:00Z";
// TODO: Investigate annotated tags as the description here
const release = Release{
.repo_name = try std.fmt.allocPrint(allocator, "~{s}/{s}", .{ tag_data.username, tag_data.reponame }),
.tag_name = try allocator.dupe(u8, tag_data.tag_name),
@ -157,6 +136,7 @@ fn fetchReleasesMultiRepo(allocator: Allocator, client: *http.Client, token: []c
.html_url = try std.fmt.allocPrint(allocator, "https://git.sr.ht/~{s}/{s}/refs/{s}", .{ tag_data.username, tag_data.reponame, tag_data.tag_name }),
.description = try std.fmt.allocPrint(allocator, "Tag {s} (commit: {s})", .{ tag_data.tag_name, tag_data.commit_id }),
.provider = try allocator.dupe(u8, "sourcehut"),
.is_tag = false, // Strictly speaking these refs are tags, but sourcehut has no separate "releases" concept, so flagging every entry as a tag would be misleading; we report them as releases instead
};
releases.append(release) catch |err| {

View file

@ -79,6 +79,7 @@ test "compareReleasesByDate with various timestamp formats" {
.html_url = "https://github.com/test/iso-early/releases/tag/v1.0.0",
.description = "Early ISO format",
.provider = "github",
.is_tag = false,
};
const release_iso_late = Release{
@ -91,6 +92,7 @@ test "compareReleasesByDate with various timestamp formats" {
.html_url = "https://github.com/test/iso-late/releases/tag/v2.0.0",
.description = "Late ISO format",
.provider = "github",
.is_tag = false,
};
const release_invalid = Release{
@ -100,6 +102,7 @@ test "compareReleasesByDate with various timestamp formats" {
.html_url = "https://github.com/test/invalid/releases/tag/v3.0.0",
.description = "Invalid format",
.provider = "github",
.is_tag = false,
};
// Later date should come before earlier date (more recent first)

View file

@ -53,6 +53,7 @@ test "compareReleasesByDate" {
.html_url = "https://github.com/test/repo1/releases/tag/v1.0.0",
.description = "First release",
.provider = "github",
.is_tag = false,
};
const release2 = Release{
@ -65,6 +66,7 @@ test "compareReleasesByDate" {
.html_url = "https://github.com/test/repo2/releases/tag/v2.0.0",
.description = "Second release",
.provider = "github",
.is_tag = false,
};
// release2 should come before release1 (more recent first)