clean up terminal output
parent 4608aa33c5
commit 8a2e691c1f
8 changed files with 96 additions and 42 deletions
@@ -34,7 +34,8 @@ pub const Config = struct {
 pub fn loadConfig(allocator: Allocator, path: []const u8) !Config {
     const file = std.fs.cwd().openFile(path, .{}) catch |err| switch (err) {
         error.FileNotFound => {
-            std.debug.print("Config file not found, creating default config at {s}\n", .{path});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Config file not found, creating default config at {s}\n", .{path}) catch {};
             try createDefaultConfig(path);
             return Config{ .allocator = allocator };
         },
src/main.zig (55 changed lines)
@@ -28,6 +28,17 @@ fn print(comptime fmt: []const u8, args: anytype) void {
     }
 }

+// Error output functions that work in release mode
+fn printError(comptime fmt: []const u8, args: anytype) void {
+    const stderr = std.io.getStdErr().writer();
+    stderr.print(fmt, args) catch {};
+}
+
+fn printInfo(comptime fmt: []const u8, args: anytype) void {
+    const stderr = std.io.getStdErr().writer();
+    stderr.print(fmt, args) catch {};
+}
+
 // Configuration: Only include releases from the last year in the output
 const RELEASE_AGE_LIMIT_SECONDS: i64 = 365 * 24 * 60 * 60; // 1 year in seconds

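A note on the two helpers added above: they route user-facing output through stderr and deliberately swallow write errors, so logging can never turn into a hard failure in release builds. The standalone sketch below shows them in isolation; the `main` wrapper and the sample messages are illustrative only, not part of this commit, and it assumes the pre-0.14 `std.io.getStdErr()` API used throughout this diff.

const std = @import("std");

// Error output helpers as added in this commit: messages go to stderr and
// write failures are intentionally ignored so logging never aborts the program.
fn printError(comptime fmt: []const u8, args: anytype) void {
    const stderr = std.io.getStdErr().writer();
    stderr.print(fmt, args) catch {};
}

fn printInfo(comptime fmt: []const u8, args: anytype) void {
    const stderr = std.io.getStdErr().writer();
    stderr.print(fmt, args) catch {};
}

pub fn main() u8 {
    // Example messages only; the real call sites appear in the hunks below.
    printInfo("Fetching releases from all providers concurrently...\n", .{});
    printError("Error loading config: {}\n", .{error.FileNotFound});
    return 1;
}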
@@ -89,7 +100,7 @@ pub fn main() !u8 {
     const config_path = args[1];
     const output_file = if (args.len >= 3) args[2] else "releases.xml";
     var app_config = config.loadConfig(allocator, config_path) catch |err| {
-        print("Error loading config: {}\n", .{err});
+        printError("Error loading config: {}\n", .{err});
         return 1;
     };
     defer app_config.deinit();
@@ -111,7 +122,7 @@ pub fn main() !u8 {
         new_releases.deinit();
     }

-    print("Fetching releases from all providers concurrently...\n", .{});
+    printInfo("Fetching releases from all providers concurrently...\n", .{});

     // Create providers list
     var providers = std.ArrayList(Provider).init(allocator);
@@ -154,10 +165,25 @@ pub fn main() !u8 {
         allocator.free(provider_results);
     }

+    // Check for provider errors and report them
+    var has_errors = false;
+    for (provider_results) |result| {
+        if (result.error_msg) |error_msg| {
+            printError("✗ {s}: {s}\n", .{ result.provider_name, error_msg });
+            has_errors = true;
+        }
+    }
+
+    // If any provider failed, exit with error code
+    if (has_errors) {
+        printError("One or more providers failed to fetch releases\n", .{});
+        return 1;
+    }
+
     // Combine all new releases from threaded providers
     for (provider_results) |result| {
         try new_releases.appendSlice(result.releases.items);
-        print("Found {} new releases from {s}\n", .{ result.releases.items.len, result.provider_name });
+        printInfo("Found {} new releases from {s}\n", .{ result.releases.items.len, result.provider_name });
     }

     // Combine all releases (existing and new)
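The aggregation logic added above reports every provider failure before exiting once, instead of aborting on the first error. A compilable sketch of that pattern follows; the `ProviderResult` struct is a hypothetical stand-in carrying only the two fields the loop reads (`provider_name`, `error_msg`), and the sample data is invented for illustration.

const std = @import("std");

// Hypothetical stand-in for the per-provider result type used above.
const ProviderResult = struct {
    provider_name: []const u8,
    error_msg: ?[]const u8 = null,
};

fn printError(comptime fmt: []const u8, args: anytype) void {
    const stderr = std.io.getStdErr().writer();
    stderr.print(fmt, args) catch {};
}

pub fn main() u8 {
    const provider_results = [_]ProviderResult{
        .{ .provider_name = "github" },
        .{ .provider_name = "codeberg", .error_msg = "HTTP 403" },
    };

    // Report every failure first, then exit once with a non-zero code.
    var has_errors = false;
    for (provider_results) |result| {
        if (result.error_msg) |error_msg| {
            printError("✗ {s}: {s}\n", .{ result.provider_name, error_msg });
            has_errors = true;
        }
    }
    if (has_errors) {
        printError("One or more providers failed to fetch releases\n", .{});
        return 1;
    }
    return 0;
}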
@@ -201,9 +227,9 @@ pub fn main() !u8 {
     try file.writeAll(atom_content);

     // Log to stderr for user feedback
-    std.debug.print("Found {} new releases\n", .{new_releases.items.len});
-    std.debug.print("Total releases in feed: {} (filtered from {} total, showing last {} days)\n", .{ all_releases.items.len, original_count, @divTrunc(RELEASE_AGE_LIMIT_SECONDS, 24 * 60 * 60) });
-    std.debug.print("Updated feed written to: {s}\n", .{output_file});
+    printInfo("Found {} new releases\n", .{new_releases.items.len});
+    printInfo("Total releases in feed: {} (filtered from {} total, showing last {} days)\n", .{ all_releases.items.len, original_count, @divTrunc(RELEASE_AGE_LIMIT_SECONDS, 24 * 60 * 60) });
+    printInfo("Updated feed written to: {s}\n", .{output_file});

     return 0;
 }
@@ -211,7 +237,7 @@ pub fn main() !u8 {
 fn loadExistingReleases(allocator: Allocator, filename: []const u8) !ArrayList(Release) {
     const file = std.fs.cwd().openFile(filename, .{}) catch |err| switch (err) {
         error.FileNotFound => {
-            print("No existing releases file found, starting fresh\n", .{});
+            printInfo("No existing releases file found, starting fresh\n", .{});
             return ArrayList(Release).init(allocator);
         },
         else => return err,
@@ -221,13 +247,16 @@ fn loadExistingReleases(allocator: Allocator, filename: []const u8) !ArrayList(R
     const content = try file.readToEndAlloc(allocator, 10 * 1024 * 1024);
     defer allocator.free(content);

-    return parseReleasesFromXml(allocator, content);
+    printInfo("Loading existing releases from {s}...\n", .{filename});
+    const releases = try parseReleasesFromXml(allocator, content);
+    printInfo("Loaded {} existing releases\n", .{releases.items.len});
+    return releases;
 }

 fn parseReleasesFromXml(allocator: Allocator, xml_content: []const u8) !ArrayList(Release) {
     const releases = xml_parser.parseAtomFeed(allocator, xml_content) catch |err| {
-        print("Warning: Failed to parse XML content: {}\n", .{err});
-        print("Starting fresh with no existing releases\n", .{});
+        printError("Warning: Failed to parse XML content: {}\n", .{err});
+        printInfo("Starting fresh with no existing releases\n", .{});
         return ArrayList(Release).init(allocator);
     };

@@ -370,7 +399,7 @@ fn fetchProviderReleases(context: *const ThreadContext) void {

     const since_str = formatTimestampForDisplay(allocator, latest_release_date) catch "unknown";
     defer if (!std.mem.eql(u8, since_str, "unknown")) allocator.free(since_str);
-    print("Fetching releases from {s} (since: {s})...\n", .{ provider.getName(), since_str });
+    printInfo("Fetching releases from {s} (since: {s})...\n", .{ provider.getName(), since_str });

     if (provider.fetchReleases(allocator)) |all_releases| {
         defer {
@@ -388,11 +417,11 @@ fn fetchProviderReleases(context: *const ThreadContext) void {
         };

         result.releases = filtered;
-        print("✓ {s}: Found {} new releases\n", .{ provider.getName(), filtered.items.len });
+        printInfo("✓ {s}: Found {} new releases\n", .{ provider.getName(), filtered.items.len });
     } else |err| {
         const error_msg = std.fmt.allocPrint(allocator, "Error fetching releases: {}", .{err}) catch "Unknown fetch error";
         result.error_msg = error_msg;
-        print("✗ {s}: {s}\n", .{ provider.getName(), error_msg });
+        // Don't print error here - it will be handled in main function
     }
 }

@@ -38,7 +38,8 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
     // Get releases for each repo
     for (starred_repos.items) |repo| {
        const repo_releases = getRepoReleases(allocator, &client, self.token, repo) catch |err| {
-            std.debug.print("Error fetching Codeberg releases for {s}: {}\n", .{ repo, err });
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Error fetching Codeberg releases for {s}: {}\n", .{ repo, err }) catch {};
             continue;
         };
         defer repo_releases.deinit();
@@ -95,13 +96,16 @@ fn getStarredRepos(allocator: Allocator, client: *http.Client, token: []const u8

     if (req.response.status != .ok) {
         if (req.response.status == .unauthorized) {
-            std.debug.print("Codeberg API: Unauthorized - check your token and scopes\n", .{});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Codeberg API: Unauthorized - check your token and scopes\n", .{}) catch {};
             return error.Unauthorized;
         } else if (req.response.status == .forbidden) {
-            std.debug.print("Codeberg API: Forbidden - token may lack required scopes (read:repository)\n", .{});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Codeberg API: Forbidden - token may lack required scopes (read:repository)\n", .{}) catch {};
             return error.Forbidden;
         }
-        std.debug.print("Codeberg API request failed with status: {}\n", .{req.response.status});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Codeberg API request failed with status: {}\n", .{req.response.status}) catch {};
         return error.HttpRequestFailed;
     }

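The status handling repeated in this file logs a diagnostic to stderr and then surfaces a typed error. The sketch below isolates that pattern; the `checkStatus` helper and calling it with a bare `std.http.Status` value (rather than a live `req.response.status`) are illustrative assumptions, not code from this repository.

const std = @import("std");

// Maps a non-OK HTTP status to a typed error after logging to stderr,
// mirroring the branches in the hunks above and below.
fn checkStatus(status: std.http.Status) !void {
    const stderr = std.io.getStdErr().writer();
    switch (status) {
        .ok => {},
        .unauthorized => {
            stderr.print("Codeberg API: Unauthorized - check your token and scopes\n", .{}) catch {};
            return error.Unauthorized;
        },
        .forbidden => {
            stderr.print("Codeberg API: Forbidden - token may lack required scopes (read:repository)\n", .{}) catch {};
            return error.Forbidden;
        },
        else => {
            stderr.print("Codeberg API request failed with status: {}\n", .{status}) catch {};
            return error.HttpRequestFailed;
        },
    }
}

pub fn main() void {
    // A 403 surfaces as error.Forbidden after the diagnostic has been printed.
    checkStatus(.forbidden) catch |err| {
        std.debug.print("caught: {}\n", .{err});
    };
}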
@@ -109,7 +113,8 @@ fn getStarredRepos(allocator: Allocator, client: *http.Client, token: []const u8
     defer allocator.free(body);

     const parsed = json.parseFromSlice(json.Value, allocator, body, .{}) catch |err| {
-        std.debug.print("Error parsing Codeberg starred repos JSON (page {d}): {}\n", .{ page, err });
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Error parsing Codeberg starred repos JSON (page {d}): {}\n", .{ page, err }) catch {};
         return error.JsonParseError;
     };
     defer parsed.deinit();
@@ -178,16 +183,20 @@ fn getRepoReleases(allocator: Allocator, client: *http.Client, token: []const u8

     if (req.response.status != .ok) {
         if (req.response.status == .unauthorized) {
-            std.debug.print("Codeberg API: Unauthorized for repo {s} - check your token and scopes\n", .{repo});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Codeberg API: Unauthorized for repo {s} - check your token and scopes\n", .{repo}) catch {};
             return error.Unauthorized;
         } else if (req.response.status == .forbidden) {
-            std.debug.print("Codeberg API: Forbidden for repo {s} - token may lack required scopes\n", .{repo});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Codeberg API: Forbidden for repo {s} - token may lack required scopes\n", .{repo}) catch {};
             return error.Forbidden;
         } else if (req.response.status == .not_found) {
-            std.debug.print("Codeberg API: Repository {s} not found or no releases\n", .{repo});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Codeberg API: Repository {s} not found or no releases\n", .{repo}) catch {};
             return error.NotFound;
         }
-        std.debug.print("Codeberg API request failed for repo {s} with status: {}\n", .{ repo, req.response.status });
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Codeberg API request failed for repo {s} with status: {}\n", .{ repo, req.response.status }) catch {};
         return error.HttpRequestFailed;
     }

@@ -195,7 +204,8 @@ fn getRepoReleases(allocator: Allocator, client: *http.Client, token: []const u8
     defer allocator.free(body);

     const parsed = json.parseFromSlice(json.Value, allocator, body, .{}) catch |err| {
-        std.debug.print("Error parsing Codeberg releases JSON for {s}: {}\n", .{ repo, err });
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Error parsing Codeberg releases JSON for {s}: {}\n", .{ repo, err }) catch {};
         return error.JsonParseError;
     };
     defer parsed.deinit();
@@ -38,7 +38,8 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
     // Then get releases for each repo
     for (starred_repos.items) |repo| {
         const repo_releases = getRepoReleases(allocator, &client, self.token, repo) catch |err| {
-            std.debug.print("Error fetching releases for {s}: {}\n", .{ repo, err });
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Error fetching releases for {s}: {}\n", .{ repo, err }) catch {};
             continue;
         };
         defer repo_releases.deinit();
@@ -38,7 +38,8 @@ pub fn fetchReleases(self: *Self, allocator: Allocator) !ArrayList(Release) {
     // Get releases for each project
     for (starred_projects.items) |project_id| {
         const project_releases = getProjectReleases(allocator, &client, self.token, project_id) catch |err| {
-            std.debug.print("Error fetching GitLab releases for project {s}: {}\n", .{ project_id, err });
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Error fetching GitLab releases for project {s}: {}\n", .{ project_id, err }) catch {};
             continue;
         };
         defer project_releases.deinit();
@@ -33,12 +33,14 @@ pub fn fetchReleasesForRepos(self: *Self, allocator: Allocator, repositories: []
     }

     const auth_token = token orelse {
-        std.debug.print("SourceHut: No token provided, skipping\n", .{});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("SourceHut: No token provided, skipping\n", .{}) catch {};
         return ArrayList(Release).init(allocator);
     };

     if (auth_token.len == 0) {
-        std.debug.print("SourceHut: Empty token, skipping\n", .{});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("SourceHut: Empty token, skipping\n", .{}) catch {};
         return ArrayList(Release).init(allocator);
     }

@@ -97,7 +99,8 @@ fn fetchReleasesMultiRepo(allocator: Allocator, client: *http.Client, token: []c

     for (repositories) |repo| {
         const parsed = parseRepoFormat(allocator, repo) catch |err| {
-            std.debug.print("Invalid repo format '{s}': {}\n", .{ repo, err });
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Invalid repo format '{s}': {}\n", .{ repo, err }) catch {};
             continue;
         };
         try parsed_repos.append(parsed);
@@ -109,7 +112,8 @@ fn fetchReleasesMultiRepo(allocator: Allocator, client: *http.Client, token: []c

     // Step 1: Get all references for all repositories in one query
     const all_tag_data = getAllReferencesMultiRepo(allocator, client, token, parsed_repos.items) catch |err| {
-        std.debug.print("Failed to get references: {}\n", .{err});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Failed to get references: {}\n", .{err}) catch {};
         return releases;
     };
     defer {
@@ -128,7 +132,8 @@ fn fetchReleasesMultiRepo(allocator: Allocator, client: *http.Client, token: []c

     // Step 2: Get commit dates for all commits in one query
     const commit_dates = getAllCommitDatesMultiRepo(allocator, client, token, parsed_repos.items, all_tag_data.items) catch |err| {
-        std.debug.print("Failed to get commit dates: {}\n", .{err});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("Failed to get commit dates: {}\n", .{err}) catch {};
         return releases;
     };
     defer {
@@ -261,7 +266,8 @@ fn getAllReferencesMultiRepo(allocator: Allocator, client: *http.Client, token:

     // Parse the response and extract tag data
     var parsed = json.parseFromSlice(json.Value, allocator, response_body, .{}) catch |err| {
-        std.debug.print("SourceHut: Failed to parse references JSON response: {}\n", .{err});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("SourceHut: Failed to parse references JSON response: {}\n", .{err}) catch {};
         return all_tag_data;
     };
     defer parsed.deinit();
@@ -270,13 +276,14 @@ fn getAllReferencesMultiRepo(allocator: Allocator, client: *http.Client, token:

     // Check for GraphQL errors first
     if (root.object.get("errors")) |errors| {
-        std.debug.print("GraphQL errors in references query: ", .{});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("GraphQL errors in references query: ", .{}) catch {};
         for (errors.array.items) |error_item| {
             if (error_item.object.get("message")) |message| {
-                std.debug.print("{s} ", .{message.string});
+                stderr.print("{s} ", .{message.string}) catch {};
             }
         }
-        std.debug.print("\n", .{});
+        stderr.print("\n", .{}) catch {};
         return all_tag_data;
     }

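The GraphQL error reporting above walks a parsed `std.json.Value` tree. A self-contained sketch of that traversal is below, using an invented error payload in place of the real `response_body`; the field names `errors` and `message` follow the GraphQL response convention, and everything else is illustrative.

const std = @import("std");

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // Stand-in for the response_body received from the GraphQL API.
    const response_body =
        \\{"errors":[{"message":"unknown repository"},{"message":"access denied"}]}
    ;

    const parsed = try std.json.parseFromSlice(std.json.Value, allocator, response_body, .{});
    defer parsed.deinit();
    const root = parsed.value;

    const stderr = std.io.getStdErr().writer();
    if (root.object.get("errors")) |errors| {
        stderr.print("GraphQL errors in references query: ", .{}) catch {};
        for (errors.array.items) |error_item| {
            if (error_item.object.get("message")) |message| {
                stderr.print("{s} ", .{message.string}) catch {};
            }
        }
        stderr.print("\n", .{}) catch {};
    }
}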
@@ -441,7 +448,8 @@ fn getAllCommitDatesMultiRepo(allocator: Allocator, client: *http.Client, token:

     // Parse the response
     var parsed = json.parseFromSlice(json.Value, allocator, response_body, .{}) catch |err| {
-        std.debug.print("SourceHut: Failed to parse commit dates JSON response: {}\n", .{err});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("SourceHut: Failed to parse commit dates JSON response: {}\n", .{err}) catch {};
         // Return empty dates for all tags
         for (tag_data) |_| {
             try commit_dates.append("");
@@ -454,13 +462,14 @@ fn getAllCommitDatesMultiRepo(allocator: Allocator, client: *http.Client, token:

     // Check for GraphQL errors first
     if (root.object.get("errors")) |errors| {
-        std.debug.print("GraphQL errors in commit dates query: ", .{});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("GraphQL errors in commit dates query: ", .{}) catch {};
         for (errors.array.items) |error_item| {
             if (error_item.object.get("message")) |message| {
-                std.debug.print("{s} ", .{message.string});
+                stderr.print("{s} ", .{message.string}) catch {};
             }
         }
-        std.debug.print("\n", .{});
+        stderr.print("\n", .{}) catch {};
         // Return empty dates for all tags
         for (tag_data) |_| {
             try commit_dates.append("");
@@ -550,7 +559,8 @@ fn makeGraphQLRequest(allocator: Allocator, client: *http.Client, token: []const
     try req.wait();

     if (req.response.status != .ok) {
-        std.debug.print("SourceHut GraphQL API request failed with status: {}\n", .{req.response.status});
+        const stderr = std.io.getStdErr().writer();
+        stderr.print("SourceHut GraphQL API request failed with status: {}\n", .{req.response.status}) catch {};
         return error.HttpRequestFailed;
     }

@@ -41,7 +41,8 @@ pub const AppState = struct {
 pub fn loadState(allocator: Allocator, path: []const u8) !AppState {
     const file = std.fs.cwd().openFile(path, .{}) catch |err| switch (err) {
         error.FileNotFound => {
-            std.debug.print("State file not found, creating default state at {s}\n", .{path});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("State file not found, creating default state at {s}\n", .{path}) catch {};
             const default_state = AppState.init(allocator);
             try saveState(default_state, path);
             return default_state;
@@ -36,7 +36,8 @@ pub fn parseAtomFeed(allocator: Allocator, xml_content: []const u8) !ArrayList(R
         if (parseEntry(allocator, entry_content)) |release| {
             try releases.append(release);
         } else |err| {
-            std.debug.print("Warning: Failed to parse entry: {}\n", .{err});
+            const stderr = std.io.getStdErr().writer();
+            stderr.print("Warning: Failed to parse entry: {}\n", .{err}) catch {};
         }

         pos = entry_end + 8; // Move past "</entry>"