2025-04-25 20:44:32 +00:00
|
|
|
import * as plugins from './plugins.js';
|
2024-12-13 18:31:09 +01:00
|
|
|
|
|
|
|
// Streams recent commit activity from a Gitea-compatible instance, optionally
// enriched with tag, npm-publication and changelog information.
export class CodeFeed {
  // Base URL of the Gitea instance; all API paths are appended to this.
  private baseUrl: string;
  // Optional API token, sent as "Authorization: token <token>" when present.
  private token?: string;
  // ISO-8601 timestamp; only commits newer than this are considered "new".
  private lastRunTimestamp: string;
  // Page size used for every paginated Gitea API request.
  private pageLimit = 50;
  // Raw changelog content for the current repository
  private changelogContent: string = '';
  // npm registry helper for published-on-npm checks
  private npmRegistry: plugins.smartnpm.NpmRegistry;
  // In-memory stateful cache of commits
  private enableCache: boolean = false;
  // When set, cached commits older than this window (ms) are evicted.
  private cacheWindowMs?: number;
  private cache: plugins.interfaces.ICommitResult[] = [];
  // enable or disable npm publishedOnNpm checks (true by default)
  private enableNpmCheck: boolean = true;
  // return only tagged commits (false by default)
  private enableTaggedOnly: boolean = false;
  // allow/deny filters
  private orgAllowlist?: string[];
  private orgDenylist?: string[];
  private repoAllowlist?: string[]; // entries like "org/repo"
  private repoDenylist?: string[]; // entries like "org/repo"
  private untilTimestamp?: string; // optional upper bound on commit timestamps
  private verbose?: boolean; // optional metrics logging
/**
 * Create a CodeFeed instance.
 *
 * @param baseUrl base URL of the Gitea instance
 * @param token optional API token for authenticated requests
 * @param lastRunTimestamp ISO timestamp of the previous run; defaults to seven days ago
 * @param options feature toggles (caching, npm checks, tagged-only) and org/repo filters
 */
constructor(
  baseUrl: string,
  token?: string,
  lastRunTimestamp?: string,
  options?: {
    enableCache?: boolean;
    cacheWindowMs?: number;
    enableNpmCheck?: boolean;
    taggedOnly?: boolean;
    orgAllowlist?: string[];
    orgDenylist?: string[];
    repoAllowlist?: string[];
    repoDenylist?: string[];
    untilTimestamp?: string;
    verbose?: boolean;
  }
) {
  this.baseUrl = baseUrl;
  this.token = token;

  // Without an explicit starting point, look back seven days from "now".
  const sevenDaysAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000);
  this.lastRunTimestamp = lastRunTimestamp ?? sevenDaysAgo.toISOString();

  // Stateful caching configuration.
  this.enableCache = options?.enableCache ?? false;
  this.cacheWindowMs = options?.cacheWindowMs;
  this.cache = [];

  // Feature toggles and allow/deny filters.
  this.enableNpmCheck = options?.enableNpmCheck ?? true;
  this.enableTaggedOnly = options?.taggedOnly ?? false;
  this.orgAllowlist = options?.orgAllowlist;
  this.orgDenylist = options?.orgDenylist;
  this.repoAllowlist = options?.repoAllowlist;
  this.repoDenylist = options?.repoDenylist;
  this.untilTimestamp = options?.untilTimestamp;
  this.verbose = options?.verbose ?? false;

  // Registry client used for published-on-npm version lookups.
  this.npmRegistry = new plugins.smartnpm.NpmRegistry();

  console.log('CodeFeed initialized with last run timestamp:', this.lastRunTimestamp);
}
|
|
|
|
|
2025-04-25 20:44:32 +00:00
|
|
|
/**
|
|
|
|
* Fetch all new commits (since lastRunTimestamp) across all orgs and repos.
|
|
|
|
*/
|
|
|
|
public async fetchAllCommitsFromInstance(): Promise<plugins.interfaces.ICommitResult[]> {
|
|
|
|
// Controlled concurrency with AsyncExecutionStack
|
|
|
|
const stack = new plugins.lik.AsyncExecutionStack();
|
2025-04-25 20:56:01 +00:00
|
|
|
stack.setNonExclusiveMaxConcurrency(20);
|
2025-04-25 20:44:32 +00:00
|
|
|
// determine since timestamp for this run (stateful caching)
|
|
|
|
let effectiveSince = this.lastRunTimestamp;
|
|
|
|
if (this.enableCache && this.cache.length > 0) {
|
|
|
|
// use newest timestamp in cache to fetch only tail
|
|
|
|
effectiveSince = this.cache.reduce(
|
|
|
|
(max, c) => (c.timestamp > max ? c.timestamp : max),
|
|
|
|
effectiveSince
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
// 1) get all organizations
|
2025-09-12 22:06:02 +00:00
|
|
|
let orgs = await this.fetchAllOrganizations();
|
|
|
|
// apply allow/deny filters
|
|
|
|
if (this.orgAllowlist && this.orgAllowlist.length > 0) {
|
|
|
|
orgs = orgs.filter((o) => this.orgAllowlist!.includes(o));
|
|
|
|
}
|
|
|
|
if (this.orgDenylist && this.orgDenylist.length > 0) {
|
|
|
|
orgs = orgs.filter((o) => !this.orgDenylist!.includes(o));
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
|
|
|
|
// 2) fetch repos per org in parallel
|
|
|
|
const repoLists = await Promise.all(
|
|
|
|
orgs.map((org) =>
|
|
|
|
stack.getNonExclusiveExecutionSlot(() => this.fetchRepositoriesForOrg(org))
|
|
|
|
)
|
|
|
|
);
|
|
|
|
// flatten to [{ owner, name }]
|
2025-09-14 20:27:51 +00:00
|
|
|
let allRepos = orgs.flatMap((org, i) =>
|
2025-04-25 20:44:32 +00:00
|
|
|
repoLists[i].map((r) => ({ owner: org, name: r.name }))
|
|
|
|
);
|
2025-09-14 20:27:51 +00:00
|
|
|
// apply repo allow/deny filters using slug "org/repo"
|
|
|
|
if (this.repoAllowlist && this.repoAllowlist.length > 0) {
|
|
|
|
const allow = new Set(this.repoAllowlist.map((s) => s.toLowerCase()));
|
|
|
|
allRepos = allRepos.filter(({ owner, name }) => allow.has(`${owner}/${name}`.toLowerCase()));
|
|
|
|
}
|
|
|
|
if (this.repoDenylist && this.repoDenylist.length > 0) {
|
|
|
|
const deny = new Set(this.repoDenylist.map((s) => s.toLowerCase()));
|
|
|
|
allRepos = allRepos.filter(({ owner, name }) => !deny.has(`${owner}/${name}`.toLowerCase()));
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
|
|
|
|
// 3) probe latest commit per repo and fetch full list only if new commits exist
|
|
|
|
const commitJobs = allRepos.map(({ owner, name }) =>
|
|
|
|
stack.getNonExclusiveExecutionSlot(async () => {
|
|
|
|
try {
|
|
|
|
// 3a) Probe the most recent commit (limit=1)
|
|
|
|
const probeResp = await this.fetchFunction(
|
|
|
|
`/api/v1/repos/${owner}/${name}/commits?limit=1`,
|
|
|
|
{ headers: this.token ? { Authorization: `token ${this.token}` } : {} }
|
|
|
|
);
|
|
|
|
if (!probeResp.ok) {
|
|
|
|
throw new Error(`Probe failed for ${owner}/${name}: ${probeResp.statusText}`);
|
|
|
|
}
|
|
|
|
const probeData: plugins.interfaces.ICommit[] = await probeResp.json();
|
|
|
|
// If no commits or no new commits since last run, skip
|
|
|
|
if (
|
|
|
|
probeData.length === 0 ||
|
|
|
|
new Date(probeData[0].commit.author.date).getTime() <=
|
|
|
|
new Date(effectiveSince).getTime()
|
|
|
|
) {
|
|
|
|
return { owner, name, commits: [] };
|
|
|
|
}
|
|
|
|
// 3b) Fetch commits since last run
|
|
|
|
const commits = await this.fetchRecentCommitsForRepo(
|
|
|
|
owner,
|
|
|
|
name,
|
|
|
|
effectiveSince
|
|
|
|
);
|
|
|
|
return { owner, name, commits };
|
|
|
|
} catch (e: any) {
|
|
|
|
console.error(`Failed to fetch commits for ${owner}/${name}:`, e.message);
|
|
|
|
return { owner, name, commits: [] };
|
|
|
|
}
|
|
|
|
})
|
|
|
|
);
|
|
|
|
const commitResults = await Promise.all(commitJobs);
|
|
|
|
|
|
|
|
// 4) build new commit entries with tagging, npm and changelog support
|
|
|
|
const newResults: plugins.interfaces.ICommitResult[] = [];
|
2025-09-14 20:27:51 +00:00
|
|
|
let reposWithNewCommits = 0;
|
2025-04-25 20:44:32 +00:00
|
|
|
for (const { owner, name, commits } of commitResults) {
|
|
|
|
// skip repos with no new commits
|
|
|
|
if (commits.length === 0) {
|
|
|
|
this.changelogContent = '';
|
|
|
|
continue;
|
|
|
|
}
|
2025-09-14 20:27:51 +00:00
|
|
|
reposWithNewCommits++;
|
2025-04-25 20:44:32 +00:00
|
|
|
// load changelog for this repo
|
|
|
|
await this.loadChangelogFromRepo(owner, name);
|
|
|
|
// fetch tags for this repo
|
|
|
|
let taggedShas: Set<string>;
|
2025-09-12 22:06:02 +00:00
|
|
|
let tagNameBySha: Map<string, string>;
|
2025-04-25 20:44:32 +00:00
|
|
|
try {
|
2025-09-12 22:06:02 +00:00
|
|
|
const tagInfo = await this.fetchTags(owner, name);
|
|
|
|
taggedShas = tagInfo.shas;
|
|
|
|
tagNameBySha = tagInfo.map;
|
2025-04-25 20:44:32 +00:00
|
|
|
} catch (e: any) {
|
|
|
|
console.error(`Failed to fetch tags for ${owner}/${name}:`, e.message);
|
|
|
|
taggedShas = new Set<string>();
|
2025-09-12 22:06:02 +00:00
|
|
|
tagNameBySha = new Map<string, string>();
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
|
|
|
// fetch npm package info only if any new commits correspond to a tag
|
|
|
|
const hasTaggedCommit = commits.some((c) => taggedShas.has(c.sha));
|
|
|
|
let pkgInfo: { allVersions: Array<{ version: string }> } | null = null;
|
|
|
|
if (hasTaggedCommit && this.enableNpmCheck) {
|
|
|
|
try {
|
|
|
|
pkgInfo = await this.npmRegistry.getPackageInfo(`@${owner}/${name}`);
|
|
|
|
} catch (e: any) {
|
|
|
|
console.error(`Failed to fetch package info for ${owner}/${name}:`, e.message);
|
|
|
|
pkgInfo = null;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// build commit entries
|
|
|
|
for (const c of commits) {
|
|
|
|
const isTagged = taggedShas.has(c.sha);
|
2025-09-12 22:06:02 +00:00
|
|
|
// derive version from tag name if present (strip leading 'v')
|
|
|
|
let versionFromTag: string | undefined;
|
|
|
|
if (isTagged) {
|
|
|
|
const tagName = tagNameBySha.get(c.sha);
|
|
|
|
if (tagName) {
|
|
|
|
versionFromTag = tagName.startsWith('v') ? tagName.substring(1) : tagName;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
const publishedOnNpm = isTagged && pkgInfo && versionFromTag
|
|
|
|
? pkgInfo.allVersions.some((v) => v.version === versionFromTag)
|
2025-04-25 20:44:32 +00:00
|
|
|
: false;
|
|
|
|
let changelogEntry: string | undefined;
|
|
|
|
if (this.changelogContent) {
|
2025-09-12 22:06:02 +00:00
|
|
|
if (versionFromTag) {
|
|
|
|
changelogEntry = this.getChangelogForVersion(versionFromTag);
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
2025-09-14 20:27:51 +00:00
|
|
|
// optionally enforce an upper bound on commit timestamps
|
|
|
|
if (this.untilTimestamp) {
|
|
|
|
const ts = new Date(c.commit.author.date).getTime();
|
|
|
|
if (ts > new Date(this.untilTimestamp).getTime()) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
newResults.push({
|
|
|
|
baseUrl: this.baseUrl,
|
|
|
|
org: owner,
|
|
|
|
repo: name,
|
|
|
|
timestamp: c.commit.author.date,
|
|
|
|
prettyAgoTime: plugins.smarttime.getMilliSecondsAsHumanReadableAgoTime(
|
|
|
|
new Date(c.commit.author.date).getTime()
|
|
|
|
),
|
|
|
|
hash: c.sha,
|
|
|
|
commitMessage: c.commit.message,
|
|
|
|
tagged: isTagged,
|
|
|
|
publishedOnNpm,
|
|
|
|
changelog: changelogEntry,
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// if caching is enabled, merge into in-memory cache and return full cache
|
|
|
|
if (this.enableCache) {
|
|
|
|
const existingHashes = new Set(this.cache.map((c) => c.hash));
|
|
|
|
const uniqueNew = newResults.filter((c) => !existingHashes.has(c.hash));
|
|
|
|
this.cache.push(...uniqueNew);
|
|
|
|
// trim commits older than window
|
|
|
|
if (this.cacheWindowMs !== undefined) {
|
|
|
|
const cutoff = Date.now() - this.cacheWindowMs;
|
|
|
|
this.cache = this.cache.filter((c) => new Date(c.timestamp).getTime() >= cutoff);
|
|
|
|
}
|
|
|
|
// advance lastRunTimestamp to now
|
|
|
|
this.lastRunTimestamp = new Date().toISOString();
|
|
|
|
// sort descending by timestamp
|
|
|
|
this.cache.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
|
2025-04-25 20:56:01 +00:00
|
|
|
// apply tagged-only filter if requested
|
|
|
|
if (this.enableTaggedOnly) {
|
|
|
|
return this.cache.filter((c) => c.tagged === true);
|
|
|
|
}
|
2025-09-14 20:27:51 +00:00
|
|
|
if (this.verbose) {
|
|
|
|
console.log(
|
|
|
|
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${this.cache.length} (cached)`
|
|
|
|
);
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
return this.cache;
|
|
|
|
}
|
2025-04-25 20:56:01 +00:00
|
|
|
// no caching: apply tagged-only filter if requested
|
2025-09-12 22:06:02 +00:00
|
|
|
// sort and dedupe
|
|
|
|
const seen = new Set<string>();
|
|
|
|
const unique = newResults.filter((c) => {
|
|
|
|
if (seen.has(c.hash)) return false;
|
|
|
|
seen.add(c.hash);
|
|
|
|
return true;
|
|
|
|
});
|
|
|
|
unique.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
|
2025-09-14 20:27:51 +00:00
|
|
|
const result = this.enableTaggedOnly ? unique.filter((c) => c.tagged === true) : unique;
|
|
|
|
if (this.verbose) {
|
|
|
|
console.log(
|
|
|
|
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${result.length}`
|
|
|
|
);
|
2025-04-25 20:56:01 +00:00
|
|
|
}
|
2025-09-14 20:27:51 +00:00
|
|
|
return result;
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
|
|
|
|
2024-12-13 19:51:42 +01:00
|
|
|
/**
|
2024-12-14 00:54:38 +01:00
|
|
|
* Load the changelog directly from the Gitea repository.
|
2024-12-13 19:51:42 +01:00
|
|
|
*/
|
2024-12-14 00:54:38 +01:00
|
|
|
private async loadChangelogFromRepo(owner: string, repo: string): Promise<void> {
|
|
|
|
const headers: Record<string, string> = {};
|
2025-09-12 22:06:02 +00:00
|
|
|
if (this.token) headers['Authorization'] = `token ${this.token}`;
|
|
|
|
const candidates = [
|
|
|
|
'CHANGELOG.md',
|
|
|
|
'changelog.md',
|
|
|
|
'Changelog.md',
|
|
|
|
'docs/CHANGELOG.md',
|
|
|
|
];
|
|
|
|
for (const path of candidates) {
|
|
|
|
const url = `/api/v1/repos/${owner}/${repo}/contents/${encodeURIComponent(path)}`;
|
|
|
|
const response = await this.fetchFunction(url, { headers });
|
|
|
|
if (!response.ok) {
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
try {
|
|
|
|
const data = await response.json();
|
|
|
|
if (data && data.content) {
|
|
|
|
this.changelogContent = Buffer.from(data.content, 'base64').toString('utf8');
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} catch {
|
|
|
|
// continue trying others
|
|
|
|
}
|
2024-12-14 00:54:38 +01:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
this.changelogContent = '';
|
2024-12-14 00:54:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Parse the changelog to find the entry for a given version.
|
|
|
|
* The changelog format is assumed as:
|
|
|
|
*
|
|
|
|
* # Changelog
|
|
|
|
*
|
|
|
|
* ## <date> - <version> - <description>
|
|
|
|
* <changes...>
|
|
|
|
*/
|
|
|
|
private getChangelogForVersion(version: string): string | undefined {
|
|
|
|
if (!this.changelogContent) {
|
|
|
|
return undefined;
|
|
|
|
}
|
|
|
|
const lines = this.changelogContent.split('\n');
|
|
|
|
const versionHeaderIndex = lines.findIndex((line) => line.includes(`- ${version} -`));
|
|
|
|
if (versionHeaderIndex === -1) {
|
|
|
|
return undefined;
|
|
|
|
}
|
|
|
|
|
|
|
|
const changelogLines: string[] = [];
|
|
|
|
for (let i = versionHeaderIndex + 1; i < lines.length; i++) {
|
|
|
|
const line = lines[i];
|
|
|
|
// The next version header starts with `## `
|
|
|
|
if (line.startsWith('## ')) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
changelogLines.push(line);
|
|
|
|
}
|
|
|
|
|
|
|
|
return changelogLines.join('\n').trim();
|
|
|
|
}
|
2025-04-25 20:44:32 +00:00
|
|
|
/**
|
|
|
|
* Fetch all tags for a given repo and return the set of tagged commit SHAs
|
|
|
|
*/
|
2025-09-12 22:06:02 +00:00
|
|
|
private async fetchTags(owner: string, repo: string): Promise<{ shas: Set<string>; map: Map<string, string> }> {
|
2025-04-25 20:44:32 +00:00
|
|
|
const taggedShas = new Set<string>();
|
2025-09-12 22:06:02 +00:00
|
|
|
const tagNameBySha = new Map<string, string>();
|
2024-12-13 18:31:09 +01:00
|
|
|
let page = 1;
|
|
|
|
while (true) {
|
2025-09-12 22:06:02 +00:00
|
|
|
const url = `/api/v1/repos/${owner}/${repo}/tags?limit=${this.pageLimit}&page=${page}`;
|
2024-12-14 22:53:42 +01:00
|
|
|
const resp = await this.fetchFunction(url, {
|
2024-12-14 02:28:25 +01:00
|
|
|
headers: this.token ? { Authorization: `token ${this.token}` } : {},
|
2024-12-13 18:31:09 +01:00
|
|
|
});
|
|
|
|
if (!resp.ok) {
|
2025-04-25 20:44:32 +00:00
|
|
|
console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
|
2025-09-12 22:06:02 +00:00
|
|
|
return { shas: taggedShas, map: tagNameBySha };
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
2024-12-14 00:54:38 +01:00
|
|
|
const data: plugins.interfaces.ITag[] = await resp.json();
|
2025-04-25 20:44:32 +00:00
|
|
|
if (data.length === 0) break;
|
|
|
|
for (const t of data) {
|
2025-09-12 22:06:02 +00:00
|
|
|
const sha = t.commit?.sha;
|
|
|
|
if (sha) {
|
|
|
|
taggedShas.add(sha);
|
|
|
|
if (t.name) tagNameBySha.set(sha, t.name);
|
|
|
|
}
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
if (data.length < this.pageLimit) break;
|
2024-12-13 18:31:09 +01:00
|
|
|
page++;
|
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
return { shas: taggedShas, map: tagNameBySha };
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
2024-12-13 18:31:09 +01:00
|
|
|
|
2025-04-25 20:44:32 +00:00
|
|
|
private async fetchAllOrganizations(): Promise<string[]> {
|
2025-09-12 22:06:02 +00:00
|
|
|
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
|
|
|
let page = 1;
|
|
|
|
const orgs: string[] = [];
|
|
|
|
while (true) {
|
|
|
|
const resp = await this.fetchFunction(`/api/v1/orgs?limit=${this.pageLimit}&page=${page}`, { headers });
|
|
|
|
if (!resp.ok) {
|
|
|
|
throw new Error(`Failed to fetch organizations: ${resp.status} ${resp.statusText}`);
|
|
|
|
}
|
|
|
|
const data: { username: string }[] = await resp.json();
|
|
|
|
if (data.length === 0) break;
|
|
|
|
orgs.push(...data.map((o) => o.username));
|
|
|
|
if (data.length < this.pageLimit) break;
|
|
|
|
page++;
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
return orgs;
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
2024-12-13 18:31:09 +01:00
|
|
|
|
2025-04-25 20:44:32 +00:00
|
|
|
private async fetchRepositoriesForOrg(org: string): Promise<plugins.interfaces.IRepository[]> {
|
2025-09-12 22:06:02 +00:00
|
|
|
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
|
|
|
let page = 1;
|
|
|
|
const repos: plugins.interfaces.IRepository[] = [];
|
|
|
|
while (true) {
|
|
|
|
const resp = await this.fetchFunction(`/api/v1/orgs/${org}/repos?limit=${this.pageLimit}&page=${page}`, { headers });
|
|
|
|
if (!resp.ok) {
|
|
|
|
throw new Error(`Failed to fetch repositories for ${org}: ${resp.status} ${resp.statusText}`);
|
|
|
|
}
|
|
|
|
const data: plugins.interfaces.IRepository[] = await resp.json();
|
|
|
|
if (data.length === 0) break;
|
|
|
|
repos.push(...data);
|
|
|
|
if (data.length < this.pageLimit) break;
|
|
|
|
page++;
|
2025-04-25 20:44:32 +00:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
return repos;
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
|
|
|
|
2024-12-14 02:28:25 +01:00
|
|
|
private async fetchRecentCommitsForRepo(
|
|
|
|
owner: string,
|
2025-04-25 20:44:32 +00:00
|
|
|
repo: string,
|
|
|
|
sinceTimestamp?: string
|
2024-12-14 02:28:25 +01:00
|
|
|
): Promise<plugins.interfaces.ICommit[]> {
|
2025-04-25 20:44:32 +00:00
|
|
|
const since = sinceTimestamp ?? this.lastRunTimestamp;
|
2025-09-12 22:06:02 +00:00
|
|
|
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
|
|
|
let page = 1;
|
|
|
|
const commits: plugins.interfaces.ICommit[] = [];
|
|
|
|
while (true) {
|
|
|
|
const url = `/api/v1/repos/${owner}/${repo}/commits?since=${encodeURIComponent(since)}&limit=${this.pageLimit}&page=${page}`;
|
|
|
|
const resp = await this.fetchFunction(url, { headers });
|
|
|
|
if (!resp.ok) {
|
|
|
|
throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
|
|
|
|
}
|
|
|
|
const data: plugins.interfaces.ICommit[] = await resp.json();
|
|
|
|
if (data.length === 0) break;
|
|
|
|
commits.push(...data);
|
|
|
|
if (data.length < this.pageLimit) break;
|
|
|
|
page++;
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
return commits;
|
2024-12-13 18:31:09 +01:00
|
|
|
}
|
2024-12-14 22:53:42 +01:00
|
|
|
|
2025-04-25 20:44:32 +00:00
|
|
|
public async fetchFunction(
|
|
|
|
urlArg: string,
|
|
|
|
optionsArg: RequestInit = {}
|
|
|
|
): Promise<Response> {
|
2025-09-12 22:06:02 +00:00
|
|
|
const maxAttempts = 4;
|
|
|
|
let attempt = 0;
|
|
|
|
let lastError: any;
|
|
|
|
while (attempt < maxAttempts) {
|
|
|
|
try {
|
|
|
|
const resp = await fetch(`${this.baseUrl}${urlArg}`, optionsArg);
|
|
|
|
// retry on 429 and 5xx
|
|
|
|
if (resp.status === 429 || resp.status >= 500) {
|
|
|
|
const retryAfter = Number(resp.headers.get('retry-after'));
|
|
|
|
const backoffMs = retryAfter
|
|
|
|
? retryAfter * 1000
|
|
|
|
: Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
|
|
|
|
await new Promise((r) => setTimeout(r, backoffMs));
|
|
|
|
attempt++;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
return resp;
|
|
|
|
} catch (e: any) {
|
|
|
|
lastError = e;
|
|
|
|
const backoffMs = Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
|
|
|
|
await new Promise((r) => setTimeout(r, backoffMs));
|
|
|
|
attempt++;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
throw new Error(`fetchFunction failed after retries for ${urlArg}: ${lastError?.message ?? 'unknown error'}`);
|
2024-12-14 22:53:42 +01:00
|
|
|
}
|
2025-09-12 22:06:02 +00:00
|
|
|
}
|