4 Commits

Author SHA1 Message Date
72faf7bfd4 1.7.2
Some checks failed
Default (tags) / security (push) Successful in 45s
Default (tags) / test (push) Successful in 3m29s
Default (tags) / release (push) Failing after 47s
Default (tags) / metadata (push) Successful in 55s
2025-09-14 21:07:25 +00:00
4cc819b5eb fix(core): Stabilize pagination, tag mapping, changelog parsing, and HTTP retry/backoff; add tests and caching improvements 2025-09-14 21:07:25 +00:00
f21aa58c18 feat: enhance CodeFeed with repo allowlist/denylist, optional timestamp filtering, and verbose logging 2025-09-14 20:27:51 +00:00
98f5c466a6 feat: add organization allowlist and denylist filters, enhance changelog loading, and improve fetch functions
- Introduced orgAllowlist and orgDenylist properties to filter organizations during fetching.
- Enhanced loadChangelogFromRepo to check multiple potential changelog file names.
- Updated fetchTags to return a map of tag names associated with their SHAs.
- Improved pagination logic in fetchAllOrganizations and fetchRepositoriesForOrg to handle larger datasets.
- Added retry logic in fetchFunction to handle rate limiting and server errors more gracefully.
- Modified ITag interface to include an optional name property for better tag handling.
2025-09-12 22:06:02 +00:00
9 changed files with 3175 additions and 3217 deletions

View File

@@ -1,5 +1,15 @@
# Changelog # Changelog
## 2025-09-14 - 1.7.2 - fix(core)
Stabilize pagination, tag mapping, changelog parsing, and HTTP retry/backoff; add tests and caching improvements
- Handle paginated orgs, repos, commits, and tags to avoid missing pages.
- Map tags to commit SHAs and extract version strings from tag names for changelog lookup and optional npm publish detection.
- Discover and parse repository CHANGELOG files from multiple candidate paths to extract per-version entries.
- Implement retries with exponential backoff for 429/5xx and network errors in fetchFunction.
- Add in-memory caching with window trimming, stable sorting, and optional tagged-only filtering.
- Include tests: mocked pagination & tag mapping test and integration test scaffolding using @push.rocks/tapbundle.
## 2025-04-25 - 1.7.1 - fix(CodeFeed) ## 2025-04-25 - 1.7.1 - fix(CodeFeed)
Improve commit fetching concurrency and add tagged-only commit filtering along with updated documentation and tests Improve commit fetching concurrency and add tagged-only commit filtering along with updated documentation and tests

View File

@@ -1,6 +1,6 @@
{ {
"name": "@foss.global/codefeed", "name": "@foss.global/codefeed",
"version": "1.7.1", "version": "1.7.2",
"private": false, "private": false,
"description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.", "description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.",
"exports": { "exports": {
@@ -11,22 +11,22 @@
"author": "Task Venture Capital GmbH", "author": "Task Venture Capital GmbH",
"license": "MIT", "license": "MIT",
"scripts": { "scripts": {
"test": "(tstest test/ --web)", "test": "(tstest test/ --verbose)",
"build": "(tsbuild tsfolders --web --allowimplicitany)", "build": "(tsbuild tsfolders --web --allowimplicitany)",
"buildDocs": "(tsdoc)" "buildDocs": "(tsdoc)"
}, },
"devDependencies": { "devDependencies": {
"@git.zone/tsbuild": "^2.3.2", "@git.zone/tsbuild": "^2.6.8",
"@git.zone/tsbundle": "^2.2.5", "@git.zone/tsbundle": "^2.5.1",
"@git.zone/tsrun": "^1.2.46", "@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^1.0.96", "@git.zone/tstest": "^2.3.8",
"@push.rocks/tapbundle": "^5.6.3", "@push.rocks/tapbundle": "^6.0.3",
"@types/node": "^22.15.2" "@types/node": "^22.15.2"
}, },
"dependencies": { "dependencies": {
"@push.rocks/lik": "^6.2.2", "@push.rocks/lik": "^6.2.2",
"@push.rocks/qenv": "^6.1.0", "@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartnpm": "^2.0.4", "@push.rocks/smartnpm": "^2.0.6",
"@push.rocks/smarttime": "^4.1.1", "@push.rocks/smarttime": "^4.1.1",
"@push.rocks/smartxml": "^1.1.1" "@push.rocks/smartxml": "^1.1.1"
}, },

5808
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

220
readme.md
View File

@@ -1,143 +1,99 @@
```markdown
# @foss.global/codefeed # @foss.global/codefeed
A module for creating feeds for code development. Generate an activity feed from a Gitea instance. Scans orgs and repos, retrieves commits since a configurable timestamp, enriches with tags, optional npm publish detection, and CHANGELOG snippets.
## Install ## Install
To install the `@foss.global/codefeed` package, you can run the following npm command in your project directory: ```bash
pnpm add @foss.global/codefeed
# or
npm i @foss.global/codefeed
```
Requires Node.js 18+ (global fetch/Request/Response) and ESM.
## Quick Start
```ts
import { CodeFeed } from '@foss.global/codefeed';
// Fetch commits since one week ago (default), no caching
const feed = new CodeFeed('https://code.example.com', 'gitea_token');
const commits = await feed.fetchAllCommitsFromInstance();
console.log(commits);
```
### With options
```ts
const thirtyDays = 30 * 24 * 60 * 60 * 1000;
const since = new Date(Date.now() - thirtyDays).toISOString();
const feed = new CodeFeed('https://code.example.com', 'gitea_token', since, {
enableCache: true, // keep results in memory
cacheWindowMs: thirtyDays, // trim cache to this window
enableNpmCheck: true, // check npm for published versions
taggedOnly: false, // return all commits (or only tagged)
orgAllowlist: ['myorg'], // only scan these orgs
orgDenylist: ['archive'], // skip these orgs
repoAllowlist: ['myorg/app1', 'myorg/app2'], // only these repos
repoDenylist: ['myorg/old-repo'], // skip these repos
untilTimestamp: new Date().toISOString(), // optional upper bound
verbose: true, // print a short metrics summary
});
const commits = await feed.fetchAllCommitsFromInstance();
```
Each returned item follows this shape:
```ts
interface ICommitResult {
baseUrl: string;
org: string;
repo: string;
timestamp: string; // ISO date
hash: string; // commit SHA
commitMessage: string;
tagged: boolean; // commit is pointed to by a tag
publishedOnNpm: boolean; // only when npm check enabled and tag matches
prettyAgoTime: string; // human-readable diff
changelog: string | undefined; // snippet for matching tag version
}
```
## Features
- Pagination for orgs, repos, commits, and tags (no missing pages)
- Retries with exponential backoff for 429/5xx and network errors
- CHANGELOG discovery with case variants (`CHANGELOG.md`, `changelog.md`, `docs/CHANGELOG.md`)
- Tag-to-version mapping based on tag names (`vX.Y.Z` → `X.Y.Z`)
- Optional npm publish detection via `@org/repo` package versions
- In-memory caching with window trimming and stable sorting
- Allow/deny filters for orgs and repos, optional time upper bound
- One-line metrics summary when `verbose: true`
## Environment
- Gitea base URL and an optional token with read access
- Node.js 18+ (global fetch)
## Testing
The repo contains:
- An integration test using a `GITEA_TOKEN` from `.nogit/` via `@push.rocks/qenv`.
- A mocked pagination test that does not require network.
Run tests:
```bash ```bash
npm install @foss.global/codefeed pnpm test
``` ```
Ensure that you have a compatible version of Node.js installed and that your project is set up to support ECMAScript modules. The `@foss.global/codefeed` module uses ESM syntax. For the integration test, ensure `GITEA_TOKEN` is provided (e.g., via `.nogit/` as used in `test/test.ts`).
## Usage ## Notes
The `@foss.global/codefeed` package is designed to help developers generate feeds for code developments, specifically targeting Gitea repositories. It fetches and processes commit data, changelogs, and repository activities for further analysis or visualization. Here, we'll delve into how you can utilize the different features of the `CodeFeed` class. - When `taggedOnly` is enabled, the feed includes only commits associated with tags.
- `publishedOnNpm` is computed by matching the tag-derived version against the npm registry for `@org/repo`.
### Setting Up CodeFeed - For very large instances, consider using allowlists/denylists and enabling caching for incremental runs.
To get started, import the `CodeFeed` class from the module:
```typescript
import { CodeFeed } from '@foss.global/codefeed';
```
Then, create an instance of `CodeFeed`. You'll need the base URL of your Gitea instance and optionally an API token if your repositories require authentication.
```typescript
// default: fetch commits since 7 days ago, no caching or npm checks, include all commits
const codeFeed = new CodeFeed(
'https://your-gitea-instance-url.com',
'your-api-token'
);
// with options: cache commits in-memory for 30 days, disable npm lookups, return only tagged commits
const thirtyDays = 30 * 24 * 60 * 60 * 1000;
const codeFeedStateful = new CodeFeed(
'https://your-gitea-instance-url.com',
'your-api-token',
undefined, // defaults to 7 days ago
{
enableCache: true,
cacheWindowMs: thirtyDays,
enableNpmCheck: false,
taggedOnly: true,
}
);
```
The constructor can also accept a `lastRunTimestamp` which indicates the last time a sync was performed. If not provided, it defaults to one week (7 days) prior to the current time.
### Fetching Commits
One of the core functionalities of CodeFeed is fetching commits from a Gitea instance. By calling `fetchAllCommitsFromInstance`, you can retrieve commits across multiple repositories:
```typescript
(async () => {
try {
const commits = await codeFeed.fetchAllCommitsFromInstance();
console.log(commits);
} catch (error) {
console.error('An error occurred while fetching commits:', error);
}
})();
```
This method scans all organizations and repositories, fetches all commits since the constructor's `lastRunTimestamp` (default: one week ago), and enriches them with metadata like:
- Git tags (to detect releases)
- npm publication status (when enabled)
- parsed changelog entries (when available)
When `taggedOnly` is enabled, only commits marked as release tags are returned. When `enableCache` is enabled, previously fetched commits are kept in memory (up to `cacheWindowMs`), and only new commits are fetched on subsequent calls.
Each commit object in the resulting array conforms to the `ICommitResult` interface, containing details such as:
- `baseUrl`
- `org`
- `repo`
- `timestamp`
- `hash`
- `commitMessage`
- `tagged` (boolean)
- `publishedOnNpm` (boolean)
- `prettyAgoTime` (human-readable relative time)
- `changelog` (text from the `changelog.md` associated with a commit)
### Understanding the Data Fetch Process
#### Fetching Organizations
The `fetchAllOrganizations` method collects all organizations within the Gitea instance:
```typescript
const organizations = await codeFeed.fetchAllOrganizations();
console.log('Organizations:', organizations);
```
This method interacts with the Gitea API to pull organization names, aiding further requests that require organization context.
#### Fetching Repositories
Repositories under these organizations can be retrieved using `fetchAllRepositories`:
```typescript
const repositories = await codeFeed.fetchAllRepositories();
console.log('Repositories:', repositories);
```
Here, filtering by organization can help narrow down the scope further when dealing with large instances.
#### Fetching Tags and Commits
To handle repository-specific details, use:
- `fetchTags(owner: string, repo: string)`: Appropriately handles paginated tag data within a repository.
- `fetchRecentCommitsForRepo(owner: string, repo: string)`: Gathers commit data specific to the past 24 hours for a given repository.
```typescript
const tags = await codeFeed.fetchTags('orgName', 'repoName');
const recentCommits = await codeFeed.fetchRecentCommitsForRepo('orgName', 'repoName');
console.log('Tags:', tags);
console.log('Recent Commits:', recentCommits);
```
### Changelog Integration
Loading changelog content from a repository is integrated into the flow with `loadChangelogFromRepo`. This can be accessed when processing specific commits:
```typescript
await codeFeed.loadChangelogFromRepo('org', 'repo');
const changelog = codeFeed.getChangelogForVersion('1.0.0');
console.log('Changelog for version 1.0.0:', changelog);
```
### Conclusion
The `@foss.global/codefeed` module provides robust capabilities for extracting and managing feed data related to code developments in Gitea environments. Through systematic setup and leveraging API-driven methods, it becomes a valuable tool for developers aiming to keep track of software progress and changes efficiently. The integration hooks like changelog and npm verification further enrich its utility, offering consolidated insights into each commit's journey from codebase to published package.
Explore integrating these capabilities into your development workflows to enhance tracking, deployment pipelines, or analytics systems within your projects. Remember to always handle API tokens securely and adhere to best practices when managing access to repository resources. Stay updated on any changes or enhancements to this module for further feature exposures or bug fixes. Happy coding!
```
undefined

View File

@@ -0,0 +1,82 @@
import { expect, tap } from '@push.rocks/tapbundle';
import { CodeFeed } from '../ts/index.js';
/**
 * CodeFeed subclass whose fetchFunction serves canned JSON payloads keyed by
 * request path, so pagination and tag-mapping logic can be exercised without
 * any network access.
 */
class MockCodeFeed extends CodeFeed {
  // request path -> JSON payload returned by fetchFunction
  private routes: Record<string, any>;
  constructor() {
    super('https://mock', undefined, '2024-01-01T00:00:00.000Z', {
      enableCache: false,
      enableNpmCheck: false,
      taggedOnly: false,
      verbose: false,
    });
    // Helper producing a minimal Gitea-style commit object.
    const makeCommit = (sha: string, date: string, message = 'chore: update') => ({
      sha,
      commit: { author: { date }, message },
    });
    // First page holds exactly 50 entries (the page limit) so the client must
    // request a second page to know it has reached the end.
    const firstPage: any[] = [];
    for (let i = 0; i < 50; i++) {
      firstPage.push(makeCommit(`sha-${i}`, `2024-01-0${(i % 9) + 1}T00:00:00.000Z`));
    }
    // Short second page terminates pagination and carries the tagged commit.
    const secondPage = [
      makeCommit('sha-50', '2024-01-10T00:00:00.000Z'),
      makeCommit('sha-tagged', '2024-01-11T00:00:00.000Z'),
    ];
    const tagList = [
      { name: 'v1.2.3', commit: { sha: 'sha-tagged' } },
    ];
    // Gitea's contents endpoint returns the file body base64-encoded.
    const changelogB64 = Buffer.from(
      [
        '# Changelog',
        '',
        '## 2024-01-11 - 1.2.3 - Release',
        '* example change',
        '',
      ].join('\n'),
      'utf8'
    ).toString('base64');
    this.routes = {
      '/api/v1/orgs?limit=50&page=1': [{ username: 'org1' }],
      '/api/v1/orgs?limit=50&page=2': [],
      '/api/v1/orgs/org1/repos?limit=50&page=1': [{ name: 'repo1' }],
      '/api/v1/orgs/org1/repos?limit=50&page=2': [],
      '/api/v1/repos/org1/repo1/commits?limit=1': [makeCommit('probe', '2024-01-12T00:00:00.000Z')],
      '/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=1': firstPage,
      '/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=2': secondPage,
      '/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=3': [],
      '/api/v1/repos/org1/repo1/tags?limit=50&page=1': tagList,
      '/api/v1/repos/org1/repo1/tags?limit=50&page=2': [],
      '/api/v1/repos/org1/repo1/contents/CHANGELOG.md': { content: changelogB64 },
    };
  }
  /**
   * Serve the canned payload registered for urlArg, or a 404 Response when
   * the path is unknown. Options are accepted but ignored.
   */
  public async fetchFunction(urlArg: string, _optionsArg: RequestInit = {}): Promise<Response> {
    const payload = this.routes[urlArg];
    if (payload === undefined) {
      return new Response('Not found', { status: 404, statusText: 'Not Found' });
    }
    return new Response(JSON.stringify(payload), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    });
  }
}
let mockFeed: MockCodeFeed;
tap.test('mock: pagination and tag mapping', async () => {
  mockFeed = new MockCodeFeed();
  const commits = await mockFeed.fetchAllCommitsFromInstance();
  // Two commit pages (50 + 2) must both be consumed, so more than 50 results.
  expect(commits).toBeArray();
  expect(commits.length).toBeGreaterThan(50);
  // The commit pointed to by tag v1.2.3 must be flagged as tagged.
  const taggedEntry = commits.find((entry) => entry.hash === 'sha-tagged');
  expect(taggedEntry).toBeTruthy();
  expect(taggedEntry!.tagged).toBeTrue();
  // Its changelog snippet is resolved via the tag-derived version string.
  expect(taggedEntry!.changelog).toBeTypeofString();
});
tap.start();

View File

@@ -1,4 +1,4 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle'; import { expect, tap } from '@push.rocks/tapbundle';
import * as codefeed from '../ts/index.js'; import * as codefeed from '../ts/index.js';
import * as qenv from '@push.rocks/qenv'; import * as qenv from '@push.rocks/qenv';
const testQenv = new qenv.Qenv('./', '.nogit/'); const testQenv = new qenv.Qenv('./', '.nogit/');

View File

@@ -3,6 +3,6 @@
*/ */
export const commitinfo = { export const commitinfo = {
name: '@foss.global/codefeed', name: '@foss.global/codefeed',
version: '1.7.1', version: '1.7.2',
description: 'The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.' description: 'The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.'
} }

View File

@@ -4,6 +4,7 @@ export class CodeFeed {
private baseUrl: string; private baseUrl: string;
private token?: string; private token?: string;
private lastRunTimestamp: string; private lastRunTimestamp: string;
private pageLimit = 50;
// Raw changelog content for the current repository // Raw changelog content for the current repository
private changelogContent: string = ''; private changelogContent: string = '';
// npm registry helper for published-on-npm checks // npm registry helper for published-on-npm checks
@@ -16,6 +17,13 @@ export class CodeFeed {
private enableNpmCheck: boolean = true; private enableNpmCheck: boolean = true;
// return only tagged commits (false by default) // return only tagged commits (false by default)
private enableTaggedOnly: boolean = false; private enableTaggedOnly: boolean = false;
// allow/deny filters
private orgAllowlist?: string[];
private orgDenylist?: string[];
private repoAllowlist?: string[]; // entries like "org/repo"
private repoDenylist?: string[]; // entries like "org/repo"
private untilTimestamp?: string; // optional upper bound on commit timestamps
private verbose?: boolean; // optional metrics logging
constructor( constructor(
baseUrl: string, baseUrl: string,
@@ -26,6 +34,12 @@ export class CodeFeed {
cacheWindowMs?: number; cacheWindowMs?: number;
enableNpmCheck?: boolean; enableNpmCheck?: boolean;
taggedOnly?: boolean; taggedOnly?: boolean;
orgAllowlist?: string[];
orgDenylist?: string[];
repoAllowlist?: string[];
repoDenylist?: string[];
untilTimestamp?: string;
verbose?: boolean;
} }
) { ) {
this.baseUrl = baseUrl; this.baseUrl = baseUrl;
@@ -37,6 +51,12 @@ export class CodeFeed {
this.cacheWindowMs = options?.cacheWindowMs; this.cacheWindowMs = options?.cacheWindowMs;
this.enableNpmCheck = options?.enableNpmCheck ?? true; this.enableNpmCheck = options?.enableNpmCheck ?? true;
this.enableTaggedOnly = options?.taggedOnly ?? false; this.enableTaggedOnly = options?.taggedOnly ?? false;
this.orgAllowlist = options?.orgAllowlist;
this.orgDenylist = options?.orgDenylist;
this.repoAllowlist = options?.repoAllowlist;
this.repoDenylist = options?.repoDenylist;
this.untilTimestamp = options?.untilTimestamp;
this.verbose = options?.verbose ?? false;
this.cache = []; this.cache = [];
// npm registry instance for version lookups // npm registry instance for version lookups
this.npmRegistry = new plugins.smartnpm.NpmRegistry(); this.npmRegistry = new plugins.smartnpm.NpmRegistry();
@@ -61,7 +81,14 @@ export class CodeFeed {
} }
// 1) get all organizations // 1) get all organizations
const orgs = await this.fetchAllOrganizations(); let orgs = await this.fetchAllOrganizations();
// apply allow/deny filters
if (this.orgAllowlist && this.orgAllowlist.length > 0) {
orgs = orgs.filter((o) => this.orgAllowlist!.includes(o));
}
if (this.orgDenylist && this.orgDenylist.length > 0) {
orgs = orgs.filter((o) => !this.orgDenylist!.includes(o));
}
// 2) fetch repos per org in parallel // 2) fetch repos per org in parallel
const repoLists = await Promise.all( const repoLists = await Promise.all(
@@ -70,9 +97,18 @@ export class CodeFeed {
) )
); );
// flatten to [{ owner, name }] // flatten to [{ owner, name }]
const allRepos = orgs.flatMap((org, i) => let allRepos = orgs.flatMap((org, i) =>
repoLists[i].map((r) => ({ owner: org, name: r.name })) repoLists[i].map((r) => ({ owner: org, name: r.name }))
); );
// apply repo allow/deny filters using slug "org/repo"
if (this.repoAllowlist && this.repoAllowlist.length > 0) {
const allow = new Set(this.repoAllowlist.map((s) => s.toLowerCase()));
allRepos = allRepos.filter(({ owner, name }) => allow.has(`${owner}/${name}`.toLowerCase()));
}
if (this.repoDenylist && this.repoDenylist.length > 0) {
const deny = new Set(this.repoDenylist.map((s) => s.toLowerCase()));
allRepos = allRepos.filter(({ owner, name }) => !deny.has(`${owner}/${name}`.toLowerCase()));
}
// 3) probe latest commit per repo and fetch full list only if new commits exist // 3) probe latest commit per repo and fetch full list only if new commits exist
const commitJobs = allRepos.map(({ owner, name }) => const commitJobs = allRepos.map(({ owner, name }) =>
@@ -112,21 +148,27 @@ export class CodeFeed {
// 4) build new commit entries with tagging, npm and changelog support // 4) build new commit entries with tagging, npm and changelog support
const newResults: plugins.interfaces.ICommitResult[] = []; const newResults: plugins.interfaces.ICommitResult[] = [];
let reposWithNewCommits = 0;
for (const { owner, name, commits } of commitResults) { for (const { owner, name, commits } of commitResults) {
// skip repos with no new commits // skip repos with no new commits
if (commits.length === 0) { if (commits.length === 0) {
this.changelogContent = ''; this.changelogContent = '';
continue; continue;
} }
reposWithNewCommits++;
// load changelog for this repo // load changelog for this repo
await this.loadChangelogFromRepo(owner, name); await this.loadChangelogFromRepo(owner, name);
// fetch tags for this repo // fetch tags for this repo
let taggedShas: Set<string>; let taggedShas: Set<string>;
let tagNameBySha: Map<string, string>;
try { try {
taggedShas = await this.fetchTags(owner, name); const tagInfo = await this.fetchTags(owner, name);
taggedShas = tagInfo.shas;
tagNameBySha = tagInfo.map;
} catch (e: any) { } catch (e: any) {
console.error(`Failed to fetch tags for ${owner}/${name}:`, e.message); console.error(`Failed to fetch tags for ${owner}/${name}:`, e.message);
taggedShas = new Set<string>(); taggedShas = new Set<string>();
tagNameBySha = new Map<string, string>();
} }
// fetch npm package info only if any new commits correspond to a tag // fetch npm package info only if any new commits correspond to a tag
const hasTaggedCommit = commits.some((c) => taggedShas.has(c.sha)); const hasTaggedCommit = commits.some((c) => taggedShas.has(c.sha));
@@ -141,14 +183,30 @@ export class CodeFeed {
} }
// build commit entries // build commit entries
for (const c of commits) { for (const c of commits) {
const versionCandidate = c.commit.message.replace(/\n/g, '').trim();
const isTagged = taggedShas.has(c.sha); const isTagged = taggedShas.has(c.sha);
const publishedOnNpm = isTagged && pkgInfo // derive version from tag name if present (strip leading 'v')
? pkgInfo.allVersions.some((v) => v.version === versionCandidate) let versionFromTag: string | undefined;
if (isTagged) {
const tagName = tagNameBySha.get(c.sha);
if (tagName) {
versionFromTag = tagName.startsWith('v') ? tagName.substring(1) : tagName;
}
}
const publishedOnNpm = isTagged && pkgInfo && versionFromTag
? pkgInfo.allVersions.some((v) => v.version === versionFromTag)
: false; : false;
let changelogEntry: string | undefined; let changelogEntry: string | undefined;
if (this.changelogContent) { if (this.changelogContent) {
changelogEntry = this.getChangelogForVersion(versionCandidate); if (versionFromTag) {
changelogEntry = this.getChangelogForVersion(versionFromTag);
}
}
// optionally enforce an upper bound on commit timestamps
if (this.untilTimestamp) {
const ts = new Date(c.commit.author.date).getTime();
if (ts > new Date(this.untilTimestamp).getTime()) {
continue;
}
} }
newResults.push({ newResults.push({
baseUrl: this.baseUrl, baseUrl: this.baseUrl,
@@ -184,43 +242,60 @@ export class CodeFeed {
if (this.enableTaggedOnly) { if (this.enableTaggedOnly) {
return this.cache.filter((c) => c.tagged === true); return this.cache.filter((c) => c.tagged === true);
} }
if (this.verbose) {
console.log(
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${this.cache.length} (cached)`
);
}
return this.cache; return this.cache;
} }
// no caching: apply tagged-only filter if requested // no caching: apply tagged-only filter if requested
if (this.enableTaggedOnly) { // sort and dedupe
return newResults.filter((c) => c.tagged === true); const seen = new Set<string>();
const unique = newResults.filter((c) => {
if (seen.has(c.hash)) return false;
seen.add(c.hash);
return true;
});
unique.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
const result = this.enableTaggedOnly ? unique.filter((c) => c.tagged === true) : unique;
if (this.verbose) {
console.log(
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${result.length}`
);
} }
return newResults; return result;
} }
/** /**
* Load the changelog directly from the Gitea repository. * Load the changelog directly from the Gitea repository.
*/ */
private async loadChangelogFromRepo(owner: string, repo: string): Promise<void> { private async loadChangelogFromRepo(owner: string, repo: string): Promise<void> {
const url = `/api/v1/repos/${owner}/${repo}/contents/changelog.md`;
const headers: Record<string, string> = {}; const headers: Record<string, string> = {};
if (this.token) { if (this.token) headers['Authorization'] = `token ${this.token}`;
headers['Authorization'] = `token ${this.token}`; const candidates = [
'CHANGELOG.md',
'changelog.md',
'Changelog.md',
'docs/CHANGELOG.md',
];
for (const path of candidates) {
const url = `/api/v1/repos/${owner}/${repo}/contents/${encodeURIComponent(path)}`;
const response = await this.fetchFunction(url, { headers });
if (!response.ok) {
continue;
}
try {
const data = await response.json();
if (data && data.content) {
this.changelogContent = Buffer.from(data.content, 'base64').toString('utf8');
return;
}
} catch {
// continue trying others
}
} }
this.changelogContent = '';
const response = await this.fetchFunction(url, { headers });
if (!response.ok) {
console.error(
`Could not fetch CHANGELOG.md from ${owner}/${repo}: ${response.status} ${response.statusText}`
);
this.changelogContent = '';
return;
}
const data = await response.json();
if (!data.content) {
console.warn(`No content field found in response for ${owner}/${repo}/changelog.md`);
this.changelogContent = '';
return;
}
// decode base64 content
this.changelogContent = Buffer.from(data.content, 'base64').toString('utf8');
} }
/** /**
@@ -257,49 +332,68 @@ export class CodeFeed {
/** /**
* Fetch all tags for a given repo and return the set of tagged commit SHAs * Fetch all tags for a given repo and return the set of tagged commit SHAs
*/ */
private async fetchTags(owner: string, repo: string): Promise<Set<string>> { private async fetchTags(owner: string, repo: string): Promise<{ shas: Set<string>; map: Map<string, string> }> {
const taggedShas = new Set<string>(); const taggedShas = new Set<string>();
const tagNameBySha = new Map<string, string>();
let page = 1; let page = 1;
while (true) { while (true) {
const url = `/api/v1/repos/${owner}/${repo}/tags?limit=50&page=${page}`; const url = `/api/v1/repos/${owner}/${repo}/tags?limit=${this.pageLimit}&page=${page}`;
const resp = await this.fetchFunction(url, { const resp = await this.fetchFunction(url, {
headers: this.token ? { Authorization: `token ${this.token}` } : {}, headers: this.token ? { Authorization: `token ${this.token}` } : {},
}); });
if (!resp.ok) { if (!resp.ok) {
console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText}`); console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
return taggedShas; return { shas: taggedShas, map: tagNameBySha };
} }
const data: plugins.interfaces.ITag[] = await resp.json(); const data: plugins.interfaces.ITag[] = await resp.json();
if (data.length === 0) break; if (data.length === 0) break;
for (const t of data) { for (const t of data) {
if (t.commit?.sha) taggedShas.add(t.commit.sha); const sha = t.commit?.sha;
if (sha) {
taggedShas.add(sha);
if (t.name) tagNameBySha.set(sha, t.name);
}
} }
if (data.length < 50) break; if (data.length < this.pageLimit) break;
page++; page++;
} }
return taggedShas; return { shas: taggedShas, map: tagNameBySha };
} }
private async fetchAllOrganizations(): Promise<string[]> { private async fetchAllOrganizations(): Promise<string[]> {
const resp = await this.fetchFunction('/api/v1/orgs', { const headers = this.token ? { Authorization: `token ${this.token}` } : {};
headers: this.token ? { Authorization: `token ${this.token}` } : {}, let page = 1;
}); const orgs: string[] = [];
if (!resp.ok) { while (true) {
throw new Error(`Failed to fetch organizations: ${resp.statusText}`); const resp = await this.fetchFunction(`/api/v1/orgs?limit=${this.pageLimit}&page=${page}`, { headers });
if (!resp.ok) {
throw new Error(`Failed to fetch organizations: ${resp.status} ${resp.statusText}`);
}
const data: { username: string }[] = await resp.json();
if (data.length === 0) break;
orgs.push(...data.map((o) => o.username));
if (data.length < this.pageLimit) break;
page++;
} }
const data: { username: string }[] = await resp.json(); return orgs;
return data.map((o) => o.username);
} }
private async fetchRepositoriesForOrg(org: string): Promise<plugins.interfaces.IRepository[]> { private async fetchRepositoriesForOrg(org: string): Promise<plugins.interfaces.IRepository[]> {
const resp = await this.fetchFunction(`/api/v1/orgs/${org}/repos?limit=50`, { const headers = this.token ? { Authorization: `token ${this.token}` } : {};
headers: this.token ? { Authorization: `token ${this.token}` } : {}, let page = 1;
}); const repos: plugins.interfaces.IRepository[] = [];
if (!resp.ok) { while (true) {
throw new Error(`Failed to fetch repositories for ${org}: ${resp.statusText}`); const resp = await this.fetchFunction(`/api/v1/orgs/${org}/repos?limit=${this.pageLimit}&page=${page}`, { headers });
if (!resp.ok) {
throw new Error(`Failed to fetch repositories for ${org}: ${resp.status} ${resp.statusText}`);
}
const data: plugins.interfaces.IRepository[] = await resp.json();
if (data.length === 0) break;
repos.push(...data);
if (data.length < this.pageLimit) break;
page++;
} }
const data: plugins.interfaces.IRepository[] = await resp.json(); return repos;
return data;
} }
private async fetchRecentCommitsForRepo( private async fetchRecentCommitsForRepo(
@@ -308,23 +402,52 @@ export class CodeFeed {
sinceTimestamp?: string sinceTimestamp?: string
): Promise<plugins.interfaces.ICommit[]> { ): Promise<plugins.interfaces.ICommit[]> {
const since = sinceTimestamp ?? this.lastRunTimestamp; const since = sinceTimestamp ?? this.lastRunTimestamp;
const resp = await this.fetchFunction( const headers = this.token ? { Authorization: `token ${this.token}` } : {};
`/api/v1/repos/${owner}/${repo}/commits?since=${encodeURIComponent( let page = 1;
since const commits: plugins.interfaces.ICommit[] = [];
)}&limit=50`, while (true) {
{ headers: this.token ? { Authorization: `token ${this.token}` } : {} } const url = `/api/v1/repos/${owner}/${repo}/commits?since=${encodeURIComponent(since)}&limit=${this.pageLimit}&page=${page}`;
); const resp = await this.fetchFunction(url, { headers });
if (!resp.ok) { if (!resp.ok) {
throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.statusText}`); throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
}
const data: plugins.interfaces.ICommit[] = await resp.json();
if (data.length === 0) break;
commits.push(...data);
if (data.length < this.pageLimit) break;
page++;
} }
const data: plugins.interfaces.ICommit[] = await resp.json(); return commits;
return data;
} }
public async fetchFunction( public async fetchFunction(
urlArg: string, urlArg: string,
optionsArg: RequestInit = {} optionsArg: RequestInit = {}
): Promise<Response> { ): Promise<Response> {
return fetch(`${this.baseUrl}${urlArg}`, optionsArg); const maxAttempts = 4;
let attempt = 0;
let lastError: any;
while (attempt < maxAttempts) {
try {
const resp = await fetch(`${this.baseUrl}${urlArg}`, optionsArg);
// retry on 429 and 5xx
if (resp.status === 429 || resp.status >= 500) {
const retryAfter = Number(resp.headers.get('retry-after'));
const backoffMs = retryAfter
? retryAfter * 1000
: Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
await new Promise((r) => setTimeout(r, backoffMs));
attempt++;
continue;
}
return resp;
} catch (e: any) {
lastError = e;
const backoffMs = Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
await new Promise((r) => setTimeout(r, backoffMs));
attempt++;
}
}
throw new Error(`fetchFunction failed after retries for ${urlArg}: ${lastError?.message ?? 'unknown error'}`);
} }
} }

View File

@@ -22,6 +22,7 @@ export interface ICommit {
} }
export interface ITag { export interface ITag {
name?: string;
commit?: { commit?: {
sha?: string; sha?: string;
}; };