Compare commits
40 Commits
Author | SHA1 | Date | |
---|---|---|---|
72faf7bfd4 | |||
4cc819b5eb | |||
f21aa58c18 | |||
98f5c466a6 | |||
d0a00aedea | |||
b6af835d3f | |||
c639735f92 | |||
e40e008429 | |||
6032867a13 | |||
b59bd82685 | |||
a43114ab61 | |||
1e0ccec03e | |||
e5e0ceee78 | |||
d9ab609039 | |||
aa039e8b5e | |||
f511ab7a63 | |||
1df8064247 | |||
ac1f398422 | |||
3a498c00ee | |||
bb248ed408 | |||
e843197211 | |||
3502a661ea | |||
d103778a75 | |||
9b1b91eb31 | |||
25b2519324 | |||
166b289eb2 | |||
6ca6b37b1d | |||
5d0d125e43 | |||
470f4fe730 | |||
daeb38c91c | |||
9b46b0d46e | |||
46bd0a2486 | |||
d23a27eb66 | |||
96ed35e953 | |||
e3b51414a9 | |||
7b1e9ed072 | |||
27dc4dd6aa | |||
76c662356e | |||
c1e15ab47c | |||
19ecb3f9a5 |
118
changelog.md
118
changelog.md
@@ -1,5 +1,123 @@
|
||||
# Changelog
|
||||
|
||||
## 2025-09-14 - 1.7.2 - fix(core)
|
||||
Stabilize pagination, tag mapping, changelog parsing, and HTTP retry/backoff; add tests and caching improvements
|
||||
|
||||
- Handle paginated orgs, repos, commits, and tags to avoid missing pages.
|
||||
- Map tags to commit SHAs and extract version strings from tag names for changelog lookup and optional npm publish detection.
|
||||
- Discover and parse repository CHANGELOG files from multiple candidate paths to extract per-version entries.
|
||||
- Implement retries with exponential backoff for 429/5xx and network errors in fetchFunction.
|
||||
- Add in-memory caching with window trimming, stable sorting, and optional tagged-only filtering.
|
||||
- Include tests: mocked pagination & tag mapping test and integration test scaffolding using @push.rocks/tapbundle.
|
||||
|
||||
## 2025-04-25 - 1.7.1 - fix(CodeFeed)
|
||||
Improve commit fetching concurrency and add tagged-only commit filtering along with updated documentation and tests
|
||||
|
||||
- Updated readme examples to clarify default and options usage, including caching and tagged-only filtering
|
||||
- Increased non-exclusive concurrency from 5 to 20 in fetchAllCommitsFromInstance
|
||||
- Added tagged-only filtering logic for both cached and non-cached commit results
|
||||
- Modified tests to enable tagged-only mode and require npm check
|
||||
|
||||
## 2025-04-25 - 1.7.0 - feat(core)
|
||||
Enhance commit fetching with caching, concurrency improvements, and dependency upgrades
|
||||
|
||||
- Updated development dependencies (@git.zone/tsbuild, @git.zone/tsbundle, @git.zone/tstest, @push.rocks/tapbundle, @types/node) and dependency versions
|
||||
- Introduced optional caching options (enableCache, cacheWindowMs, enableNpmCheck) in the CodeFeed constructor to optimize commit retrieval
|
||||
- Refactored commit fetching to use AsyncExecutionStack for controlled concurrency and improved performance
|
||||
- Removed deprecated ts/codefeed.plugins.ts in favor of a consolidated plugins.ts module
|
||||
|
||||
## 2024-12-16 - 1.6.5 - fix(CodeFeed)
|
||||
Fixed timestamp initialization and commit fetching timeframe
|
||||
|
||||
- Updated the lastRunTimestamp initialization default period from 24 hours to 7 days in CodeFeed constructor.
|
||||
- Modified commit fetching logic to consider commits from the last 7 days instead of 24 hours in fetchRecentCommitsForRepo.
|
||||
|
||||
## 2024-12-14 - 1.6.4 - fix(core)
|
||||
Refactor fetch logic to use a unified fetchFunction for API calls
|
||||
|
||||
- Consolidated API request logic in the CodeFeed class to use fetchFunction for improved maintainability.
|
||||
|
||||
## 2024-12-14 - 1.6.3 - fix(codefeed)
|
||||
Refactor and fix formatting issues in the CodeFeed module
|
||||
|
||||
- Refactored various method format and spacing.
|
||||
- Fixed error handling formatting for readability.
|
||||
- Improved consistency in JSON handling for API responses.
|
||||
|
||||
## 2024-12-14 - 1.6.2 - fix(core)
|
||||
Fix sorting order of tagged commits by timestamp
|
||||
|
||||
- Fixed the sorting order of commits to be by timestamp in descending order after filtering for tagged commits.
|
||||
|
||||
## 2024-12-14 - 1.6.1 - fix(docs)
|
||||
Updated project metadata and expanded documentation for installation and usage.
|
||||
|
||||
- Updated description and keywords in package.json and npmextra.json.
|
||||
- Significant expansion of the README.md with detailed installation, usage, and feature instructions.
|
||||
|
||||
## 2024-12-14 - 1.6.0 - feat(core)
|
||||
Add changelog fetching and parsing functionality
|
||||
|
||||
- Implemented loadChangelogFromRepo to directly load the changelog from a Gitea repository.
|
||||
- Introduced parsing functionality to extract specific version details from the loaded changelog.
|
||||
- Updated CodeFeed class to utilize the changelog for version verification and commit processing.
|
||||
|
||||
## 2024-12-14 - 1.5.3 - fix(core)
|
||||
Fix filtering logic for returning only tagged commits
|
||||
|
||||
- Ensure `allCommits` is filtered to only include commits with 'tagged' status before returning.
|
||||
|
||||
## 2024-12-14 - 1.5.2 - fix(core)
|
||||
Ensure stability of core functionalities.
|
||||
|
||||
|
||||
## 2024-12-14 - 1.5.1 - fix(core)
|
||||
Refine logging format in CodeFeed class
|
||||
|
||||
- Modified console log format in fetchAllCommitsFromInstance method for better readability.
|
||||
|
||||
## 2024-12-14 - 1.5.0 - feat(core)
|
||||
Refactor TypeScript interfaces and improve module exports
|
||||
|
||||
- Moved TypeScript interfaces to a dedicated file (ts/interfaces/index.ts).
|
||||
- Updated import/export structure to improve code readability and maintainability.
|
||||
- Enhanced the package.json to utilize a module exports field for better resolution.
|
||||
|
||||
## 2024-12-13 - 1.4.1 - fix(core)
|
||||
Corrected log formatting for commit information output in CodeFeed
|
||||
|
||||
- Fixed formatting issue in commit log output within the CodeFeed class to ensure proper display of timestamps.
|
||||
|
||||
## 2024-12-13 - 1.4.0 - feat(CodeFeed)
|
||||
Enhance commit results with human-readable time
|
||||
|
||||
- Integrated smarttime plugin to calculate and format timestamps into human-readable time.
|
||||
- Updated dependencies in package.json to include smarttime and adjusted versions for existing packages.
|
||||
- Improved fetchAllCommitsFromInstance method to display formatted time ago information for each commit.
|
||||
|
||||
## 2024-12-13 - 1.3.0 - feat(core)
|
||||
Export CommitResult interface for external use.
|
||||
|
||||
- Changed CommitResult from a local interface to an exported interface, allowing for external usage and integration.
|
||||
|
||||
## 2024-12-13 - 1.2.1 - fix(core)
|
||||
No changes detected
|
||||
|
||||
|
||||
## 2024-12-13 - 1.2.0 - feat(core)
|
||||
Add organization-level activity fetching and RSS parsing
|
||||
|
||||
- Integrated smartxml package for XML parsing.
|
||||
- Implemented fetching of all organizations within a Gitea instance.
|
||||
- Added functionality to check new activities in organization RSS feeds.
|
||||
- Enhanced fetching logic to include repository commits and tags.
|
||||
|
||||
## 2024-12-13 - 1.1.0 - feat(core)
|
||||
Add tracking of commits published on npm
|
||||
|
||||
- Introduced a check for published commits on npm using smartnpm.
|
||||
- Enhanced fetchAllCommitsFromInstance to include 'publishedOnNpm' status in results.
|
||||
|
||||
## 2024-12-13 - 1.0.2 - fix(core)
|
||||
Improve error handling in fetchRecentCommitsForRepo method
|
||||
|
||||
|
@@ -5,10 +5,23 @@
|
||||
"githost": "code.foss.global",
|
||||
"gitscope": "foss.global",
|
||||
"gitrepo": "codefeed",
|
||||
"description": "a module for creating feeds for code development",
|
||||
"description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.",
|
||||
"npmPackagename": "@foss.global/codefeed",
|
||||
"license": "MIT",
|
||||
"projectDomain": "foss.global"
|
||||
"projectDomain": "foss.global",
|
||||
"keywords": [
|
||||
"codefeed",
|
||||
"Gitea",
|
||||
"commits",
|
||||
"changelog",
|
||||
"repository",
|
||||
"development tools",
|
||||
"npm",
|
||||
"module",
|
||||
"code analysis",
|
||||
"activity feed",
|
||||
"version control"
|
||||
]
|
||||
}
|
||||
},
|
||||
"npmci": {
|
||||
|
46
package.json
46
package.json
@@ -1,28 +1,34 @@
|
||||
{
|
||||
"name": "@foss.global/codefeed",
|
||||
"version": "1.0.2",
|
||||
"version": "1.7.2",
|
||||
"private": false,
|
||||
"description": "a module for creating feeds for code development",
|
||||
"main": "dist_ts/index.js",
|
||||
"typings": "dist_ts/index.d.ts",
|
||||
"description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.",
|
||||
"exports": {
|
||||
".": "./dist_ts/index.js",
|
||||
"./interfaces": "./dist_ts/interfaces/index.js"
|
||||
},
|
||||
"type": "module",
|
||||
"author": "Task Venture Capital GmbH",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "(tstest test/ --web)",
|
||||
"build": "(tsbuild --web --allowimplicitany)",
|
||||
"test": "(tstest test/ --verbose)",
|
||||
"build": "(tsbuild tsfolders --web --allowimplicitany)",
|
||||
"buildDocs": "(tsdoc)"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@git.zone/tsbuild": "^2.1.25",
|
||||
"@git.zone/tsbundle": "^2.0.5",
|
||||
"@git.zone/tsbuild": "^2.6.8",
|
||||
"@git.zone/tsbundle": "^2.5.1",
|
||||
"@git.zone/tsrun": "^1.2.46",
|
||||
"@git.zone/tstest": "^1.0.44",
|
||||
"@push.rocks/tapbundle": "^5.0.15",
|
||||
"@types/node": "^20.8.7"
|
||||
"@git.zone/tstest": "^2.3.8",
|
||||
"@push.rocks/tapbundle": "^6.0.3",
|
||||
"@types/node": "^22.15.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@push.rocks/qenv": "^6.1.0"
|
||||
"@push.rocks/lik": "^6.2.2",
|
||||
"@push.rocks/qenv": "^6.1.3",
|
||||
"@push.rocks/smartnpm": "^2.0.6",
|
||||
"@push.rocks/smarttime": "^4.1.1",
|
||||
"@push.rocks/smartxml": "^1.1.1"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -43,5 +49,19 @@
|
||||
"cli.js",
|
||||
"npmextra.json",
|
||||
"readme.md"
|
||||
]
|
||||
],
|
||||
"keywords": [
|
||||
"codefeed",
|
||||
"Gitea",
|
||||
"commits",
|
||||
"changelog",
|
||||
"repository",
|
||||
"development tools",
|
||||
"npm",
|
||||
"module",
|
||||
"code analysis",
|
||||
"activity feed",
|
||||
"version control"
|
||||
],
|
||||
"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
|
||||
}
|
||||
|
7059
pnpm-lock.yaml
generated
7059
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
98
readme.md
98
readme.md
@@ -1,7 +1,99 @@
|
||||
# @foss.global/codefeed
|
||||
|
||||
a module for creating feeds for code development
|
||||
Generate an activity feed from a Gitea instance. Scans orgs and repos, retrieves commits since a configurable timestamp, enriches with tags, optional npm publish detection, and CHANGELOG snippets.
|
||||
|
||||
## How to create the docs
|
||||
## Install
|
||||
|
||||
To create docs run gitzone aidoc.
|
||||
```bash
|
||||
pnpm add @foss.global/codefeed
|
||||
# or
|
||||
npm i @foss.global/codefeed
|
||||
```
|
||||
|
||||
Requires Node.js 18+ (global fetch/Request/Response) and ESM.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```ts
|
||||
import { CodeFeed } from '@foss.global/codefeed';
|
||||
|
||||
// Fetch commits since one week ago (default), no caching
|
||||
const feed = new CodeFeed('https://code.example.com', 'gitea_token');
|
||||
const commits = await feed.fetchAllCommitsFromInstance();
|
||||
console.log(commits);
|
||||
```
|
||||
|
||||
### With options
|
||||
|
||||
```ts
|
||||
const thirtyDays = 30 * 24 * 60 * 60 * 1000;
|
||||
const since = new Date(Date.now() - thirtyDays).toISOString();
|
||||
|
||||
const feed = new CodeFeed('https://code.example.com', 'gitea_token', since, {
|
||||
enableCache: true, // keep results in memory
|
||||
cacheWindowMs: thirtyDays, // trim cache to this window
|
||||
enableNpmCheck: true, // check npm for published versions
|
||||
taggedOnly: false, // return all commits (or only tagged)
|
||||
orgAllowlist: ['myorg'], // only scan these orgs
|
||||
orgDenylist: ['archive'], // skip these orgs
|
||||
repoAllowlist: ['myorg/app1', 'myorg/app2'], // only these repos
|
||||
repoDenylist: ['myorg/old-repo'], // skip these repos
|
||||
untilTimestamp: new Date().toISOString(), // optional upper bound
|
||||
verbose: true, // print a short metrics summary
|
||||
});
|
||||
|
||||
const commits = await feed.fetchAllCommitsFromInstance();
|
||||
```
|
||||
|
||||
Each returned item follows this shape:
|
||||
|
||||
```ts
|
||||
interface ICommitResult {
|
||||
baseUrl: string;
|
||||
org: string;
|
||||
repo: string;
|
||||
timestamp: string; // ISO date
|
||||
hash: string; // commit SHA
|
||||
commitMessage: string;
|
||||
tagged: boolean; // commit is pointed to by a tag
|
||||
publishedOnNpm: boolean; // only when npm check enabled and tag matches
|
||||
prettyAgoTime: string; // human-readable diff
|
||||
changelog: string | undefined; // snippet for matching tag version
|
||||
}
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
- Pagination for orgs, repos, commits, and tags (no missing pages)
|
||||
- Retries with exponential backoff for 429/5xx and network errors
|
||||
- CHANGELOG discovery with case variants (`CHANGELOG.md`, `changelog.md`, `docs/CHANGELOG.md`)
|
||||
- Tag-to-version mapping based on tag names (`vX.Y.Z` → `X.Y.Z`)
|
||||
- Optional npm publish detection via `@org/repo` package versions
|
||||
- In-memory caching with window trimming and stable sorting
|
||||
- Allow/deny filters for orgs and repos, optional time upper bound
|
||||
- One-line metrics summary when `verbose: true`
|
||||
|
||||
## Environment
|
||||
|
||||
- Gitea base URL and an optional token with read access
|
||||
- Node.js 18+ (global fetch)
|
||||
|
||||
## Testing
|
||||
|
||||
The repo contains:
|
||||
- An integration test using a `GITEA_TOKEN` from `.nogit/` via `@push.rocks/qenv`.
|
||||
- A mocked pagination test that does not require network.
|
||||
|
||||
Run tests:
|
||||
|
||||
```bash
|
||||
pnpm test
|
||||
```
|
||||
|
||||
For the integration test, ensure `GITEA_TOKEN` is provided (e.g., via `.nogit/` as used in `test/test.ts`).
|
||||
|
||||
## Notes
|
||||
|
||||
- When `taggedOnly` is enabled, the feed includes only commits associated with tags.
|
||||
- `publishedOnNpm` is computed by matching the tag-derived version against the npm registry for `@org/repo`.
|
||||
- For very large instances, consider using allowlists/denylists and enabling caching for incremental runs.
|
||||
|
82
test/mock.pagination.test.ts
Normal file
82
test/mock.pagination.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { expect, tap } from '@push.rocks/tapbundle';
|
||||
import { CodeFeed } from '../ts/index.js';
|
||||
|
||||
// A subclass to mock fetchFunction for controlled pagination tests
|
||||
class MockCodeFeed extends CodeFeed {
|
||||
private data: Record<string, any>;
|
||||
constructor() {
|
||||
super('https://mock', undefined, '2024-01-01T00:00:00.000Z', {
|
||||
enableCache: false,
|
||||
enableNpmCheck: false,
|
||||
taggedOnly: false,
|
||||
verbose: false,
|
||||
});
|
||||
|
||||
// Prepare mock datasets
|
||||
const commit = (sha: string, date: string, message = 'chore: update') => ({
|
||||
sha,
|
||||
commit: { author: { date }, message },
|
||||
});
|
||||
|
||||
const commitsPage1 = Array.from({ length: 50 }).map((_, i) =>
|
||||
commit(`sha-${i}`, `2024-01-0${(i % 9) + 1}T00:00:00.000Z`)
|
||||
);
|
||||
const commitsPage2 = [commit('sha-50', '2024-01-10T00:00:00.000Z'), commit('sha-tagged', '2024-01-11T00:00:00.000Z')];
|
||||
|
||||
const tagsPage1 = [
|
||||
{ name: 'v1.2.3', commit: { sha: 'sha-tagged' } },
|
||||
];
|
||||
|
||||
const changelogContent = Buffer.from(
|
||||
[
|
||||
'# Changelog',
|
||||
'',
|
||||
'## 2024-01-11 - 1.2.3 - Release',
|
||||
'* example change',
|
||||
'',
|
||||
].join('\n'),
|
||||
'utf8'
|
||||
).toString('base64');
|
||||
|
||||
this.data = {
|
||||
'/api/v1/orgs?limit=50&page=1': [{ username: 'org1' }],
|
||||
'/api/v1/orgs?limit=50&page=2': [],
|
||||
'/api/v1/orgs/org1/repos?limit=50&page=1': [{ name: 'repo1' }],
|
||||
'/api/v1/orgs/org1/repos?limit=50&page=2': [],
|
||||
'/api/v1/repos/org1/repo1/commits?limit=1': [commit('probe', '2024-01-12T00:00:00.000Z')],
|
||||
'/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=1': commitsPage1,
|
||||
'/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=2': commitsPage2,
|
||||
'/api/v1/repos/org1/repo1/commits?since=2024-01-01T00%3A00%3A00.000Z&limit=50&page=3': [],
|
||||
'/api/v1/repos/org1/repo1/tags?limit=50&page=1': tagsPage1,
|
||||
'/api/v1/repos/org1/repo1/tags?limit=50&page=2': [],
|
||||
'/api/v1/repos/org1/repo1/contents/CHANGELOG.md': { content: changelogContent },
|
||||
};
|
||||
}
|
||||
|
||||
public async fetchFunction(urlArg: string, _optionsArg: RequestInit = {}): Promise<Response> {
|
||||
const payload = this.data[urlArg];
|
||||
if (payload === undefined) {
|
||||
return new Response('Not found', { status: 404, statusText: 'Not Found' });
|
||||
}
|
||||
return new Response(JSON.stringify(payload), { status: 200, headers: { 'content-type': 'application/json' } });
|
||||
}
|
||||
}
|
||||
|
||||
let mockFeed: MockCodeFeed;
|
||||
|
||||
tap.test('mock: pagination and tag mapping', async () => {
|
||||
mockFeed = new MockCodeFeed();
|
||||
const results = await mockFeed.fetchAllCommitsFromInstance();
|
||||
// ensure we received > 50 commits from two pages
|
||||
expect(results).toBeArray();
|
||||
expect(results.length).toBeGreaterThan(50);
|
||||
// ensure tagged commit is present and has changelog attached when found
|
||||
const tagged = results.find((r) => r.hash === 'sha-tagged');
|
||||
expect(tagged).toBeTruthy();
|
||||
expect(tagged!.tagged).toBeTrue();
|
||||
// changelog is present for that version (via tag name)
|
||||
expect(tagged!.changelog).toBeTypeofString();
|
||||
});
|
||||
|
||||
tap.start();
|
||||
|
14
test/test.ts
14
test/test.ts
@@ -1,4 +1,4 @@
|
||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
||||
import { expect, tap } from '@push.rocks/tapbundle';
|
||||
import * as codefeed from '../ts/index.js';
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
const testQenv = new qenv.Qenv('./', '.nogit/');
|
||||
@@ -9,12 +9,22 @@ let testCodeFeed: codefeed.CodeFeed;
|
||||
tap.test('first test', async () => {
|
||||
const token = await testQenv.getEnvVarOnDemand('GITEA_TOKEN');
|
||||
// console.log('token', token);
|
||||
testCodeFeed = new codefeed.CodeFeed('https://code.foss.global', token);
|
||||
// seed lastRunTimestamp to 1 year ago and enable in-memory caching for 1 year
|
||||
const oneYearMs = 365 * 24 * 60 * 60 * 1000;
|
||||
const oneYearAgo = new Date(Date.now() - oneYearMs).toISOString();
|
||||
testCodeFeed = new codefeed.CodeFeed(
|
||||
'https://code.foss.global',
|
||||
token,
|
||||
oneYearAgo,
|
||||
{ enableCache: true, cacheWindowMs: oneYearMs, enableNpmCheck: true, taggedOnly: true }
|
||||
);
|
||||
expect(testCodeFeed).toBeInstanceOf(codefeed.CodeFeed);
|
||||
});
|
||||
|
||||
tap.test('fetchAllCommitsFromInstance', async () => {
|
||||
const commits = await testCodeFeed.fetchAllCommitsFromInstance();
|
||||
// log the actual results so we can inspect them
|
||||
console.log('Fetched commits:', JSON.stringify(commits, null, 2));
|
||||
expect(commits).toBeArray();
|
||||
expect(commits.length).toBeGreaterThan(0);
|
||||
// expect(commits[0]).toBeTypeofObject();
|
||||
|
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@foss.global/codefeed',
|
||||
version: '1.0.2',
|
||||
description: 'a module for creating feeds for code development'
|
||||
version: '1.7.2',
|
||||
description: 'The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.'
|
||||
}
|
||||
|
@@ -1,6 +0,0 @@
|
||||
// @push.rocks
|
||||
import * as qenv from '@push.rocks/qenv'
|
||||
|
||||
export {
|
||||
qenv,
|
||||
}
|
599
ts/index.ts
599
ts/index.ts
@@ -1,194 +1,453 @@
|
||||
import * as plugins from './codefeed.plugins.js';
|
||||
|
||||
interface RepositoryOwner {
|
||||
login: string;
|
||||
}
|
||||
|
||||
interface Repository {
|
||||
owner: RepositoryOwner;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface CommitAuthor {
|
||||
date: string;
|
||||
}
|
||||
|
||||
interface CommitDetail {
|
||||
message: string;
|
||||
author: CommitAuthor;
|
||||
}
|
||||
|
||||
interface Commit {
|
||||
sha: string;
|
||||
commit: CommitDetail;
|
||||
}
|
||||
|
||||
interface Tag {
|
||||
commit?: {
|
||||
sha?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface RepoSearchResponse {
|
||||
data: Repository[];
|
||||
}
|
||||
|
||||
interface CommitResult {
|
||||
baseUrl: string;
|
||||
org: string;
|
||||
repo: string;
|
||||
timestamp: string;
|
||||
hash: string;
|
||||
commitMessage: string;
|
||||
tagged: boolean;
|
||||
}
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
export class CodeFeed {
|
||||
private baseUrl: string;
|
||||
private token?: string;
|
||||
private lastRunTimestamp: string;
|
||||
private pageLimit = 50;
|
||||
// Raw changelog content for the current repository
|
||||
private changelogContent: string = '';
|
||||
// npm registry helper for published-on-npm checks
|
||||
private npmRegistry: plugins.smartnpm.NpmRegistry;
|
||||
// In-memory stateful cache of commits
|
||||
private enableCache: boolean = false;
|
||||
private cacheWindowMs?: number;
|
||||
private cache: plugins.interfaces.ICommitResult[] = [];
|
||||
// enable or disable npm publishedOnNpm checks (true by default)
|
||||
private enableNpmCheck: boolean = true;
|
||||
// return only tagged commits (false by default)
|
||||
private enableTaggedOnly: boolean = false;
|
||||
// allow/deny filters
|
||||
private orgAllowlist?: string[];
|
||||
private orgDenylist?: string[];
|
||||
private repoAllowlist?: string[]; // entries like "org/repo"
|
||||
private repoDenylist?: string[]; // entries like "org/repo"
|
||||
private untilTimestamp?: string; // optional upper bound on commit timestamps
|
||||
private verbose?: boolean; // optional metrics logging
|
||||
|
||||
constructor(baseUrl: string, token?: string) {
|
||||
constructor(
|
||||
baseUrl: string,
|
||||
token?: string,
|
||||
lastRunTimestamp?: string,
|
||||
options?: {
|
||||
enableCache?: boolean;
|
||||
cacheWindowMs?: number;
|
||||
enableNpmCheck?: boolean;
|
||||
taggedOnly?: boolean;
|
||||
orgAllowlist?: string[];
|
||||
orgDenylist?: string[];
|
||||
repoAllowlist?: string[];
|
||||
repoDenylist?: string[];
|
||||
untilTimestamp?: string;
|
||||
verbose?: boolean;
|
||||
}
|
||||
) {
|
||||
this.baseUrl = baseUrl;
|
||||
this.token = token;
|
||||
console.log('CodeFeed initialized');
|
||||
this.lastRunTimestamp =
|
||||
lastRunTimestamp ?? new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();
|
||||
// configure stateful caching
|
||||
this.enableCache = options?.enableCache ?? false;
|
||||
this.cacheWindowMs = options?.cacheWindowMs;
|
||||
this.enableNpmCheck = options?.enableNpmCheck ?? true;
|
||||
this.enableTaggedOnly = options?.taggedOnly ?? false;
|
||||
this.orgAllowlist = options?.orgAllowlist;
|
||||
this.orgDenylist = options?.orgDenylist;
|
||||
this.repoAllowlist = options?.repoAllowlist;
|
||||
this.repoDenylist = options?.repoDenylist;
|
||||
this.untilTimestamp = options?.untilTimestamp;
|
||||
this.verbose = options?.verbose ?? false;
|
||||
this.cache = [];
|
||||
// npm registry instance for version lookups
|
||||
this.npmRegistry = new plugins.smartnpm.NpmRegistry();
|
||||
console.log('CodeFeed initialized with last run timestamp:', this.lastRunTimestamp);
|
||||
}
|
||||
|
||||
private async fetchAllRepositories(): Promise<Repository[]> {
|
||||
let page = 1;
|
||||
const allRepos: Repository[] = [];
|
||||
|
||||
while (true) {
|
||||
const url = new URL(`${this.baseUrl}/api/v1/repos/search`);
|
||||
url.searchParams.set('limit', '50');
|
||||
url.searchParams.set('page', page.toString());
|
||||
|
||||
const resp = await fetch(url.href, {
|
||||
headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
throw new Error(`Failed to fetch repositories: ${resp.statusText}`);
|
||||
/**
|
||||
* Fetch all new commits (since lastRunTimestamp) across all orgs and repos.
|
||||
*/
|
||||
public async fetchAllCommitsFromInstance(): Promise<plugins.interfaces.ICommitResult[]> {
|
||||
// Controlled concurrency with AsyncExecutionStack
|
||||
const stack = new plugins.lik.AsyncExecutionStack();
|
||||
stack.setNonExclusiveMaxConcurrency(20);
|
||||
// determine since timestamp for this run (stateful caching)
|
||||
let effectiveSince = this.lastRunTimestamp;
|
||||
if (this.enableCache && this.cache.length > 0) {
|
||||
// use newest timestamp in cache to fetch only tail
|
||||
effectiveSince = this.cache.reduce(
|
||||
(max, c) => (c.timestamp > max ? c.timestamp : max),
|
||||
effectiveSince
|
||||
);
|
||||
}
|
||||
|
||||
const data: RepoSearchResponse = await resp.json();
|
||||
allRepos.push(...data.data);
|
||||
|
||||
if (data.data.length < 50) {
|
||||
break;
|
||||
// 1) get all organizations
|
||||
let orgs = await this.fetchAllOrganizations();
|
||||
// apply allow/deny filters
|
||||
if (this.orgAllowlist && this.orgAllowlist.length > 0) {
|
||||
orgs = orgs.filter((o) => this.orgAllowlist!.includes(o));
|
||||
}
|
||||
page++;
|
||||
if (this.orgDenylist && this.orgDenylist.length > 0) {
|
||||
orgs = orgs.filter((o) => !this.orgDenylist!.includes(o));
|
||||
}
|
||||
|
||||
return allRepos;
|
||||
// 2) fetch repos per org in parallel
|
||||
const repoLists = await Promise.all(
|
||||
orgs.map((org) =>
|
||||
stack.getNonExclusiveExecutionSlot(() => this.fetchRepositoriesForOrg(org))
|
||||
)
|
||||
);
|
||||
// flatten to [{ owner, name }]
|
||||
let allRepos = orgs.flatMap((org, i) =>
|
||||
repoLists[i].map((r) => ({ owner: org, name: r.name }))
|
||||
);
|
||||
// apply repo allow/deny filters using slug "org/repo"
|
||||
if (this.repoAllowlist && this.repoAllowlist.length > 0) {
|
||||
const allow = new Set(this.repoAllowlist.map((s) => s.toLowerCase()));
|
||||
allRepos = allRepos.filter(({ owner, name }) => allow.has(`${owner}/${name}`.toLowerCase()));
|
||||
}
|
||||
if (this.repoDenylist && this.repoDenylist.length > 0) {
|
||||
const deny = new Set(this.repoDenylist.map((s) => s.toLowerCase()));
|
||||
allRepos = allRepos.filter(({ owner, name }) => !deny.has(`${owner}/${name}`.toLowerCase()));
|
||||
}
|
||||
|
||||
private async fetchTags(owner: string, repo: string): Promise<Set<string>> {
|
||||
let page = 1;
|
||||
const tags: Tag[] = [];
|
||||
|
||||
while (true) {
|
||||
const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/tags`);
|
||||
url.searchParams.set('limit', '50');
|
||||
url.searchParams.set('page', page.toString());
|
||||
|
||||
const resp = await fetch(url.href, {
|
||||
headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
|
||||
throw new Error(`Failed to fetch tags for ${owner}/${repo}: ${resp.statusText}`);
|
||||
}
|
||||
|
||||
const data: Tag[] = await resp.json();
|
||||
tags.push(...data);
|
||||
|
||||
if (data.length < 50) {
|
||||
break;
|
||||
}
|
||||
page++;
|
||||
}
|
||||
|
||||
const taggedCommitShas = new Set<string>();
|
||||
for (const t of tags) {
|
||||
if (t.commit?.sha) {
|
||||
taggedCommitShas.add(t.commit.sha);
|
||||
}
|
||||
}
|
||||
|
||||
return taggedCommitShas;
|
||||
}
|
||||
|
||||
private async fetchRecentCommitsForRepo(owner: string, repo: string): Promise<Commit[]> {
|
||||
const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
|
||||
let page = 1;
|
||||
const recentCommits: Commit[] = [];
|
||||
|
||||
while (true) {
|
||||
const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/commits`);
|
||||
url.searchParams.set('limit', '1');
|
||||
url.searchParams.set('page', page.toString());
|
||||
|
||||
const resp = await fetch(url.href, {
|
||||
headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
|
||||
});
|
||||
if (!resp.ok) {
|
||||
console.error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
|
||||
throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.statusText}`);
|
||||
}
|
||||
|
||||
const data: Commit[] = await resp.json();
|
||||
if (data.length === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
for (const commit of data) {
|
||||
const commitDate = new Date(commit.commit.author.date);
|
||||
if (commitDate > twentyFourHoursAgo) {
|
||||
recentCommits.push(commit);
|
||||
} else {
|
||||
// If we encounter a commit older than 24 hours, we can stop fetching more pages
|
||||
return recentCommits;
|
||||
}
|
||||
}
|
||||
|
||||
page++;
|
||||
}
|
||||
|
||||
return recentCommits;
|
||||
}
|
||||
|
||||
public async fetchAllCommitsFromInstance(): Promise<CommitResult[]> {
|
||||
const repos = await this.fetchAllRepositories();
|
||||
let allCommits: CommitResult[] = [];
|
||||
|
||||
for (const r of repos) {
|
||||
const org = r.owner.login;
|
||||
const repo = r.name;
|
||||
console.log(`Processing repository ${org}/${repo}`);
|
||||
|
||||
// 3) probe latest commit per repo and fetch full list only if new commits exist
|
||||
const commitJobs = allRepos.map(({ owner, name }) =>
|
||||
stack.getNonExclusiveExecutionSlot(async () => {
|
||||
try {
|
||||
const taggedCommitShas = await this.fetchTags(org, repo);
|
||||
const commits = await this.fetchRecentCommitsForRepo(org, repo);
|
||||
console.log(`${org}/${repo} -> Found ${commits.length} commits`);
|
||||
// 3a) Probe the most recent commit (limit=1)
|
||||
const probeResp = await this.fetchFunction(
|
||||
`/api/v1/repos/${owner}/${name}/commits?limit=1`,
|
||||
{ headers: this.token ? { Authorization: `token ${this.token}` } : {} }
|
||||
);
|
||||
if (!probeResp.ok) {
|
||||
throw new Error(`Probe failed for ${owner}/${name}: ${probeResp.statusText}`);
|
||||
}
|
||||
const probeData: plugins.interfaces.ICommit[] = await probeResp.json();
|
||||
// If no commits or no new commits since last run, skip
|
||||
if (
|
||||
probeData.length === 0 ||
|
||||
new Date(probeData[0].commit.author.date).getTime() <=
|
||||
new Date(effectiveSince).getTime()
|
||||
) {
|
||||
return { owner, name, commits: [] };
|
||||
}
|
||||
// 3b) Fetch commits since last run
|
||||
const commits = await this.fetchRecentCommitsForRepo(
|
||||
owner,
|
||||
name,
|
||||
effectiveSince
|
||||
);
|
||||
return { owner, name, commits };
|
||||
} catch (e: any) {
|
||||
console.error(`Failed to fetch commits for ${owner}/${name}:`, e.message);
|
||||
return { owner, name, commits: [] };
|
||||
}
|
||||
})
|
||||
);
|
||||
const commitResults = await Promise.all(commitJobs);
|
||||
|
||||
const formatted = commits.map((c): CommitResult => ({
|
||||
baseUrl: this.baseUrl,
|
||||
org,
|
||||
repo,
|
||||
timestamp: c.commit.author.date,
|
||||
hash: c.sha,
|
||||
commitMessage: c.commit.message,
|
||||
tagged: taggedCommitShas.has(c.sha)
|
||||
}));
|
||||
|
||||
allCommits.push(...formatted);
|
||||
} catch (error: any) {
|
||||
console.error(`Skipping repository ${org}/${repo} due to error:`, error.message);
|
||||
// 4) build new commit entries with tagging, npm and changelog support
|
||||
const newResults: plugins.interfaces.ICommitResult[] = [];
|
||||
let reposWithNewCommits = 0;
|
||||
for (const { owner, name, commits } of commitResults) {
|
||||
// skip repos with no new commits
|
||||
if (commits.length === 0) {
|
||||
this.changelogContent = '';
|
||||
continue;
|
||||
}
|
||||
reposWithNewCommits++;
|
||||
// load changelog for this repo
|
||||
await this.loadChangelogFromRepo(owner, name);
|
||||
// fetch tags for this repo
|
||||
let taggedShas: Set<string>;
|
||||
let tagNameBySha: Map<string, string>;
|
||||
try {
|
||||
const tagInfo = await this.fetchTags(owner, name);
|
||||
taggedShas = tagInfo.shas;
|
||||
tagNameBySha = tagInfo.map;
|
||||
} catch (e: any) {
|
||||
console.error(`Failed to fetch tags for ${owner}/${name}:`, e.message);
|
||||
taggedShas = new Set<string>();
|
||||
tagNameBySha = new Map<string, string>();
|
||||
}
|
||||
// fetch npm package info only if any new commits correspond to a tag
|
||||
const hasTaggedCommit = commits.some((c) => taggedShas.has(c.sha));
|
||||
let pkgInfo: { allVersions: Array<{ version: string }> } | null = null;
|
||||
if (hasTaggedCommit && this.enableNpmCheck) {
|
||||
try {
|
||||
pkgInfo = await this.npmRegistry.getPackageInfo(`@${owner}/${name}`);
|
||||
} catch (e: any) {
|
||||
console.error(`Failed to fetch package info for ${owner}/${name}:`, e.message);
|
||||
pkgInfo = null;
|
||||
}
|
||||
}
|
||||
// build commit entries
|
||||
for (const c of commits) {
|
||||
const isTagged = taggedShas.has(c.sha);
|
||||
// derive version from tag name if present (strip leading 'v')
|
||||
let versionFromTag: string | undefined;
|
||||
if (isTagged) {
|
||||
const tagName = tagNameBySha.get(c.sha);
|
||||
if (tagName) {
|
||||
versionFromTag = tagName.startsWith('v') ? tagName.substring(1) : tagName;
|
||||
}
|
||||
}
|
||||
const publishedOnNpm = isTagged && pkgInfo && versionFromTag
|
||||
? pkgInfo.allVersions.some((v) => v.version === versionFromTag)
|
||||
: false;
|
||||
let changelogEntry: string | undefined;
|
||||
if (this.changelogContent) {
|
||||
if (versionFromTag) {
|
||||
changelogEntry = this.getChangelogForVersion(versionFromTag);
|
||||
}
|
||||
}
|
||||
// optionally enforce an upper bound on commit timestamps
|
||||
if (this.untilTimestamp) {
|
||||
const ts = new Date(c.commit.author.date).getTime();
|
||||
if (ts > new Date(this.untilTimestamp).getTime()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
newResults.push({
|
||||
baseUrl: this.baseUrl,
|
||||
org: owner,
|
||||
repo: name,
|
||||
timestamp: c.commit.author.date,
|
||||
prettyAgoTime: plugins.smarttime.getMilliSecondsAsHumanReadableAgoTime(
|
||||
new Date(c.commit.author.date).getTime()
|
||||
),
|
||||
hash: c.sha,
|
||||
commitMessage: c.commit.message,
|
||||
tagged: isTagged,
|
||||
publishedOnNpm,
|
||||
changelog: changelogEntry,
|
||||
});
|
||||
}
|
||||
}
|
||||
// if caching is enabled, merge into in-memory cache and return full cache
|
||||
if (this.enableCache) {
|
||||
const existingHashes = new Set(this.cache.map((c) => c.hash));
|
||||
const uniqueNew = newResults.filter((c) => !existingHashes.has(c.hash));
|
||||
this.cache.push(...uniqueNew);
|
||||
// trim commits older than window
|
||||
if (this.cacheWindowMs !== undefined) {
|
||||
const cutoff = Date.now() - this.cacheWindowMs;
|
||||
this.cache = this.cache.filter((c) => new Date(c.timestamp).getTime() >= cutoff);
|
||||
}
|
||||
// advance lastRunTimestamp to now
|
||||
this.lastRunTimestamp = new Date().toISOString();
|
||||
// sort descending by timestamp
|
||||
this.cache.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
|
||||
// apply tagged-only filter if requested
|
||||
if (this.enableTaggedOnly) {
|
||||
return this.cache.filter((c) => c.tagged === true);
|
||||
}
|
||||
if (this.verbose) {
|
||||
console.log(
|
||||
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${this.cache.length} (cached)`
|
||||
);
|
||||
}
|
||||
return this.cache;
|
||||
}
|
||||
// no caching: apply tagged-only filter if requested
|
||||
// sort and dedupe
|
||||
const seen = new Set<string>();
|
||||
const unique = newResults.filter((c) => {
|
||||
if (seen.has(c.hash)) return false;
|
||||
seen.add(c.hash);
|
||||
return true;
|
||||
});
|
||||
unique.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
|
||||
const result = this.enableTaggedOnly ? unique.filter((c) => c.tagged === true) : unique;
|
||||
if (this.verbose) {
|
||||
console.log(
|
||||
`[CodeFeed] orgs=${orgs.length} repos=${allRepos.length} reposWithNew=${reposWithNewCommits} commits=${result.length}`
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return allCommits;
|
||||
/**
|
||||
* Load the changelog directly from the Gitea repository.
|
||||
*/
|
||||
private async loadChangelogFromRepo(owner: string, repo: string): Promise<void> {
|
||||
const headers: Record<string, string> = {};
|
||||
if (this.token) headers['Authorization'] = `token ${this.token}`;
|
||||
const candidates = [
|
||||
'CHANGELOG.md',
|
||||
'changelog.md',
|
||||
'Changelog.md',
|
||||
'docs/CHANGELOG.md',
|
||||
];
|
||||
for (const path of candidates) {
|
||||
const url = `/api/v1/repos/${owner}/${repo}/contents/${encodeURIComponent(path)}`;
|
||||
const response = await this.fetchFunction(url, { headers });
|
||||
if (!response.ok) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const data = await response.json();
|
||||
if (data && data.content) {
|
||||
this.changelogContent = Buffer.from(data.content, 'base64').toString('utf8');
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// continue trying others
|
||||
}
|
||||
}
|
||||
this.changelogContent = '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the changelog to find the entry for a given version.
|
||||
* The changelog format is assumed as:
|
||||
*
|
||||
* # Changelog
|
||||
*
|
||||
* ## <date> - <version> - <description>
|
||||
* <changes...>
|
||||
*/
|
||||
private getChangelogForVersion(version: string): string | undefined {
|
||||
if (!this.changelogContent) {
|
||||
return undefined;
|
||||
}
|
||||
const lines = this.changelogContent.split('\n');
|
||||
const versionHeaderIndex = lines.findIndex((line) => line.includes(`- ${version} -`));
|
||||
if (versionHeaderIndex === -1) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const changelogLines: string[] = [];
|
||||
for (let i = versionHeaderIndex + 1; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
// The next version header starts with `## `
|
||||
if (line.startsWith('## ')) {
|
||||
break;
|
||||
}
|
||||
changelogLines.push(line);
|
||||
}
|
||||
|
||||
return changelogLines.join('\n').trim();
|
||||
}
|
||||
/**
|
||||
* Fetch all tags for a given repo and return the set of tagged commit SHAs
|
||||
*/
|
||||
private async fetchTags(owner: string, repo: string): Promise<{ shas: Set<string>; map: Map<string, string> }> {
|
||||
const taggedShas = new Set<string>();
|
||||
const tagNameBySha = new Map<string, string>();
|
||||
let page = 1;
|
||||
while (true) {
|
||||
const url = `/api/v1/repos/${owner}/${repo}/tags?limit=${this.pageLimit}&page=${page}`;
|
||||
const resp = await this.fetchFunction(url, {
|
||||
headers: this.token ? { Authorization: `token ${this.token}` } : {},
|
||||
});
|
||||
if (!resp.ok) {
|
||||
console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
|
||||
return { shas: taggedShas, map: tagNameBySha };
|
||||
}
|
||||
const data: plugins.interfaces.ITag[] = await resp.json();
|
||||
if (data.length === 0) break;
|
||||
for (const t of data) {
|
||||
const sha = t.commit?.sha;
|
||||
if (sha) {
|
||||
taggedShas.add(sha);
|
||||
if (t.name) tagNameBySha.set(sha, t.name);
|
||||
}
|
||||
}
|
||||
if (data.length < this.pageLimit) break;
|
||||
page++;
|
||||
}
|
||||
return { shas: taggedShas, map: tagNameBySha };
|
||||
}
|
||||
|
||||
private async fetchAllOrganizations(): Promise<string[]> {
|
||||
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
||||
let page = 1;
|
||||
const orgs: string[] = [];
|
||||
while (true) {
|
||||
const resp = await this.fetchFunction(`/api/v1/orgs?limit=${this.pageLimit}&page=${page}`, { headers });
|
||||
if (!resp.ok) {
|
||||
throw new Error(`Failed to fetch organizations: ${resp.status} ${resp.statusText}`);
|
||||
}
|
||||
const data: { username: string }[] = await resp.json();
|
||||
if (data.length === 0) break;
|
||||
orgs.push(...data.map((o) => o.username));
|
||||
if (data.length < this.pageLimit) break;
|
||||
page++;
|
||||
}
|
||||
return orgs;
|
||||
}
|
||||
|
||||
private async fetchRepositoriesForOrg(org: string): Promise<plugins.interfaces.IRepository[]> {
|
||||
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
||||
let page = 1;
|
||||
const repos: plugins.interfaces.IRepository[] = [];
|
||||
while (true) {
|
||||
const resp = await this.fetchFunction(`/api/v1/orgs/${org}/repos?limit=${this.pageLimit}&page=${page}`, { headers });
|
||||
if (!resp.ok) {
|
||||
throw new Error(`Failed to fetch repositories for ${org}: ${resp.status} ${resp.statusText}`);
|
||||
}
|
||||
const data: plugins.interfaces.IRepository[] = await resp.json();
|
||||
if (data.length === 0) break;
|
||||
repos.push(...data);
|
||||
if (data.length < this.pageLimit) break;
|
||||
page++;
|
||||
}
|
||||
return repos;
|
||||
}
|
||||
|
||||
private async fetchRecentCommitsForRepo(
|
||||
owner: string,
|
||||
repo: string,
|
||||
sinceTimestamp?: string
|
||||
): Promise<plugins.interfaces.ICommit[]> {
|
||||
const since = sinceTimestamp ?? this.lastRunTimestamp;
|
||||
const headers = this.token ? { Authorization: `token ${this.token}` } : {};
|
||||
let page = 1;
|
||||
const commits: plugins.interfaces.ICommit[] = [];
|
||||
while (true) {
|
||||
const url = `/api/v1/repos/${owner}/${repo}/commits?since=${encodeURIComponent(since)}&limit=${this.pageLimit}&page=${page}`;
|
||||
const resp = await this.fetchFunction(url, { headers });
|
||||
if (!resp.ok) {
|
||||
throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
|
||||
}
|
||||
const data: plugins.interfaces.ICommit[] = await resp.json();
|
||||
if (data.length === 0) break;
|
||||
commits.push(...data);
|
||||
if (data.length < this.pageLimit) break;
|
||||
page++;
|
||||
}
|
||||
return commits;
|
||||
}
|
||||
|
||||
public async fetchFunction(
|
||||
urlArg: string,
|
||||
optionsArg: RequestInit = {}
|
||||
): Promise<Response> {
|
||||
const maxAttempts = 4;
|
||||
let attempt = 0;
|
||||
let lastError: any;
|
||||
while (attempt < maxAttempts) {
|
||||
try {
|
||||
const resp = await fetch(`${this.baseUrl}${urlArg}`, optionsArg);
|
||||
// retry on 429 and 5xx
|
||||
if (resp.status === 429 || resp.status >= 500) {
|
||||
const retryAfter = Number(resp.headers.get('retry-after'));
|
||||
const backoffMs = retryAfter
|
||||
? retryAfter * 1000
|
||||
: Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
|
||||
await new Promise((r) => setTimeout(r, backoffMs));
|
||||
attempt++;
|
||||
continue;
|
||||
}
|
||||
return resp;
|
||||
} catch (e: any) {
|
||||
lastError = e;
|
||||
const backoffMs = Math.min(32000, 1000 * Math.pow(2, attempt)) + Math.floor(Math.random() * 250);
|
||||
await new Promise((r) => setTimeout(r, backoffMs));
|
||||
attempt++;
|
||||
}
|
||||
}
|
||||
throw new Error(`fetchFunction failed after retries for ${urlArg}: ${lastError?.message ?? 'unknown error'}`);
|
||||
}
|
||||
}
|
46
ts/interfaces/index.ts
Normal file
46
ts/interfaces/index.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
/** Owner info as embedded in a Gitea repository payload. */
export interface IRepositoryOwner {
  login: string;
}

/** Subset of a Gitea repository API response used by this module. */
export interface IRepository {
  owner: IRepositoryOwner;
  name: string;
}

/** Author fragment of a commit; only the date is consumed here. */
export interface ICommitAuthor {
  // ISO 8601 timestamp string — presumably UTC; confirm against the Gitea API
  date: string;
}

/** The nested `commit` object inside a Gitea commit payload. */
export interface ICommitDetail {
  message: string;
  author: ICommitAuthor;
}

/** A single commit as returned by the Gitea commits endpoint. */
export interface ICommit {
  sha: string;
  commit: ICommitDetail;
}

/** A tag as returned by the Gitea tags endpoint; fields may be absent. */
export interface ITag {
  name?: string;
  commit?: {
    sha?: string;
  };
}

/** Envelope of the Gitea repository search endpoint. */
export interface IRepoSearchResponse {
  data: IRepository[];
}

/** One enriched commit entry produced by the feed. */
export interface ICommitResult {
  // base URL of the Gitea instance the commit came from
  baseUrl: string;
  org: string;
  repo: string;
  // ISO 8601 commit timestamp (author date)
  timestamp: string;
  // commit SHA
  hash: string;
  commitMessage: string;
  // true when a tag points at this commit
  tagged: boolean;
  // true when a matching version was found on the npm registry
  publishedOnNpm: boolean;
  // human-readable "x ago" rendering of the timestamp
  prettyAgoTime: string;
  // changelog entry for the tagged version, when one was found
  changelog: string | undefined;
}
|
21
ts/plugins.ts
Normal file
21
ts/plugins.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
// module
|
||||
import * as interfaces from './interfaces/index.js';
|
||||
|
||||
export {
|
||||
interfaces,
|
||||
}
|
||||
|
||||
// @push.rocks
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
import * as smartnpm from '@push.rocks/smartnpm';
|
||||
import * as smartxml from '@push.rocks/smartxml';
|
||||
import * as smarttime from '@push.rocks/smarttime';
|
||||
import * as lik from '@push.rocks/lik';
|
||||
|
||||
export {
|
||||
qenv,
|
||||
smartnpm,
|
||||
smartxml,
|
||||
smarttime,
|
||||
lik,
|
||||
}
|
@@ -8,7 +8,8 @@
|
||||
"esModuleInterop": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {}
|
||||
"paths": {
|
||||
}
|
||||
},
|
||||
"exclude": [
|
||||
"dist_*/**/*.d.ts"
|
||||
|
Reference in New Issue
Block a user