Compare commits
34 Commits
SHA1:
d0a00aedea
b6af835d3f
c639735f92
e40e008429
6032867a13
b59bd82685
a43114ab61
1e0ccec03e
e5e0ceee78
d9ab609039
aa039e8b5e
f511ab7a63
1df8064247
ac1f398422
3a498c00ee
bb248ed408
e843197211
3502a661ea
d103778a75
9b1b91eb31
25b2519324
166b289eb2
6ca6b37b1d
5d0d125e43
470f4fe730
daeb38c91c
9b46b0d46e
46bd0a2486
d23a27eb66
96ed35e953
e3b51414a9
7b1e9ed072
27dc4dd6aa
76c662356e
changelog.md (102 changes)
@@ -1,5 +1,107 @@
 # Changelog
 
+## 2025-04-25 - 1.7.1 - fix(CodeFeed)
+Improve commit fetching concurrency and add tagged-only commit filtering along with updated documentation and tests
+
+- Updated readme examples to clarify default and options usage, including caching and tagged-only filtering
+- Increased non-exclusive concurrency from 5 to 20 in fetchAllCommitsFromInstance
+- Added tagged-only filtering logic for both cached and non-cached commit results
+- Modified tests to enable tagged-only mode and require npm check
+
+## 2025-04-25 - 1.7.0 - feat(core)
+Enhance commit fetching with caching, concurrency improvements, and dependency upgrades
+
+- Updated development dependencies (@git.zone/tsbuild, @git.zone/tsbundle, @git.zone/tstest, @push.rocks/tapbundle, @types/node) and dependency versions
+- Introduced optional caching options (enableCache, cacheWindowMs, enableNpmCheck) in the CodeFeed constructor to optimize commit retrieval
+- Refactored commit fetching to use AsyncExecutionStack for controlled concurrency and improved performance
+- Removed deprecated ts/codefeed.plugins.ts in favor of a consolidated plugins.ts module
+
+## 2024-12-16 - 1.6.5 - fix(CodeFeed)
+Fixed timestamp initialization and commit fetching timeframe
+
+- Updated the lastRunTimestamp initialization default period from 24 hours to 7 days in CodeFeed constructor.
+- Modified commit fetching logic to consider commits from the last 7 days instead of 24 hours in fetchRecentCommitsForRepo.
+
+## 2024-12-14 - 1.6.4 - fix(core)
+Refactor fetch logic to use a unified fetchFunction for API calls
+
+- Consolidated API request logic in the CodeFeed class to use fetchFunction for improved maintainability.
+
+## 2024-12-14 - 1.6.3 - fix(codefeed)
+Refactor and fix formatting issues in the CodeFeed module
+
+- Refactored various method format and spacing.
+- Fixed error handling formatting for readability.
+- Improved consistency in JSON handling for API responses.
+
+## 2024-12-14 - 1.6.2 - fix(core)
+Fix sorting order of tagged commits by timestamp
+
+- Fixed the sorting order of commits to be by timestamp in descending order after filtering for tagged commits.
+
+## 2024-12-14 - 1.6.1 - fix(docs)
+Updated project metadata and expanded documentation for installation and usage.
+
+- Updated description and keywords in package.json and npmextra.json.
+- Significant expansion of the README.md with detailed installation, usage, and feature instructions.
+
+## 2024-12-14 - 1.6.0 - feat(core)
+Add changelog fetching and parsing functionality
+
+- Implemented loadChangelogFromRepo to directly load the changelog from a Gitea repository.
+- Introduced parsing functionality to extract specific version details from the loaded changelog.
+- Updated CodeFeed class to utilize the changelog for version verification and commit processing.
+
+## 2024-12-14 - 1.5.3 - fix(core)
+Fix filtering logic for returning only tagged commits
+
+- Ensure `allCommits` is filtered to only include commits with 'tagged' status before returning.
+
+## 2024-12-14 - 1.5.2 - fix(core)
+Ensure stability of core functionalities.
+
+
+## 2024-12-14 - 1.5.1 - fix(core)
+Refine logging format in CodeFeed class
+
+- Modified console log format in fetchAllCommitsFromInstance method for better readability.
+
+## 2024-12-14 - 1.5.0 - feat(core)
+Refactor TypeScript interfaces and improve module exports
+
+- Moved TypeScript interfaces to a dedicated file (ts/interfaces/index.ts).
+- Updated import/export structure to improve code readability and maintainability.
+- Enhanced the package.json to utilize a module exports field for better resolution.
+
+## 2024-12-13 - 1.4.1 - fix(core)
+Corrected log formatting for commit information output in CodeFeed
+
+- Fixed formatting issue in commit log output within the CodeFeed class to ensure proper display of timestamps.
+
+## 2024-12-13 - 1.4.0 - feat(CodeFeed)
+Enhance commit results with human-readable time
+
+- Integrated smarttime plugin to calculate and format timestamps into human-readable time.
+- Updated dependencies in package.json to include smarttime and adjusted versions for existing packages.
+- Improved fetchAllCommitsFromInstance method to display formatted time ago information for each commit.
+
+## 2024-12-13 - 1.3.0 - feat(core)
+Export CommitResult interface for external use.
+
+- Changed CommitResult from a local interface to an exported interface, allowing for external usage and integration.
+
+## 2024-12-13 - 1.2.1 - fix(core)
+No changes detected
+
+
+## 2024-12-13 - 1.2.0 - feat(core)
+Add organization-level activity fetching and RSS parsing
+
+- Integrated smartxml package for XML parsing.
+- Implemented fetching of all organizations within a Gitea instance.
+- Added functionality to check new activities in organization RSS feeds.
+- Enhanced fetching logic to include repository commits and tags.
+
 ## 2024-12-13 - 1.1.0 - feat(core)
 Add tracking of commits published on npm
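The 1.7.0 and 1.7.1 entries above introduce constructor options (enableCache, cacheWindowMs, enableNpmCheck, taggedOnly). A minimal sketch of how they combine, based on the constructor signature shown in the readme and ts/index.ts diffs further down; the instance URL and token are placeholders:

```typescript
import { CodeFeed } from '@foss.global/codefeed';

(async () => {
  // keep a 30-day in-memory cache and only surface commits that carry a release tag
  const thirtyDays = 30 * 24 * 60 * 60 * 1000;
  const feed = new CodeFeed(
    'https://your-gitea-instance-url.com', // placeholder Gitea base URL
    process.env.GITEA_TOKEN,               // optional API token
    undefined,                             // lastRunTimestamp; defaults to 7 days ago
    {
      enableCache: true,
      cacheWindowMs: thirtyDays,
      enableNpmCheck: true, // keep published-on-npm lookups
      taggedOnly: true,     // tagged-only filtering added in 1.7.1
    },
  );

  const taggedCommits = await feed.fetchAllCommitsFromInstance();
  console.log(`${taggedCommits.length} tagged commits since the last run`);
})();
```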
npmextra.json
@@ -5,10 +5,23 @@
 "githost": "code.foss.global",
 "gitscope": "foss.global",
 "gitrepo": "codefeed",
-"description": "a module for creating feeds for code development",
+"description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.",
 "npmPackagename": "@foss.global/codefeed",
 "license": "MIT",
-"projectDomain": "foss.global"
+"projectDomain": "foss.global",
+"keywords": [
+"codefeed",
+"Gitea",
+"commits",
+"changelog",
+"repository",
+"development tools",
+"npm",
+"module",
+"code analysis",
+"activity feed",
+"version control"
+]
 }
 },
 "npmci": {
package.json (43 changes)
@@ -1,29 +1,34 @@
 {
 "name": "@foss.global/codefeed",
-"version": "1.1.0",
+"version": "1.7.1",
 "private": false,
-"description": "a module for creating feeds for code development",
+"description": "The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.",
-"main": "dist_ts/index.js",
-"typings": "dist_ts/index.d.ts",
+"exports": {
+".": "./dist_ts/index.js",
+"./interfaces": "./dist_ts/interfaces/index.js"
+},
 "type": "module",
 "author": "Task Venture Capital GmbH",
 "license": "MIT",
 "scripts": {
 "test": "(tstest test/ --web)",
-"build": "(tsbuild --web --allowimplicitany)",
+"build": "(tsbuild tsfolders --web --allowimplicitany)",
 "buildDocs": "(tsdoc)"
 },
 "devDependencies": {
-"@git.zone/tsbuild": "^2.1.25",
+"@git.zone/tsbuild": "^2.3.2",
-"@git.zone/tsbundle": "^2.0.5",
+"@git.zone/tsbundle": "^2.2.5",
 "@git.zone/tsrun": "^1.2.46",
-"@git.zone/tstest": "^1.0.44",
+"@git.zone/tstest": "^1.0.96",
-"@push.rocks/tapbundle": "^5.0.15",
+"@push.rocks/tapbundle": "^5.6.3",
-"@types/node": "^20.8.7"
+"@types/node": "^22.15.2"
 },
 "dependencies": {
+"@push.rocks/lik": "^6.2.2",
 "@push.rocks/qenv": "^6.1.0",
-"@push.rocks/smartnpm": "^2.0.4"
+"@push.rocks/smartnpm": "^2.0.4",
+"@push.rocks/smarttime": "^4.1.1",
+"@push.rocks/smartxml": "^1.1.1"
 },
 "repository": {
 "type": "git",
@@ -44,5 +49,19 @@
 "cli.js",
 "npmextra.json",
 "readme.md"
-]
+],
+"keywords": [
+"codefeed",
+"Gitea",
+"commits",
+"changelog",
+"repository",
+"development tools",
+"npm",
+"module",
+"code analysis",
+"activity feed",
+"version control"
+],
+"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
 }
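The new `exports` map above replaces `main`/`typings`. Assuming the published `dist_ts` build matches those paths, consumers would resolve the root entry and the interfaces subpath roughly like this (a sketch, not part of the diff):

```typescript
// "." -> ./dist_ts/index.js
import { CodeFeed } from '@foss.global/codefeed';
// "./interfaces" -> ./dist_ts/interfaces/index.js
import type { ICommitResult } from '@foss.global/codefeed/interfaces';

// small helper to show the typed result shape in use
const describe = (c: ICommitResult): string =>
  `${c.org}/${c.repo} ${c.hash.slice(0, 10)} (tagged: ${c.tagged})`;
```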
pnpm-lock.yaml (3,422 changes, generated)
File diff suppressed because it is too large.
readme.md (142 changes)
@@ -1,7 +1,143 @@
+```markdown
 # @foss.global/codefeed
 
-a module for creating feeds for code development
+A module for creating feeds for code development.
 
-## How to create the docs
+## Install
 
-To create docs run gitzone aidoc.
+To install the `@foss.global/codefeed` package, you can run the following npm command in your project directory:
+
+```bash
+npm install @foss.global/codefeed
+```
+
+Ensure that you have a compatible version of Node.js installed and that your project is set up to support ECMAScript modules. The `@foss.global/codefeed` module uses ESM syntax.
+
+## Usage
+
+The `@foss.global/codefeed` package is designed to help developers generate feeds for code developments, specifically targeting Gitea repositories. It fetches and processes commit data, changelogs, and repository activities for further analysis or visualization. Here, we'll delve into how you can utilize the different features of the `CodeFeed` class.
+
+### Setting Up CodeFeed
+
+To get started, import the `CodeFeed` class from the module:
+
+```typescript
+import { CodeFeed } from '@foss.global/codefeed';
+```
+
+Then, create an instance of `CodeFeed`. You'll need the base URL of your Gitea instance and optionally an API token if your repositories require authentication.
+
+```typescript
+// default: fetch commits since 7 days ago, no caching or npm checks, include all commits
+const codeFeed = new CodeFeed(
+'https://your-gitea-instance-url.com',
+'your-api-token'
+);
+// with options: cache commits in-memory for 30 days, disable npm lookups, return only tagged commits
+const thirtyDays = 30 * 24 * 60 * 60 * 1000;
+const codeFeedStateful = new CodeFeed(
+'https://your-gitea-instance-url.com',
+'your-api-token',
+undefined, // defaults to 7 days ago
+{
+enableCache: true,
+cacheWindowMs: thirtyDays,
+enableNpmCheck: false,
+taggedOnly: true,
+}
+);
+```
+
+The constructor can also accept a `lastRunTimestamp` which indicates the last time a sync was performed. If not provided, it defaults to one week (7 days) prior to the current time.
+
+### Fetching Commits
+
+One of the core functionalities of CodeFeed is fetching commits from a Gitea instance. By calling `fetchAllCommitsFromInstance`, you can retrieve commits across multiple repositories:
+
+```typescript
+(async () => {
+try {
+const commits = await codeFeed.fetchAllCommitsFromInstance();
+console.log(commits);
+} catch (error) {
+console.error('An error occurred while fetching commits:', error);
+}
+})();
+```
+
+This method scans all organizations and repositories, fetches all commits since the constructor’s `lastRunTimestamp` (default: one week ago), and enriches them with metadata like:
+- Git tags (to detect releases)
+- npm publication status (when enabled)
+- parsed changelog entries (when available)
+
+When `taggedOnly` is enabled, only commits marked as release tags are returned. When `enableCache` is enabled, previously fetched commits are kept in memory (up to `cacheWindowMs`), and only new commits are fetched on subsequent calls.
+
+Each commit object in the resulting array conforms to the `ICommitResult` interface, containing details such as:
+- `baseUrl`
+- `org`
+- `repo`
+- `timestamp`
+- `hash`
+- `commitMessage`
+- `tagged` (boolean)
+- `publishedOnNpm` (boolean)
+- `prettyAgoTime` (human-readable relative time)
+- `changelog` (text from the `changelog.md` associated with a commit)
+
+### Understanding the Data Fetch Process
+
+#### Fetching Organizations
+
+The `fetchAllOrganizations` method collects all organizations within the Gitea instance:
+
+```typescript
+const organizations = await codeFeed.fetchAllOrganizations();
+console.log('Organizations:', organizations);
+```
+
+This method interacts with the Gitea API to pull organization names, aiding further requests that require organization context.
+
+#### Fetching Repositories
+
+Repositories under these organizations can be retrieved using `fetchAllRepositories`:
+
+```typescript
+const repositories = await codeFeed.fetchAllRepositories();
+console.log('Repositories:', repositories);
+```
+
+Here, filtering by organization can help narrow down the scope further when dealing with large instances.
+
+#### Fetching Tags and Commits
+
+To handle repository-specific details, use:
+
+- `fetchTags(owner: string, repo: string)`: Appropriately handles paginated tag data within a repository.
+
+- `fetchRecentCommitsForRepo(owner: string, repo: string)`: Gathers commit data specific to the past 24 hours for a given repository.
+
+```typescript
+const tags = await codeFeed.fetchTags('orgName', 'repoName');
+const recentCommits = await codeFeed.fetchRecentCommitsForRepo('orgName', 'repoName');
+
+console.log('Tags:', tags);
+console.log('Recent Commits:', recentCommits);
+```
+
+### Changelog Integration
+
+Loading changelog content from a repository is integrated into the flow with `loadChangelogFromRepo`. This can be accessed when processing specific commits:
+
+```typescript
+await codeFeed.loadChangelogFromRepo('org', 'repo');
+const changelog = codeFeed.getChangelogForVersion('1.0.0');
+console.log('Changelog for version 1.0.0:', changelog);
+```
+
+### Conclusion
+
+The `@foss.global/codefeed` module provides robust capabilities for extracting and managing feed data related to code developments in Gitea environments. Through systematic setup and leveraging API-driven methods, it becomes a valuable tool for developers aiming to keep track of software progress and changes efficiently. The integration hooks like changelog and npm verification further enrich its utility, offering consolidated insights into each commit's journey from codebase to published package.
+
+Explore integrating these capabilities into your development workflows to enhance tracking, deployment pipelines, or analytics systems within your projects. Remember to always handle API tokens securely and adhere to best practices when managing access to repository resources. Stay updated on any changes or enhancements to this module for further feature exposures or bug fixes. Happy coding!
+```
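The readme above lists the fields on each returned commit but stops short of consuming them. A small sketch that turns the result array into printable feed lines, using only the fields named in that list; the URL and token are placeholders:

```typescript
import { CodeFeed } from '@foss.global/codefeed';

(async () => {
  const feed = new CodeFeed('https://your-gitea-instance-url.com', 'your-api-token');
  const commits = await feed.fetchAllCommitsFromInstance();

  for (const c of commits) {
    // e.g. "foss.global/codefeed · 1.7.1 · 2 days ago · npm: yes"
    console.log(
      `${c.org}/${c.repo} · ${c.commitMessage.trim()} · ${c.prettyAgoTime} · npm: ${
        c.publishedOnNpm ? 'yes' : 'no'
      }`,
    );
    if (c.changelog) {
      console.log(c.changelog); // changelog excerpt, when one was found for this commit
    }
  }
})();
```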
test/test.ts (12 changes)
@@ -9,12 +9,22 @@ let testCodeFeed: codefeed.CodeFeed;
 tap.test('first test', async () => {
 const token = await testQenv.getEnvVarOnDemand('GITEA_TOKEN');
 // console.log('token', token);
-testCodeFeed = new codefeed.CodeFeed('https://code.foss.global', token);
+// seed lastRunTimestamp to 1 year ago and enable in-memory caching for 1 year
+const oneYearMs = 365 * 24 * 60 * 60 * 1000;
+const oneYearAgo = new Date(Date.now() - oneYearMs).toISOString();
+testCodeFeed = new codefeed.CodeFeed(
+'https://code.foss.global',
+token,
+oneYearAgo,
+{ enableCache: true, cacheWindowMs: oneYearMs, enableNpmCheck: true, taggedOnly: true }
+);
 expect(testCodeFeed).toBeInstanceOf(codefeed.CodeFeed);
 });
 
 tap.test('fetchAllCommitsFromInstance', async () => {
 const commits = await testCodeFeed.fetchAllCommitsFromInstance();
+// log the actual results so we can inspect them
+console.log('Fetched commits:', JSON.stringify(commits, null, 2));
 expect(commits).toBeArray();
 expect(commits.length).toBeGreaterThan(0);
 // expect(commits[0]).toBeTypeofObject();
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
 name: '@foss.global/codefeed',
-version: '1.1.0',
+version: '1.7.1',
-description: 'a module for creating feeds for code development'
+description: 'The @foss.global/codefeed module is designed for generating feeds from Gitea repositories, enhancing development workflows by processing commit data and repository activities.'
 }
ts/codefeed.plugins.ts (deleted)
@@ -1,8 +0,0 @@
-// @push.rocks
-import * as qenv from '@push.rocks/qenv'
-import * as smartnpm from '@push.rocks/smartnpm'
-
-export {
-qenv,
-smartnpm,
-}
ts/index.ts (513 changes)
@@ -1,233 +1,330 @@
-import * as plugins from './codefeed.plugins.js';
+import * as plugins from './plugins.js';
 
-interface RepositoryOwner {
-login: string;
-}
-
-interface Repository {
-owner: RepositoryOwner;
-name: string;
-}
-
-interface CommitAuthor {
-date: string;
-}
-
-interface CommitDetail {
-message: string;
-author: CommitAuthor;
-}
-
-interface Commit {
-sha: string;
-commit: CommitDetail;
-}
-
-interface Tag {
-commit?: {
-sha?: string;
-};
-}
-
-interface RepoSearchResponse {
-data: Repository[];
-}
-
-interface CommitResult {
-baseUrl: string;
-org: string;
-repo: string;
-timestamp: string;
-hash: string;
-commitMessage: string;
-tagged: boolean;
-publishedOnNpm: boolean;
-}
-
 export class CodeFeed {
 private baseUrl: string;
 private token?: string;
-private npmRegistry = new plugins.smartnpm.NpmRegistry();
+private lastRunTimestamp: string;
+// Raw changelog content for the current repository
+private changelogContent: string = '';
+// npm registry helper for published-on-npm checks
+private npmRegistry: plugins.smartnpm.NpmRegistry;
+// In-memory stateful cache of commits
+private enableCache: boolean = false;
+private cacheWindowMs?: number;
+private cache: plugins.interfaces.ICommitResult[] = [];
+// enable or disable npm publishedOnNpm checks (true by default)
+private enableNpmCheck: boolean = true;
+// return only tagged commits (false by default)
+private enableTaggedOnly: boolean = false;
 
-constructor(baseUrl: string, token?: string) {
+constructor(
+baseUrl: string,
+token?: string,
+lastRunTimestamp?: string,
+options?: {
+enableCache?: boolean;
+cacheWindowMs?: number;
+enableNpmCheck?: boolean;
+taggedOnly?: boolean;
+}
+) {
 this.baseUrl = baseUrl;
 this.token = token;
-console.log('CodeFeed initialized');
+this.lastRunTimestamp =
+lastRunTimestamp ?? new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();
+// configure stateful caching
+this.enableCache = options?.enableCache ?? false;
+this.cacheWindowMs = options?.cacheWindowMs;
+this.enableNpmCheck = options?.enableNpmCheck ?? true;
+this.enableTaggedOnly = options?.taggedOnly ?? false;
+this.cache = [];
+// npm registry instance for version lookups
+this.npmRegistry = new plugins.smartnpm.NpmRegistry();
+console.log('CodeFeed initialized with last run timestamp:', this.lastRunTimestamp);
 }
 
-private async fetchAllRepositories(): Promise<Repository[]> {
-let page = 1;
-const allRepos: Repository[] = [];
-
-while (true) {
-const url = new URL(`${this.baseUrl}/api/v1/repos/search`);
-url.searchParams.set('limit', '50');
-url.searchParams.set('page', page.toString());
-
-const resp = await fetch(url.href, {
-headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
-});
-
-if (!resp.ok) {
-throw new Error(`Failed to fetch repositories: ${resp.statusText}`);
-}
-
-const data: RepoSearchResponse = await resp.json();
-allRepos.push(...data.data);
-
-if (data.data.length < 50) {
-break;
-}
-page++;
-}
-
-return allRepos;
-}
-
-private async fetchTags(owner: string, repo: string): Promise<Set<string>> {
-let page = 1;
-const tags: Tag[] = [];
-
-while (true) {
-const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/tags`);
-url.searchParams.set('limit', '50');
-url.searchParams.set('page', page.toString());
-
-const resp = await fetch(url.href, {
-headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
-});
-
-if (!resp.ok) {
-console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
-throw new Error(`Failed to fetch tags for ${owner}/${repo}: ${resp.statusText}`);
-}
-
-const data: Tag[] = await resp.json();
-tags.push(...data);
-
-if (data.length < 50) {
-break;
-}
-page++;
-}
-
-const taggedCommitShas = new Set<string>();
-for (const t of tags) {
-if (t.commit?.sha) {
-taggedCommitShas.add(t.commit.sha);
-}
-}
-
-return taggedCommitShas;
-}
-
-private async fetchRecentCommitsForRepo(owner: string, repo: string): Promise<Commit[]> {
-const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
-let page = 1;
-const recentCommits: Commit[] = [];
-
-while (true) {
-const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/commits`);
-url.searchParams.set('limit', '1');
-url.searchParams.set('page', page.toString());
-
-const resp = await fetch(url.href, {
-headers: this.token ? { 'Authorization': `token ${this.token}` } : {}
-});
-if (!resp.ok) {
-console.error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
-throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.statusText}`);
-}
-
-const data: Commit[] = await resp.json();
-if (data.length === 0) {
-break;
-}
-
-for (const commit of data) {
-const commitDate = new Date(commit.commit.author.date);
-if (commitDate > twentyFourHoursAgo) {
-recentCommits.push(commit);
-} else {
-// If we encounter a commit older than 24 hours, we can stop fetching more pages
-return recentCommits;
-}
-}
-
-page++;
-}
-
-return recentCommits;
-}
-
-public async fetchAllCommitsFromInstance(): Promise<CommitResult[]> {
-const repos = await this.fetchAllRepositories();
-const skippedRepos: string[] = [];
-console.log(`Found ${repos.length} repositories`);
-let allCommits: CommitResult[] = [];
-
-for (const r of repos) {
-const org = r.owner.login;
-const repo = r.name;
-console.log(`Processing repository ${org}/${repo}`);
-
-try {
-const taggedCommitShas = await this.fetchTags(org, repo);
-const commits = await this.fetchRecentCommitsForRepo(org, repo);
-console.log(`${org}/${repo} -> Found ${commits.length} commits`);
-const commitResults: CommitResult[] = [];
-for (const c of commits) {
-const commit: CommitResult = {
-baseUrl: this.baseUrl,
-org,
-repo,
-timestamp: c.commit.author.date,
-hash: c.sha,
-commitMessage: c.commit.message,
-tagged: taggedCommitShas.has(c.sha),
-publishedOnNpm: false,
-}
-commitResults.push(commit);
-}
-
-if (commitResults.length > 0) {
-try {
-const packageInfo = await this.npmRegistry.getPackageInfo(`@${org}/${repo}`);
-for (const commit of commitResults.filter(c => c.tagged)) {
-const correspondingVersion = packageInfo.allVersions.find(versionArg => {
-return versionArg.version === commit.commitMessage.replace('\n', '');
-});
-if (correspondingVersion) {
-commit.publishedOnNpm = true;
-}
-}
-} catch (error: any) {
-console.error(`Failed to fetch package info for ${org}/${repo}:`, error.message);
-continue;
-}
-}
-
-allCommits.push(...commitResults);
-} catch (error: any) {
-skippedRepos.push(`${org}/${repo}`);
-console.error(`Skipping repository ${org}/${repo} due to error:`, error.message);
-continue;
-}
-}
-
-console.log(`Found ${allCommits.length} relevant commits`);
-console.log(`Skipped ${skippedRepos.length} repositories due to errors`);
-for (const s of skippedRepos) {
-console.log(`Skipped ${s}`);
-}
-for (const c of allCommits) {
-console.log(`______________________________________________________
-Commit ${c.hash} by ${c.org}/${c.repo} at ${c.timestamp}
-${c.commitMessage}
-Published on npm: ${c.publishedOnNpm}
-`);
-}
-
-return allCommits;
-}
+/**
+* Fetch all new commits (since lastRunTimestamp) across all orgs and repos.
+*/
+public async fetchAllCommitsFromInstance(): Promise<plugins.interfaces.ICommitResult[]> {
+// Controlled concurrency with AsyncExecutionStack
+const stack = new plugins.lik.AsyncExecutionStack();
+stack.setNonExclusiveMaxConcurrency(20);
+// determine since timestamp for this run (stateful caching)
+let effectiveSince = this.lastRunTimestamp;
+if (this.enableCache && this.cache.length > 0) {
+// use newest timestamp in cache to fetch only tail
+effectiveSince = this.cache.reduce(
+(max, c) => (c.timestamp > max ? c.timestamp : max),
+effectiveSince
+);
+}
+
+// 1) get all organizations
+const orgs = await this.fetchAllOrganizations();
+
+// 2) fetch repos per org in parallel
+const repoLists = await Promise.all(
+orgs.map((org) =>
+stack.getNonExclusiveExecutionSlot(() => this.fetchRepositoriesForOrg(org))
+)
+);
+// flatten to [{ owner, name }]
+const allRepos = orgs.flatMap((org, i) =>
+repoLists[i].map((r) => ({ owner: org, name: r.name }))
+);
+
+// 3) probe latest commit per repo and fetch full list only if new commits exist
+const commitJobs = allRepos.map(({ owner, name }) =>
+stack.getNonExclusiveExecutionSlot(async () => {
+try {
+// 3a) Probe the most recent commit (limit=1)
+const probeResp = await this.fetchFunction(
+`/api/v1/repos/${owner}/${name}/commits?limit=1`,
+{ headers: this.token ? { Authorization: `token ${this.token}` } : {} }
+);
+if (!probeResp.ok) {
+throw new Error(`Probe failed for ${owner}/${name}: ${probeResp.statusText}`);
+}
+const probeData: plugins.interfaces.ICommit[] = await probeResp.json();
+// If no commits or no new commits since last run, skip
+if (
+probeData.length === 0 ||
+new Date(probeData[0].commit.author.date).getTime() <=
+new Date(effectiveSince).getTime()
+) {
+return { owner, name, commits: [] };
+}
+// 3b) Fetch commits since last run
+const commits = await this.fetchRecentCommitsForRepo(
+owner,
+name,
+effectiveSince
+);
+return { owner, name, commits };
+} catch (e: any) {
+console.error(`Failed to fetch commits for ${owner}/${name}:`, e.message);
+return { owner, name, commits: [] };
+}
+})
+);
+const commitResults = await Promise.all(commitJobs);
+
+// 4) build new commit entries with tagging, npm and changelog support
+const newResults: plugins.interfaces.ICommitResult[] = [];
+for (const { owner, name, commits } of commitResults) {
+// skip repos with no new commits
+if (commits.length === 0) {
+this.changelogContent = '';
+continue;
+}
+// load changelog for this repo
+await this.loadChangelogFromRepo(owner, name);
+// fetch tags for this repo
+let taggedShas: Set<string>;
+try {
+taggedShas = await this.fetchTags(owner, name);
+} catch (e: any) {
+console.error(`Failed to fetch tags for ${owner}/${name}:`, e.message);
+taggedShas = new Set<string>();
+}
+// fetch npm package info only if any new commits correspond to a tag
+const hasTaggedCommit = commits.some((c) => taggedShas.has(c.sha));
+let pkgInfo: { allVersions: Array<{ version: string }> } | null = null;
+if (hasTaggedCommit && this.enableNpmCheck) {
+try {
+pkgInfo = await this.npmRegistry.getPackageInfo(`@${owner}/${name}`);
+} catch (e: any) {
+console.error(`Failed to fetch package info for ${owner}/${name}:`, e.message);
+pkgInfo = null;
+}
+}
+// build commit entries
+for (const c of commits) {
+const versionCandidate = c.commit.message.replace(/\n/g, '').trim();
+const isTagged = taggedShas.has(c.sha);
+const publishedOnNpm = isTagged && pkgInfo
+? pkgInfo.allVersions.some((v) => v.version === versionCandidate)
+: false;
+let changelogEntry: string | undefined;
+if (this.changelogContent) {
+changelogEntry = this.getChangelogForVersion(versionCandidate);
+}
+newResults.push({
+baseUrl: this.baseUrl,
+org: owner,
+repo: name,
+timestamp: c.commit.author.date,
+prettyAgoTime: plugins.smarttime.getMilliSecondsAsHumanReadableAgoTime(
+new Date(c.commit.author.date).getTime()
+),
+hash: c.sha,
+commitMessage: c.commit.message,
+tagged: isTagged,
+publishedOnNpm,
+changelog: changelogEntry,
+});
+}
+}
+// if caching is enabled, merge into in-memory cache and return full cache
+if (this.enableCache) {
+const existingHashes = new Set(this.cache.map((c) => c.hash));
+const uniqueNew = newResults.filter((c) => !existingHashes.has(c.hash));
+this.cache.push(...uniqueNew);
+// trim commits older than window
+if (this.cacheWindowMs !== undefined) {
+const cutoff = Date.now() - this.cacheWindowMs;
+this.cache = this.cache.filter((c) => new Date(c.timestamp).getTime() >= cutoff);
+}
+// advance lastRunTimestamp to now
+this.lastRunTimestamp = new Date().toISOString();
+// sort descending by timestamp
+this.cache.sort((a, b) => b.timestamp.localeCompare(a.timestamp));
+// apply tagged-only filter if requested
+if (this.enableTaggedOnly) {
+return this.cache.filter((c) => c.tagged === true);
+}
+return this.cache;
+}
+// no caching: apply tagged-only filter if requested
+if (this.enableTaggedOnly) {
+return newResults.filter((c) => c.tagged === true);
+}
+return newResults;
+}
+
+/**
+* Load the changelog directly from the Gitea repository.
+*/
+private async loadChangelogFromRepo(owner: string, repo: string): Promise<void> {
+const url = `/api/v1/repos/${owner}/${repo}/contents/changelog.md`;
+const headers: Record<string, string> = {};
+if (this.token) {
+headers['Authorization'] = `token ${this.token}`;
+}
+
+const response = await this.fetchFunction(url, { headers });
+if (!response.ok) {
+console.error(
+`Could not fetch CHANGELOG.md from ${owner}/${repo}: ${response.status} ${response.statusText}`
+);
+this.changelogContent = '';
+return;
+}
+
+const data = await response.json();
+if (!data.content) {
+console.warn(`No content field found in response for ${owner}/${repo}/changelog.md`);
+this.changelogContent = '';
+return;
+}
+
+// decode base64 content
+this.changelogContent = Buffer.from(data.content, 'base64').toString('utf8');
+}
+
+/**
+* Parse the changelog to find the entry for a given version.
+* The changelog format is assumed as:
+*
+* # Changelog
+*
+* ## <date> - <version> - <description>
+* <changes...>
+*/
+private getChangelogForVersion(version: string): string | undefined {
+if (!this.changelogContent) {
+return undefined;
+}
+const lines = this.changelogContent.split('\n');
+const versionHeaderIndex = lines.findIndex((line) => line.includes(`- ${version} -`));
+if (versionHeaderIndex === -1) {
+return undefined;
+}
+
+const changelogLines: string[] = [];
+for (let i = versionHeaderIndex + 1; i < lines.length; i++) {
+const line = lines[i];
+// The next version header starts with `## `
+if (line.startsWith('## ')) {
+break;
+}
+changelogLines.push(line);
+}
+
+return changelogLines.join('\n').trim();
+}
+/**
+* Fetch all tags for a given repo and return the set of tagged commit SHAs
+*/
+private async fetchTags(owner: string, repo: string): Promise<Set<string>> {
+const taggedShas = new Set<string>();
+let page = 1;
+while (true) {
+const url = `/api/v1/repos/${owner}/${repo}/tags?limit=50&page=${page}`;
+const resp = await this.fetchFunction(url, {
+headers: this.token ? { Authorization: `token ${this.token}` } : {},
+});
+if (!resp.ok) {
+console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText}`);
+return taggedShas;
+}
+const data: plugins.interfaces.ITag[] = await resp.json();
+if (data.length === 0) break;
+for (const t of data) {
+if (t.commit?.sha) taggedShas.add(t.commit.sha);
+}
+if (data.length < 50) break;
+page++;
+}
+return taggedShas;
+}
+
+private async fetchAllOrganizations(): Promise<string[]> {
+const resp = await this.fetchFunction('/api/v1/orgs', {
+headers: this.token ? { Authorization: `token ${this.token}` } : {},
+});
+if (!resp.ok) {
+throw new Error(`Failed to fetch organizations: ${resp.statusText}`);
+}
+const data: { username: string }[] = await resp.json();
+return data.map((o) => o.username);
+}
+
+private async fetchRepositoriesForOrg(org: string): Promise<plugins.interfaces.IRepository[]> {
+const resp = await this.fetchFunction(`/api/v1/orgs/${org}/repos?limit=50`, {
+headers: this.token ? { Authorization: `token ${this.token}` } : {},
+});
+if (!resp.ok) {
+throw new Error(`Failed to fetch repositories for ${org}: ${resp.statusText}`);
+}
+const data: plugins.interfaces.IRepository[] = await resp.json();
+return data;
+}
+
+private async fetchRecentCommitsForRepo(
+owner: string,
+repo: string,
+sinceTimestamp?: string
+): Promise<plugins.interfaces.ICommit[]> {
+const since = sinceTimestamp ?? this.lastRunTimestamp;
+const resp = await this.fetchFunction(
+`/api/v1/repos/${owner}/${repo}/commits?since=${encodeURIComponent(
+since
+)}&limit=50`,
+{ headers: this.token ? { Authorization: `token ${this.token}` } : {} }
+);
+if (!resp.ok) {
+throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.statusText}`);
+}
+const data: plugins.interfaces.ICommit[] = await resp.json();
+return data;
+}
+
+public async fetchFunction(
+urlArg: string,
+optionsArg: RequestInit = {}
+): Promise<Response> {
+return fetch(`${this.baseUrl}${urlArg}`, optionsArg);
+}
 }
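The rewritten fetch path above caps Gitea API concurrency with @push.rocks/lik's AsyncExecutionStack (setNonExclusiveMaxConcurrency(20) plus one non-exclusive slot per request). Isolated from the CodeFeed specifics, the pattern looks roughly like this sketch; the host name and repo names are placeholders, and the method names are the ones used in the diff above:

```typescript
import * as lik from '@push.rocks/lik';

(async () => {
  // allow at most 20 requests in flight at any time, as in the diff above
  const stack = new lik.AsyncExecutionStack();
  stack.setNonExclusiveMaxConcurrency(20);

  const repos = ['org-a/repo-1', 'org-a/repo-2', 'org-b/repo-3']; // hypothetical repos
  const probes = await Promise.all(
    repos.map((repo) =>
      stack.getNonExclusiveExecutionSlot(async () => {
        // probe only the latest commit, mirroring step 3a of fetchAllCommitsFromInstance
        const resp = await fetch(`https://gitea.example.com/api/v1/repos/${repo}/commits?limit=1`);
        return { repo, hasCommits: resp.ok && (await resp.json()).length > 0 };
      }),
    ),
  );
  console.log(probes);
})();
```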
ts/interfaces/index.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
+export interface IRepositoryOwner {
+login: string;
+}
+
+export interface IRepository {
+owner: IRepositoryOwner;
+name: string;
+}
+
+export interface ICommitAuthor {
+date: string;
+}
+
+export interface ICommitDetail {
+message: string;
+author: ICommitAuthor;
+}
+
+export interface ICommit {
+sha: string;
+commit: ICommitDetail;
+}
+
+export interface ITag {
+commit?: {
+sha?: string;
+};
+}
+
+export interface IRepoSearchResponse {
+data: IRepository[];
+}
+
+export interface ICommitResult {
+baseUrl: string;
+org: string;
+repo: string;
+timestamp: string;
+hash: string;
+commitMessage: string;
+tagged: boolean;
+publishedOnNpm: boolean;
+prettyAgoTime: string;
+changelog: string | undefined;
+}
ts/plugins.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+// module
+import * as interfaces from './interfaces/index.js';
+
+export {
+interfaces,
+}
+
+// @push.rocks
+import * as qenv from '@push.rocks/qenv';
+import * as smartnpm from '@push.rocks/smartnpm';
+import * as smartxml from '@push.rocks/smartxml';
+import * as smarttime from '@push.rocks/smarttime';
+import * as lik from '@push.rocks/lik';
+
+export {
+qenv,
+smartnpm,
+smartxml,
+smarttime,
+lik,
+}
tsconfig.json
@@ -8,7 +8,8 @@
 "esModuleInterop": true,
 "verbatimModuleSyntax": true,
 "baseUrl": ".",
-"paths": {}
+"paths": {
+}
 },
 "exclude": [
 "dist_*/**/*.d.ts"
Loading…
x
Reference in New Issue
Block a user