import * as plugins from './codefeed.plugins.js';
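
// `plugins` is a barrel module; the only export used in this file is
// `smartnpm`, presumably an npm registry client exposing `NpmRegistry`
// with the `getPackageInfo()` lookup used in fetchAllCommitsFromInstance().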

// Minimal shapes of the API payloads consumed below; only the fields that
// CodeFeed actually reads are declared.
interface RepositoryOwner {
  login: string;
}

interface Repository {
  owner: RepositoryOwner;
  name: string;
}

interface CommitAuthor {
  date: string;
}

interface CommitDetail {
  message: string;
  author: CommitAuthor;
}

interface Commit {
  sha: string;
  commit: CommitDetail;
}

interface Tag {
  commit?: {
    sha?: string;
  };
}

interface RepoSearchResponse {
  data: Repository[];
}

// Normalized record emitted for every commit found on the instance.
interface CommitResult {
  baseUrl: string;
  org: string;
  repo: string;
  timestamp: string;
  hash: string;
  commitMessage: string;
  tagged: boolean;
  publishedOnNpm: boolean;
}

/**
 * Collects recent commit activity from a Gitea instance.
 *
 * For every repository it gathers the commits of the last 24 hours, marks
 * the ones that are tagged, and checks whether a matching version has been
 * published on npm.
 */
export class CodeFeed {
  private baseUrl: string;
  private token?: string;
  private npmRegistry = new plugins.smartnpm.NpmRegistry();

  constructor(baseUrl: string, token?: string) {
    this.baseUrl = baseUrl;
    this.token = token;
    console.log('CodeFeed initialized');
  }

  /**
   * Fetches all repositories on the instance via /api/v1/repos/search,
   * paging through the results 50 at a time.
   */
  private async fetchAllRepositories(): Promise<Repository[]> {
    let page = 1;
    const allRepos: Repository[] = [];

    while (true) {
      const url = new URL(`${this.baseUrl}/api/v1/repos/search`);
      url.searchParams.set('limit', '50');
      url.searchParams.set('page', page.toString());

      const resp = await fetch(url.href, {
        headers: this.token ? { Authorization: `token ${this.token}` } : {},
      });

      if (!resp.ok) {
        throw new Error(`Failed to fetch repositories: ${resp.statusText}`);
      }

      const data: RepoSearchResponse = await resp.json();
      allRepos.push(...data.data);

      // A page shorter than the limit means this was the last page.
      if (data.data.length < 50) {
        break;
      }
      page++;
    }

    return allRepos;
  }

  /**
   * Fetches every tag of a repository and returns the set of commit SHAs
   * that those tags point at.
   */
  private async fetchTags(owner: string, repo: string): Promise<Set<string>> {
    let page = 1;
    const tags: Tag[] = [];

    while (true) {
      const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/tags`);
      url.searchParams.set('limit', '50');
      url.searchParams.set('page', page.toString());

      const resp = await fetch(url.href, {
        headers: this.token ? { Authorization: `token ${this.token}` } : {},
      });

      if (!resp.ok) {
        console.error(`Failed to fetch tags for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
        throw new Error(`Failed to fetch tags for ${owner}/${repo}: ${resp.statusText}`);
      }

      const data: Tag[] = await resp.json();
      tags.push(...data);

      if (data.length < 50) {
        break;
      }
      page++;
    }

    const taggedCommitShas = new Set<string>();
    for (const t of tags) {
      if (t.commit?.sha) {
        taggedCommitShas.add(t.commit.sha);
      }
    }

    return taggedCommitShas;
  }

  /**
   * Fetches the commits of the last 24 hours for a single repository.
   * Commits are requested one per page, and paging stops as soon as a
   * commit older than the cutoff is encountered.
   */
  private async fetchRecentCommitsForRepo(owner: string, repo: string): Promise<Commit[]> {
    const twentyFourHoursAgo = new Date(Date.now() - 24 * 60 * 60 * 1000);
    let page = 1;
    const recentCommits: Commit[] = [];

    while (true) {
      const url = new URL(`${this.baseUrl}/api/v1/repos/${owner}/${repo}/commits`);
      url.searchParams.set('limit', '1');
      url.searchParams.set('page', page.toString());

      const resp = await fetch(url.href, {
        headers: this.token ? { Authorization: `token ${this.token}` } : {},
      });

      if (!resp.ok) {
        console.error(`Failed to fetch commits for ${owner}/${repo}: ${resp.status} ${resp.statusText} at ${url.href}`);
        throw new Error(`Failed to fetch commits for ${owner}/${repo}: ${resp.statusText}`);
      }

      const data: Commit[] = await resp.json();
      if (data.length === 0) {
        break;
      }

      for (const commit of data) {
        const commitDate = new Date(commit.commit.author.date);
        if (commitDate > twentyFourHoursAgo) {
          recentCommits.push(commit);
        } else {
          // Commits arrive newest first, so the first commit older than
          // 24 hours means no further pages need to be fetched.
          return recentCommits;
        }
      }

      page++;
    }

    return recentCommits;
  }

  /**
   * Fetches the last 24 hours of commits across all repositories of the
   * instance and enriches them with tag and npm publication information.
   */
  public async fetchAllCommitsFromInstance(): Promise<CommitResult[]> {
    const repos = await this.fetchAllRepositories();
    const skippedRepos: string[] = [];
    console.log(`Found ${repos.length} repositories`);
    const allCommits: CommitResult[] = [];

    for (const r of repos) {
      const org = r.owner.login;
      const repo = r.name;
      console.log(`Processing repository ${org}/${repo}`);

      try {
        const taggedCommitShas = await this.fetchTags(org, repo);
        const commits = await this.fetchRecentCommitsForRepo(org, repo);
        console.log(`${org}/${repo} -> Found ${commits.length} commits`);

        const commitResults: CommitResult[] = [];
        for (const c of commits) {
          const commit: CommitResult = {
            baseUrl: this.baseUrl,
            org,
            repo,
            timestamp: c.commit.author.date,
            hash: c.sha,
            commitMessage: c.commit.message,
            tagged: taggedCommitShas.has(c.sha),
            publishedOnNpm: false,
          };
          commitResults.push(commit);
        }

        if (commitResults.length > 0) {
          try {
            // The npm package is expected to live under the organization scope,
            // i.e. @org/repo.
            const packageInfo = await this.npmRegistry.getPackageInfo(`@${org}/${repo}`);
            for (const commit of commitResults.filter((c) => c.tagged)) {
              // Tagged release commits are expected to carry the bare version
              // string as their commit message.
              const correspondingVersion = packageInfo.allVersions.find((versionArg) => {
                return versionArg.version === commit.commitMessage.replace('\n', '');
              });
              if (correspondingVersion) {
                commit.publishedOnNpm = true;
              }
            }
          } catch (error: any) {
            // If the npm lookup fails, skip this repository's commits entirely.
            console.error(`Failed to fetch package info for ${org}/${repo}:`, error.message);
            continue;
          }
        }

        allCommits.push(...commitResults);
      } catch (error: any) {
        skippedRepos.push(`${org}/${repo}`);
        console.error(`Skipping repository ${org}/${repo} due to error:`, error.message);
        continue;
      }
    }

    console.log(`Found ${allCommits.length} relevant commits`);
    console.log(`Skipped ${skippedRepos.length} repositories due to errors`);
    for (const s of skippedRepos) {
      console.log(`Skipped ${s}`);
    }
    for (const c of allCommits) {
      console.log(`______________________________________________________
Commit ${c.hash} by ${c.org}/${c.repo} at ${c.timestamp}
${c.commitMessage}
Published on npm: ${c.publishedOnNpm}
`);
    }

    return allCommits;
  }
}
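
// Minimal usage sketch, kept as a comment so this module stays side-effect
// free. The import path, instance URL, and environment variable below are
// placeholders for illustration, not values defined by this file.
//
//   import { CodeFeed } from './classes.codefeed.js';
//
//   const feed = new CodeFeed('https://gitea.example.com', process.env.GITEA_TOKEN);
//   const commits = await feed.fetchAllCommitsFromInstance();
//   const tagged = commits.filter((c) => c.tagged);
//   console.log(`${tagged.length} of ${commits.length} recent commits are tagged releases`);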