Compare commits

...

10 Commits

Author SHA1 Message Date

4854d27a19  4.1.37  2024-11-17 01:01:22 +01:00
            Some checks failed: Default (tags) / security (push) failing after 15s; test (push) successful in 1m28s; metadata (push) failing after 11s; release (push) failing after 22s
75a0e8a7d8  fix(docker): Enhanced base image extraction logic from Dockerfile  2024-11-17 01:01:22 +01:00
43eb19f772  4.1.36  2024-11-17 00:50:43 +01:00
            Some checks failed: Default (tags) / security (push) failing after 16s; test (push) successful in 1m26s; metadata (push) failing after 11s; release (push) failing after 21s
dc2665d250  fix(docker): Improve logging for Dockerfile build order with base image details.  2024-11-17 00:50:43 +01:00
00f324e151  4.1.35  2024-11-17 00:32:56 +01:00
            Some checks failed: Default (tags) / security (push) failing after 16s; test (push) successful in 1m32s; metadata (push) failing after 10s; release (push) failing after 21s
e38cc40f11  fix(docker): Fix Dockerfile dependency sorting and enhance environment variable handling for GitHub repos  2024-11-17 00:32:56 +01:00
e9e8acafe4  4.1.34  2024-11-05 02:39:53 +01:00
c763db40bb  fix(connector): Remove unused typedrequest implementation in cloudlyconnector  2024-11-05 02:39:53 +01:00
01256480c4  4.1.33  2024-11-05 02:38:21 +01:00
c6918399bf  fix(core): Updated dependencies and improved npm preparation logic.  2024-11-05 02:38:21 +01:00
10 changed files with 7085 additions and 2869 deletions

View File

@@ -1,5 +1,39 @@
 # Changelog
+
+## 2024-11-17 - 4.1.37 - fix(docker)
+Enhanced base image extraction logic from Dockerfile
+
+- Improved dockerBaseImage to accurately extract base images considering ARG variables.
+- Added support for parsing Dockerfile content without external libraries.
+- Enhanced error handling for missing FROM instructions.
+
+## 2024-11-17 - 4.1.36 - fix(docker)
+Improve logging for Dockerfile build order with base image details.
+
+- Enhance logging in Dockerfile sorting process to include base image information.
+
+## 2024-11-17 - 4.1.35 - fix(docker)
+Fix Dockerfile dependency sorting and enhance environment variable handling for GitHub repos
+
+- Refined the algorithm for sorting Dockerfiles based on dependencies to ensure proper build order.
+- Enhanced environment variable handling in the NpmciEnv class to support conditional assignments.
+- Updated various dependencies in package.json for improved performance and compatibility.
+- Added error handling to circular dependency detection in Dockerfile sorting.
+
+## 2024-11-05 - 4.1.34 - fix(connector)
+Remove unused typedrequest implementation in cloudlyconnector
+
+- Removed commented out code that initialized typedrequest in CloudlyConnector.
+
+## 2024-11-05 - 4.1.33 - fix(core)
+Updated dependencies and improved npm preparation logic.
+
+- Updated @git.zone/tsbuild from ^2.1.84 to ^2.2.0.
+- Updated @git.zone/tsrun from ^1.2.49 to ^1.3.3.
+- Updated @types/node from ^22.7.9 to ^22.8.7.
+- Updated @serve.zone/api from ^1.2.1 to ^4.3.1.
+- Improved npm preparation logic to handle empty tokens gracefully.
+
 ## 2024-10-23 - 4.1.32 - fix(dependencies)
 Update project dependencies to latest versions
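
The 4.1.37 entry above describes ARG-aware base image extraction. Below is a minimal standalone sketch of that kind of parsing; the sample Dockerfile content, the helper name `extractBaseImage`, and the expected output are illustrative assumptions, not project code (the shipped implementation is in the Dockerfile class diff further down).

```ts
// Minimal sketch of ARG-aware FROM extraction (sample Dockerfile content is invented).
const dockerfileContent = [
  'ARG NODE_VERSION=20',
  '# build stage',
  'FROM node:${NODE_VERSION:-18}-alpine AS builder',
  'RUN npm ci',
].join('\n');

function extractBaseImage(content: string): string {
  const args: Record<string, string> = {};
  for (const rawLine of content.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (line === '' || line.startsWith('#')) continue;
    // Remember ARG values so they can be substituted into the FROM line.
    const argMatch = line.match(/^ARG\s+([^\s=]+)(?:=(.*))?$/i);
    if (argMatch) {
      args[argMatch[1]] = argMatch[2] ?? process.env[argMatch[1]] ?? '';
      continue;
    }
    // First FROM instruction wins; an optional "AS <stage>" suffix is ignored.
    const fromMatch = line.match(/^FROM\s+(.+?)(?:\s+AS\s+\S+)?$/i);
    if (fromMatch) {
      // Substitute ${VAR} and ${VAR:-default} occurrences.
      return fromMatch[1].trim().replace(
        /\$\{([^}:]+)(?::-([^}]+))?\}/g,
        (_, name, fallback) => args[name] ?? fallback ?? ''
      );
    }
  }
  throw new Error('No FROM instruction found');
}

console.log(extractBaseImage(dockerfileContent)); // -> node:20-alpine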

View File

@@ -1,6 +1,6 @@
 {
   "name": "@ship.zone/npmci",
-  "version": "4.1.32",
+  "version": "4.1.37",
   "private": false,
   "description": "A tool to streamline Node.js and Docker workflows within CI environments, particularly GitLab CI, providing various CI/CD utilities.",
   "main": "dist_ts/index.js",
@@ -26,16 +26,16 @@
   },
   "homepage": "https://gitlab.com/gitzone/npmci#README",
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.84",
-    "@git.zone/tsrun": "^1.2.49",
+    "@git.zone/tsbuild": "^2.2.0",
+    "@git.zone/tsrun": "^1.3.3",
     "@git.zone/tstest": "^1.0.77",
-    "@push.rocks/tapbundle": "^5.3.0",
-    "@types/node": "^22.7.9"
+    "@push.rocks/tapbundle": "^5.5.0",
+    "@types/node": "^22.9.0"
   },
   "dependencies": {
     "@api.global/typedrequest": "^3.1.10",
     "@push.rocks/lik": "^6.1.0",
-    "@push.rocks/npmextra": "^5.0.23",
+    "@push.rocks/npmextra": "^5.1.2",
     "@push.rocks/projectinfo": "^5.0.2",
     "@push.rocks/qenv": "^6.0.2",
     "@push.rocks/smartanalytics": "^2.0.15",
@@ -49,12 +49,12 @@
     "@push.rocks/smartobject": "^1.0.12",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.4",
-    "@push.rocks/smartrequest": "^2.0.18",
+    "@push.rocks/smartrequest": "^2.0.23",
     "@push.rocks/smartshell": "^3.0.6",
     "@push.rocks/smartsocket": "^2.0.22",
     "@push.rocks/smartssh": "^2.0.1",
     "@push.rocks/smartstring": "^4.0.8",
-    "@serve.zone/api": "^1.2.1",
+    "@serve.zone/api": "^4.3.11",
     "@tsclass/tsclass": "^4.1.2",
     "@types/through2": "^2.0.38",
     "through2": "^4.0.2"

pnpm-lock.yaml (generated): 9495 lines changed

File diff suppressed because it is too large.

View File

@@ -1 +1,9 @@
-{}
+{
+  "gitzone": {
+    "module": {
+      "githost": "code.foss.global",
+      "gitscope": "mygroup",
+      "gitrepo": "myrepo"
+    }
+  }
+}

View File

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@ship.zone/npmci',
-  version: '4.1.32',
+  version: '4.1.37',
   description: 'A tool to streamline Node.js and Docker workflows within CI environments, particularly GitLab CI, providing various CI/CD utilities.'
 }

View File

@@ -26,14 +26,6 @@ export class CloudlyConnector {
       return;
     }
 
-    const typedrequest =
-      new plugins.typedrequest.TypedRequest<plugins.servezoneInterfaces.requests.IRequest_InformAboutNewContainerImage>(
-        `https://${cloudlyUrl}/typedrequest`,
-        'servezonestandard_InformAboutNewContainerVersion'
-      );
-    const response = await typedrequest.fire({
-      containerImageInfo: optionsArg,
-    });
+    // lets push to cloudly here
   }
 }

View File

@@ -40,43 +40,76 @@ export class Dockerfile {
   }
 
   /**
-   * sorts Dockerfiles into a dependency chain
-   * @param sortableArrayArg an array of instances of class Dockerfile
-   * @returns Promise<Dockerfile[]>
+   * Sorts Dockerfiles into a build order based on dependencies.
+   * @param dockerfiles An array of Dockerfile instances.
+   * @returns A Promise that resolves to a sorted array of Dockerfiles.
    */
-  public static async sortDockerfiles(sortableArrayArg: Dockerfile[]): Promise<Dockerfile[]> {
-    const done = plugins.smartpromise.defer<Dockerfile[]>();
-    logger.log('info', 'sorting Dockerfiles:');
-    const sortedArray: Dockerfile[] = [];
-    const cleanTagsOriginal = Dockerfile.cleanTagsArrayFunction(sortableArrayArg, sortedArray);
-    let sorterFunctionCounter: number = 0;
-    const sorterFunction = () => {
-      sortableArrayArg.forEach((dockerfileArg) => {
-        const cleanTags = Dockerfile.cleanTagsArrayFunction(sortableArrayArg, sortedArray);
-        if (
-          cleanTags.indexOf(dockerfileArg.baseImage) === -1 &&
-          sortedArray.indexOf(dockerfileArg) === -1
-        ) {
-          sortedArray.push(dockerfileArg);
-        }
-        if (cleanTagsOriginal.indexOf(dockerfileArg.baseImage) !== -1) {
-          dockerfileArg.localBaseImageDependent = true;
-        }
-      });
-      if (sortableArrayArg.length === sortedArray.length) {
-        let counter = 1;
-        for (const dockerfile of sortedArray) {
-          logger.log('info', `tag ${counter}: -> ${dockerfile.cleanTag}`);
-          counter++;
-        }
-        done.resolve(sortedArray);
-      } else if (sorterFunctionCounter < 10) {
-        sorterFunctionCounter++;
-        sorterFunction();
-      }
-    };
-    sorterFunction();
-    return done.promise;
+  public static async sortDockerfiles(dockerfiles: Dockerfile[]): Promise<Dockerfile[]> {
+    logger.log('info', 'Sorting Dockerfiles based on dependencies...');
+
+    // Map from cleanTag to Dockerfile instance for quick lookup
+    const tagToDockerfile = new Map<string, Dockerfile>();
+    dockerfiles.forEach((dockerfile) => {
+      tagToDockerfile.set(dockerfile.cleanTag, dockerfile);
+    });
+
+    // Build the dependency graph
+    const graph = new Map<Dockerfile, Dockerfile[]>();
+    dockerfiles.forEach((dockerfile) => {
+      const dependencies: Dockerfile[] = [];
+      const baseImage = dockerfile.baseImage;
+
+      // Check if the baseImage is among the local Dockerfiles
+      if (tagToDockerfile.has(baseImage)) {
+        const baseDockerfile = tagToDockerfile.get(baseImage);
+        dependencies.push(baseDockerfile);
+        dockerfile.localBaseImageDependent = true;
+        dockerfile.localBaseDockerfile = baseDockerfile;
+      }
+
+      graph.set(dockerfile, dependencies);
+    });
+
+    // Perform topological sort
+    const sortedDockerfiles: Dockerfile[] = [];
+    const visited = new Set<Dockerfile>();
+    const tempMarked = new Set<Dockerfile>();
+
+    const visit = (dockerfile: Dockerfile) => {
+      if (tempMarked.has(dockerfile)) {
+        throw new Error(`Circular dependency detected involving ${dockerfile.cleanTag}`);
+      }
+      if (!visited.has(dockerfile)) {
+        tempMarked.add(dockerfile);
+        const dependencies = graph.get(dockerfile) || [];
+        dependencies.forEach((dep) => visit(dep));
+        tempMarked.delete(dockerfile);
+        visited.add(dockerfile);
+        sortedDockerfiles.push(dockerfile);
+      }
+    };
+
+    try {
+      dockerfiles.forEach((dockerfile) => {
+        if (!visited.has(dockerfile)) {
+          visit(dockerfile);
+        }
+      });
+    } catch (error) {
+      logger.log('error', error.message);
+      throw error;
+    }
+
+    // Log the sorted order
+    sortedDockerfiles.forEach((dockerfile, index) => {
+      logger.log(
+        'info',
+        `Build order ${index + 1}: ${dockerfile.cleanTag}
+         with base image ${dockerfile.baseImage}`
+      );
+    });
+
+    return sortedDockerfiles;
   }
 
   /**
@@ -120,7 +153,10 @@ export class Dockerfile {
    * returns a version for a docker file
    * @execution SYNC
    */
-  public static dockerFileVersion(dockerfileInstanceArg: Dockerfile, dockerfileNameArg: string): string {
+  public static dockerFileVersion(
+    dockerfileInstanceArg: Dockerfile,
+    dockerfileNameArg: string
+  ): string {
     let versionString: string;
     const versionRegex = /Dockerfile_(.+)$/;
     const regexResultArray = versionRegex.exec(dockerfileNameArg);
@@ -131,18 +167,69 @@ public static dockerFileVersion(dockerfileInstanceArg: Dockerfile, dockerfileNam
     }
     versionString = versionString.replace(
       '##version##',
-      dockerfileInstanceArg.npmciDockerManagerRef.npmciRef.npmciConfig.getConfig().projectInfo.npm.version
+      dockerfileInstanceArg.npmciDockerManagerRef.npmciRef.npmciConfig.getConfig().projectInfo.npm
+        .version
     );
     return versionString;
   }
 
   /**
-   * returns the docker base image for a Dockerfile
+   * Extracts the base image from a Dockerfile content without using external libraries.
+   * @param dockerfileContentArg The content of the Dockerfile as a string.
+   * @returns The base image specified in the first FROM instruction.
    */
   public static dockerBaseImage(dockerfileContentArg: string): string {
-    const baseImageRegex = /FROM\s([a-zA-z0-9\/\-\:]*)\n?/;
-    const regexResultArray = baseImageRegex.exec(dockerfileContentArg);
-    return regexResultArray[1];
+    const lines = dockerfileContentArg.split(/\r?\n/);
+    const args: { [key: string]: string } = {};
+
+    for (const line of lines) {
+      const trimmedLine = line.trim();
+
+      // Skip empty lines and comments
+      if (trimmedLine === '' || trimmedLine.startsWith('#')) {
+        continue;
+      }
+
+      // Match ARG instructions
+      const argMatch = trimmedLine.match(/^ARG\s+([^\s=]+)(?:=(.*))?$/i);
+      if (argMatch) {
+        const argName = argMatch[1];
+        const argValue = argMatch[2] !== undefined ? argMatch[2] : process.env[argName] || '';
+        args[argName] = argValue;
+        continue;
+      }
+
+      // Match FROM instructions
+      const fromMatch = trimmedLine.match(/^FROM\s+(.+?)(?:\s+AS\s+[^\s]+)?$/i);
+      if (fromMatch) {
+        let baseImage = fromMatch[1].trim();
+        // Substitute variables in the base image name
+        baseImage = Dockerfile.substituteVariables(baseImage, args);
+        return baseImage;
+      }
+    }
+
+    throw new Error('No FROM instruction found in Dockerfile');
+  }
+
+  /**
+   * Substitutes variables in a string, supporting default values like ${VAR:-default}.
+   * @param str The string containing variables.
+   * @param vars The object containing variable values.
+   * @returns The string with variables substituted.
+   */
+  private static substituteVariables(str: string, vars: { [key: string]: string }): string {
+    return str.replace(/\${([^}:]+)(:-([^}]+))?}/g, (_, varName, __, defaultValue) => {
+      if (vars[varName] !== undefined) {
+        return vars[varName];
+      } else if (defaultValue !== undefined) {
+        return defaultValue;
+      } else {
+        return '';
+      }
+    });
   }
 
   /**
@@ -196,22 +283,6 @@ public static dockerFileVersion(dockerfileInstanceArg: Dockerfile, dockerfileNam
     return buildArgsString;
   }
 
-  /**
-   *
-   */
-  public static cleanTagsArrayFunction(
-    dockerfileArrayArg: Dockerfile[],
-    trackingArrayArg: Dockerfile[]
-  ): string[] {
-    const cleanTagsArray: string[] = [];
-    dockerfileArrayArg.forEach((dockerfileArg) => {
-      if (trackingArrayArg.indexOf(dockerfileArg) === -1) {
-        cleanTagsArray.push(dockerfileArg.cleanTag);
-      }
-    });
-    return cleanTagsArray;
-  }
-
   // INSTANCE
   public npmciDockerManagerRef: NpmciDockerManager;
@@ -285,7 +356,10 @@ public static dockerFileVersion(dockerfileInstanceArg: Dockerfile, dockerfileNam
       labels: [],
       version: this.npmciDockerManagerRef.npmciRef.npmciConfig.getConfig().projectInfo.npm.version,
     });
-    await this.npmciDockerManagerRef.npmciRef.npmciConfig.kvStorage.writeKey('latestPushedDockerTag', this.pushTag)
+    await this.npmciDockerManagerRef.npmciRef.npmciConfig.kvStorage.writeKey(
+      'latestPushedDockerTag',
+      this.pushTag
+    );
   }
 
   /**
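
For the sortDockerfiles rewrite above, here is a condensed standalone sketch of the same DFS-based topological ordering, using plain objects with cleanTag and baseImage fields as hypothetical stand-ins for Dockerfile instances; the tags and image names are invented.

```ts
// Standalone sketch of a DFS topological sort over local base-image dependencies.
interface BuildItem {
  cleanTag: string;
  baseImage: string;
}

function sortByBaseImage(items: BuildItem[]): BuildItem[] {
  const byTag = new Map(items.map((item) => [item.cleanTag, item] as const));
  const sorted: BuildItem[] = [];
  const visited = new Set<BuildItem>();
  const inProgress = new Set<BuildItem>();

  const visit = (item: BuildItem) => {
    if (inProgress.has(item)) {
      throw new Error(`Circular dependency detected involving ${item.cleanTag}`);
    }
    if (visited.has(item)) return;
    inProgress.add(item);
    // Only base images that are themselves local Dockerfiles create an ordering edge.
    const dependency = byTag.get(item.baseImage);
    if (dependency) visit(dependency);
    inProgress.delete(item);
    visited.add(item);
    sorted.push(item);
  };

  items.forEach((item) => visit(item));
  return sorted;
}

// base -> app -> worker; "node:22" is external, so it imposes no ordering constraint.
const order = sortByBaseImage([
  { cleanTag: 'registry.example.com/app', baseImage: 'registry.example.com/base' },
  { cleanTag: 'registry.example.com/worker', baseImage: 'registry.example.com/app' },
  { cleanTag: 'registry.example.com/base', baseImage: 'node:22' },
]);
console.log(order.map((item) => item.cleanTag));
// -> [ 'registry.example.com/base', 'registry.example.com/app', 'registry.example.com/worker' ]
```

External base images never appear in the tag map, so they add no edge, which mirrors how the class only marks localBaseImageDependent when the base image is one of the project's own Dockerfiles.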

View File

@@ -52,12 +52,17 @@ export class NpmciNpmManager
    * authenticates npm with token from env var
    */
   public async prepare() {
+    logger.log('info', 'running >>npm prepare<<');
     const config = this.npmciRef.npmciConfig.getConfig();
     let npmrcFileString: string = '';
     await plugins.smartobject.forEachMinimatch(
       process.env,
       'NPMCI_TOKEN_NPM*',
       (npmEnvArg: string) => {
+        if (!npmEnvArg) {
+          logger.log('note','found empty token...');
+          return;
+        }
         const npmRegistryUrl = npmEnvArg.split('|')[0];
         logger.log('ok', `found token for ${npmRegistryUrl}`);
         let npmToken = npmEnvArg.split('|')[1];
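
The guard added above skips empty NPMCI_TOKEN_NPM* values before the split('|') calls run. A rough sketch of the expected "<registryUrl>|<token>" layout and the skip behaviour follows; the env var values are invented, and the .npmrc line shown is the generic npm auth format, not necessarily the exact string npmci writes.

```ts
// Sketch of the token convention used in prepare(): "<registryUrl>|<token>" per env var.
process.env.NPMCI_TOKEN_NPM = 'registry.npmjs.org|npm_exampleToken123'; // invented sample value
process.env.NPMCI_TOKEN_NPM2 = ''; // an empty token that should now be ignored

let npmrcFileString = '';
for (const [key, value] of Object.entries(process.env)) {
  if (!key.startsWith('NPMCI_TOKEN_NPM')) continue;
  if (!value) {
    console.log(`note: found empty token in ${key}...`);
    continue;
  }
  const [npmRegistryUrl, npmToken] = value.split('|');
  console.log(`ok: found token for ${npmRegistryUrl}`);
  // Generic npm auth line; the exact string npmci writes may differ.
  npmrcFileString += `//${npmRegistryUrl}/:_authToken=${npmToken}\n`;
}
console.log(npmrcFileString);
```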

View File

@@ -48,10 +48,10 @@ export class NpmciConfig {
   public async init() {
     this.npmciNpmextra = new plugins.npmextra.Npmextra(paths.cwd);
-    this.kvStorage = new plugins.npmextra.KeyValueStore(
-      'userHomeDir',
-      `${this.npmciRef.npmciEnv.repo.user}_${this.npmciRef.npmciEnv.repo.repo}`
-    );
+    this.kvStorage = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: `.npmci_${this.npmciRef.npmciEnv.repo.user}_${this.npmciRef.npmciEnv.repo.repo}`,
+    });
     this.npmciQenv = new plugins.qenv.Qenv(
       paths.NpmciProjectDir,
       paths.NpmciProjectNogitDir,

View File

@@ -9,10 +9,10 @@ export class NpmciEnv {
   constructor(npmciRefArg: Npmci) {
     this.npmciRef = npmciRefArg;
-    if (process.env.GITLAB_CI) {
+    if (!this.repoString && process.env.GITLAB_CI) {
       this.repoString = process.env.CI_REPOSITORY_URL;
     }
-    if (process.env.NPMCI_COMPUTED_REPOURL) {
+    if (!this.repoString && process.env.NPMCI_COMPUTED_REPOURL) {
       this.repoString = process.env.NPMCI_COMPUTED_REPOURL;
     }
     if (!this.repoString) {
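
The added !this.repoString guards turn these assignments into first-match-wins resolution instead of last-match-wins. A small sketch of that precedence is below; the function name and URLs are invented, and the trailing fallback branch is left out because its body is not part of this compare view (in the class, repoString may also already be set before these checks run).

```ts
// First-match-wins resolution of repoString, mirroring the guarded assignments above.
function computeRepoString(env: NodeJS.ProcessEnv): string | undefined {
  let repoString: string | undefined;
  if (!repoString && env.GITLAB_CI) {
    repoString = env.CI_REPOSITORY_URL;
  }
  if (!repoString && env.NPMCI_COMPUTED_REPOURL) {
    repoString = env.NPMCI_COMPUTED_REPOURL;
  }
  // if (!repoString) { ... }  // further fallback handled in the class itself
  return repoString;
}

// With both sources set, the GitLab-provided URL now wins:
console.log(
  computeRepoString({
    GITLAB_CI: 'true',
    CI_REPOSITORY_URL: 'https://gitlab.com/example/project.git',
    NPMCI_COMPUTED_REPOURL: 'https://code.foss.global/example/project.git',
  })
); // -> https://gitlab.com/example/project.git
```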