366 lines
12 KiB
TypeScript
366 lines
12 KiB
TypeScript
import * as plugins from './tsdocker.plugins.js';
|
|
import * as paths from './tsdocker.paths.js';
|
|
import { logger, formatDuration } from './tsdocker.logging.js';
|
|
import { Dockerfile } from './classes.dockerfile.js';
|
|
import { DockerRegistry } from './classes.dockerregistry.js';
|
|
import { RegistryStorage } from './classes.registrystorage.js';
|
|
import { TsDockerCache } from './classes.tsdockercache.js';
|
|
import type { ITsDockerConfig, IBuildCommandOptions } from './interfaces/index.js';
|
|
|
|
// Shared shell executor for all `docker ...` CLI invocations in this module
// (tagging, buildx setup). Uses bash so redirections like `2>/dev/null` work.
const smartshellInstance = new plugins.smartshell.Smartshell({
  executor: 'bash',
});
|
|
|
|
/**
|
|
* Main orchestrator class for Docker operations
|
|
*/
|
|
export class TsDockerManager {
|
|
public registryStorage: RegistryStorage;
|
|
public config: ITsDockerConfig;
|
|
public projectInfo: any;
|
|
private dockerfiles: Dockerfile[] = [];
|
|
|
|
constructor(config: ITsDockerConfig) {
|
|
this.config = config;
|
|
this.registryStorage = new RegistryStorage();
|
|
}
|
|
|
|
/**
|
|
* Prepares the manager by loading project info and registries
|
|
*/
|
|
public async prepare(): Promise<void> {
|
|
// Load project info
|
|
try {
|
|
const projectinfoInstance = new plugins.projectinfo.ProjectInfo(paths.cwd);
|
|
this.projectInfo = {
|
|
npm: {
|
|
name: projectinfoInstance.npm.name,
|
|
version: projectinfoInstance.npm.version,
|
|
},
|
|
};
|
|
} catch (err) {
|
|
logger.log('warn', 'Could not load project info');
|
|
this.projectInfo = null;
|
|
}
|
|
|
|
// Load registries from environment
|
|
this.registryStorage.loadFromEnv();
|
|
|
|
// Add registries from config if specified
|
|
if (this.config.registries) {
|
|
for (const registryUrl of this.config.registries) {
|
|
// Check if already loaded from env
|
|
if (!this.registryStorage.getRegistryByUrl(registryUrl)) {
|
|
// Try to load credentials for this registry from env
|
|
const envVarName = registryUrl.replace(/\./g, '_').toUpperCase();
|
|
const envString = process.env[`DOCKER_REGISTRY_${envVarName}`];
|
|
if (envString) {
|
|
try {
|
|
const registry = DockerRegistry.fromEnvString(envString);
|
|
this.registryStorage.addRegistry(registry);
|
|
} catch (err) {
|
|
logger.log('warn', `Could not load credentials for registry ${registryUrl}`);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
logger.log('info', `Prepared TsDockerManager with ${this.registryStorage.getAllRegistries().length} registries`);
|
|
}
|
|
|
|
/**
|
|
* Logs in to all configured registries
|
|
*/
|
|
public async login(): Promise<void> {
|
|
if (this.registryStorage.getAllRegistries().length === 0) {
|
|
logger.log('warn', 'No registries configured');
|
|
return;
|
|
}
|
|
await this.registryStorage.loginAll();
|
|
}
|
|
|
|
/**
|
|
* Discovers and sorts Dockerfiles in the current directory
|
|
*/
|
|
public async discoverDockerfiles(): Promise<Dockerfile[]> {
|
|
this.dockerfiles = await Dockerfile.readDockerfiles(this);
|
|
this.dockerfiles = await Dockerfile.sortDockerfiles(this.dockerfiles);
|
|
this.dockerfiles = await Dockerfile.mapDockerfiles(this.dockerfiles);
|
|
return this.dockerfiles;
|
|
}
|
|
|
|
/**
|
|
* Builds discovered Dockerfiles in dependency order.
|
|
* When options.patterns is provided, only matching Dockerfiles (and their dependencies) are built.
|
|
*/
|
|
public async build(options?: IBuildCommandOptions): Promise<Dockerfile[]> {
|
|
if (this.dockerfiles.length === 0) {
|
|
await this.discoverDockerfiles();
|
|
}
|
|
|
|
if (this.dockerfiles.length === 0) {
|
|
logger.log('warn', 'No Dockerfiles found');
|
|
return [];
|
|
}
|
|
|
|
// Determine which Dockerfiles to build
|
|
let toBuild = this.dockerfiles;
|
|
|
|
if (options?.patterns && options.patterns.length > 0) {
|
|
// Filter to matching Dockerfiles
|
|
const matched = this.dockerfiles.filter((df) => {
|
|
const basename = plugins.path.basename(df.filePath);
|
|
return options.patterns!.some((pattern) => {
|
|
if (pattern.includes('*') || pattern.includes('?')) {
|
|
// Convert glob pattern to regex
|
|
const regexStr = '^' + pattern.replace(/\*/g, '.*').replace(/\?/g, '.') + '$';
|
|
return new RegExp(regexStr).test(basename);
|
|
}
|
|
return basename === pattern;
|
|
});
|
|
});
|
|
|
|
if (matched.length === 0) {
|
|
logger.log('warn', `No Dockerfiles matched patterns: ${options.patterns.join(', ')}`);
|
|
return [];
|
|
}
|
|
|
|
// Resolve dependency chain and preserve topological order
|
|
toBuild = this.resolveWithDependencies(matched, this.dockerfiles);
|
|
logger.log('info', `Matched ${matched.length} Dockerfile(s), building ${toBuild.length} (including dependencies)`);
|
|
}
|
|
|
|
// Check if buildx is needed
|
|
if (options?.platform || (this.config.platforms && this.config.platforms.length > 1)) {
|
|
await this.ensureBuildx();
|
|
}
|
|
|
|
logger.log('info', '');
|
|
logger.log('info', '=== BUILD PHASE ===');
|
|
logger.log('info', `Building ${toBuild.length} Dockerfile(s)...`);
|
|
|
|
if (options?.cached) {
|
|
// === CACHED MODE: skip builds for unchanged Dockerfiles ===
|
|
logger.log('info', '(cached mode active)');
|
|
const cache = new TsDockerCache();
|
|
cache.load();
|
|
|
|
const total = toBuild.length;
|
|
const overallStart = Date.now();
|
|
|
|
for (let i = 0; i < total; i++) {
|
|
const dockerfileArg = toBuild[i];
|
|
const progress = `(${i + 1}/${total})`;
|
|
const skip = await cache.shouldSkipBuild(dockerfileArg.cleanTag, dockerfileArg.content);
|
|
if (skip) {
|
|
logger.log('ok', `${progress} Skipped ${dockerfileArg.cleanTag} (cached)`);
|
|
continue;
|
|
}
|
|
|
|
// Cache miss — build this Dockerfile
|
|
logger.log('info', `${progress} Building ${dockerfileArg.cleanTag}...`);
|
|
const elapsed = await dockerfileArg.build({
|
|
platform: options?.platform,
|
|
timeout: options?.timeout,
|
|
noCache: options?.noCache,
|
|
verbose: options?.verbose,
|
|
});
|
|
logger.log('ok', `${progress} Built ${dockerfileArg.cleanTag} in ${formatDuration(elapsed)}`);
|
|
|
|
const imageId = await dockerfileArg.getId();
|
|
cache.recordBuild(dockerfileArg.cleanTag, dockerfileArg.content, imageId, dockerfileArg.buildTag);
|
|
}
|
|
|
|
logger.log('info', `Total build time: ${formatDuration(Date.now() - overallStart)}`);
|
|
|
|
// Perform dependency tagging for all Dockerfiles (even cache hits, since tags may be stale)
|
|
for (const dockerfileArg of toBuild) {
|
|
const dependentBaseImages = new Set<string>();
|
|
for (const other of toBuild) {
|
|
if (other.localBaseDockerfile === dockerfileArg && other.baseImage !== dockerfileArg.buildTag) {
|
|
dependentBaseImages.add(other.baseImage);
|
|
}
|
|
}
|
|
for (const fullTag of dependentBaseImages) {
|
|
logger.log('info', `Tagging ${dockerfileArg.buildTag} as ${fullTag} for local dependency resolution`);
|
|
await smartshellInstance.exec(`docker tag ${dockerfileArg.buildTag} ${fullTag}`);
|
|
}
|
|
}
|
|
|
|
cache.save();
|
|
} else {
|
|
// === STANDARD MODE: build all via static helper ===
|
|
await Dockerfile.buildDockerfiles(toBuild, {
|
|
platform: options?.platform,
|
|
timeout: options?.timeout,
|
|
noCache: options?.noCache,
|
|
verbose: options?.verbose,
|
|
});
|
|
}
|
|
|
|
logger.log('success', 'All Dockerfiles built successfully');
|
|
|
|
return toBuild;
|
|
}
|
|
|
|
/**
|
|
* Resolves a set of target Dockerfiles to include all their local base image dependencies,
|
|
* preserving the original topological build order.
|
|
*/
|
|
private resolveWithDependencies(targets: Dockerfile[], allSorted: Dockerfile[]): Dockerfile[] {
|
|
const needed = new Set<Dockerfile>();
|
|
const addWithDeps = (df: Dockerfile) => {
|
|
if (needed.has(df)) return;
|
|
needed.add(df);
|
|
if (df.localBaseImageDependent && df.localBaseDockerfile) {
|
|
addWithDeps(df.localBaseDockerfile);
|
|
}
|
|
};
|
|
for (const df of targets) addWithDeps(df);
|
|
return allSorted.filter((df) => needed.has(df));
|
|
}
|
|
|
|
/**
|
|
* Ensures Docker buildx is set up for multi-architecture builds
|
|
*/
|
|
private async ensureBuildx(): Promise<void> {
|
|
logger.log('info', 'Setting up Docker buildx for multi-platform builds...');
|
|
|
|
// Check if a buildx builder exists
|
|
const inspectResult = await smartshellInstance.exec('docker buildx inspect tsdocker-builder 2>/dev/null');
|
|
|
|
if (inspectResult.exitCode !== 0) {
|
|
// Create a new buildx builder
|
|
logger.log('info', 'Creating new buildx builder...');
|
|
await smartshellInstance.exec('docker buildx create --name tsdocker-builder --use');
|
|
await smartshellInstance.exec('docker buildx inspect --bootstrap');
|
|
} else {
|
|
// Use existing builder
|
|
await smartshellInstance.exec('docker buildx use tsdocker-builder');
|
|
}
|
|
|
|
logger.log('ok', 'Docker buildx ready');
|
|
}
|
|
|
|
/**
|
|
* Pushes all built images to specified registries
|
|
*/
|
|
public async push(registryUrls?: string[]): Promise<void> {
|
|
if (this.dockerfiles.length === 0) {
|
|
await this.discoverDockerfiles();
|
|
}
|
|
|
|
if (this.dockerfiles.length === 0) {
|
|
logger.log('warn', 'No Dockerfiles found to push');
|
|
return;
|
|
}
|
|
|
|
// Determine which registries to push to
|
|
let registriesToPush: DockerRegistry[] = [];
|
|
|
|
if (registryUrls && registryUrls.length > 0) {
|
|
// Push to specified registries
|
|
for (const url of registryUrls) {
|
|
const registry = this.registryStorage.getRegistryByUrl(url);
|
|
if (registry) {
|
|
registriesToPush.push(registry);
|
|
} else {
|
|
logger.log('warn', `Registry ${url} not found in storage`);
|
|
}
|
|
}
|
|
} else {
|
|
// Push to all configured registries
|
|
registriesToPush = this.registryStorage.getAllRegistries();
|
|
}
|
|
|
|
if (registriesToPush.length === 0) {
|
|
logger.log('warn', 'No registries available to push to');
|
|
return;
|
|
}
|
|
|
|
// Push each Dockerfile to each registry
|
|
for (const dockerfile of this.dockerfiles) {
|
|
for (const registry of registriesToPush) {
|
|
await dockerfile.push(registry);
|
|
}
|
|
}
|
|
|
|
logger.log('success', 'All images pushed successfully');
|
|
}
|
|
|
|
/**
|
|
* Pulls images from a specified registry
|
|
*/
|
|
public async pull(registryUrl: string): Promise<void> {
|
|
if (this.dockerfiles.length === 0) {
|
|
await this.discoverDockerfiles();
|
|
}
|
|
|
|
const registry = this.registryStorage.getRegistryByUrl(registryUrl);
|
|
if (!registry) {
|
|
throw new Error(`Registry ${registryUrl} not found`);
|
|
}
|
|
|
|
for (const dockerfile of this.dockerfiles) {
|
|
await dockerfile.pull(registry);
|
|
}
|
|
|
|
logger.log('success', 'All images pulled successfully');
|
|
}
|
|
|
|
/**
|
|
* Runs tests for all Dockerfiles
|
|
*/
|
|
public async test(): Promise<void> {
|
|
if (this.dockerfiles.length === 0) {
|
|
await this.discoverDockerfiles();
|
|
}
|
|
|
|
if (this.dockerfiles.length === 0) {
|
|
logger.log('warn', 'No Dockerfiles found to test');
|
|
return;
|
|
}
|
|
|
|
logger.log('info', '');
|
|
logger.log('info', '=== TEST PHASE ===');
|
|
await Dockerfile.testDockerfiles(this.dockerfiles);
|
|
logger.log('success', 'All tests completed');
|
|
}
|
|
|
|
/**
|
|
* Lists all discovered Dockerfiles and their info
|
|
*/
|
|
public async list(): Promise<Dockerfile[]> {
|
|
if (this.dockerfiles.length === 0) {
|
|
await this.discoverDockerfiles();
|
|
}
|
|
|
|
logger.log('info', '');
|
|
logger.log('info', 'Discovered Dockerfiles:');
|
|
logger.log('info', '========================');
|
|
logger.log('info', '');
|
|
|
|
for (let i = 0; i < this.dockerfiles.length; i++) {
|
|
const df = this.dockerfiles[i];
|
|
logger.log('info', `${i + 1}. ${df.filePath}`);
|
|
logger.log('info', ` Tag: ${df.cleanTag}`);
|
|
logger.log('info', ` Base Image: ${df.baseImage}`);
|
|
logger.log('info', ` Version: ${df.version}`);
|
|
if (df.localBaseImageDependent) {
|
|
logger.log('info', ` Depends on: ${df.localBaseDockerfile?.cleanTag}`);
|
|
}
|
|
logger.log('info', '');
|
|
}
|
|
|
|
return this.dockerfiles;
|
|
}
|
|
|
|
/**
|
|
* Gets the cached Dockerfiles (after discovery)
|
|
*/
|
|
public getDockerfiles(): Dockerfile[] {
|
|
return this.dockerfiles;
|
|
}
|
|
}
|