fix(deps): upgrade core tooling dependencies and adapt Docker client internals for compatibility

This commit is contained in:
2026-03-28 05:39:48 +00:00
parent 1923837225
commit 645e1fd4a9
19 changed files with 5861 additions and 7164 deletions

View File

@@ -3,6 +3,8 @@ import * as paths from './paths.js';
import { logger } from './logger.js';
import type { DockerHost } from './classes.host.js';
const smartfileFactory = plugins.smartfile.SmartFileFactory.nodeFs();
export interface IDockerImageStoreConstructorOptions {
/**
* used for preparing images for longer term storage
@@ -38,14 +40,12 @@ export class DockerImageStore {
uniqueProcessingId,
);
// Create a write stream to store the tar file
const writeStream = plugins.smartfile.fsStream.createWriteStream(
initialTarDownloadPath,
);
const writeStream = plugins.fs.createWriteStream(initialTarDownloadPath);
// lets wait for the write stream to finish
await new Promise((resolve, reject) => {
await new Promise<void>((resolve, reject) => {
tarStream.pipe(writeStream);
writeStream.on('finish', resolve);
writeStream.on('finish', () => resolve());
writeStream.on('error', reject);
});
logger.log(
@@ -54,44 +54,55 @@ export class DockerImageStore {
);
// lets process the image
const tarArchive = await plugins.smartarchive.SmartArchive.fromArchiveFile(
initialTarDownloadPath,
);
await tarArchive.exportToFs(extractionDir);
await plugins.smartarchive.SmartArchive.create()
.file(initialTarDownloadPath)
.extract(extractionDir);
logger.log('info', `Image ${imageName} extracted.`);
await plugins.smartfile.fs.remove(initialTarDownloadPath);
await plugins.fs.promises.rm(initialTarDownloadPath, { force: true });
logger.log('info', `deleted original tar to save space.`);
logger.log('info', `now repackaging for s3...`);
const smartfileIndexJson = await plugins.smartfile.SmartFile.fromFilePath(
const smartfileIndexJson = await smartfileFactory.fromFilePath(
plugins.path.join(extractionDir, 'index.json'),
);
const smartfileManifestJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'manifest.json'),
);
const smartfileOciLayoutJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'oci-layout'),
);
const smartfileRepositoriesJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'repositories'),
);
const smartfileManifestJson = await smartfileFactory.fromFilePath(
plugins.path.join(extractionDir, 'manifest.json'),
);
const smartfileOciLayoutJson = await smartfileFactory.fromFilePath(
plugins.path.join(extractionDir, 'oci-layout'),
);
// repositories file is optional in OCI image tars
const repositoriesPath = plugins.path.join(extractionDir, 'repositories');
const hasRepositories = plugins.fs.existsSync(repositoriesPath);
const smartfileRepositoriesJson = hasRepositories
? await smartfileFactory.fromFilePath(repositoriesPath)
: null;
const indexJson = JSON.parse(smartfileIndexJson.contents.toString());
const manifestJson = JSON.parse(smartfileManifestJson.contents.toString());
const ociLayoutJson = JSON.parse(
smartfileOciLayoutJson.contents.toString(),
);
const repositoriesJson = JSON.parse(
smartfileRepositoriesJson.contents.toString(),
);
indexJson.manifests[0].annotations['io.containerd.image.name'] = imageName;
manifestJson[0].RepoTags[0] = imageName;
const repoFirstKey = Object.keys(repositoriesJson)[0];
const repoFirstValue = repositoriesJson[repoFirstKey];
repositoriesJson[imageName] = repoFirstValue;
delete repositoriesJson[repoFirstKey];
if (indexJson.manifests?.[0]?.annotations) {
indexJson.manifests[0].annotations['io.containerd.image.name'] = imageName;
}
if (manifestJson?.[0]?.RepoTags) {
manifestJson[0].RepoTags[0] = imageName;
}
if (smartfileRepositoriesJson) {
const repositoriesJson = JSON.parse(
smartfileRepositoriesJson.contents.toString(),
);
const repoFirstKey = Object.keys(repositoriesJson)[0];
const repoFirstValue = repositoriesJson[repoFirstKey];
repositoriesJson[imageName] = repoFirstValue;
delete repositoriesJson[repoFirstKey];
smartfileRepositoriesJson.contents = Buffer.from(
JSON.stringify(repositoriesJson, null, 2),
);
}
smartfileIndexJson.contents = Buffer.from(
JSON.stringify(indexJson, null, 2),
@@ -102,45 +113,51 @@ export class DockerImageStore {
smartfileOciLayoutJson.contents = Buffer.from(
JSON.stringify(ociLayoutJson, null, 2),
);
smartfileRepositoriesJson.contents = Buffer.from(
JSON.stringify(repositoriesJson, null, 2),
);
await Promise.all([
const writePromises = [
smartfileIndexJson.write(),
smartfileManifestJson.write(),
smartfileOciLayoutJson.write(),
smartfileRepositoriesJson.write(),
]);
];
if (smartfileRepositoriesJson) {
writePromises.push(smartfileRepositoriesJson.write());
}
await Promise.all(writePromises);
logger.log('info', 'repackaging archive for s3...');
const tartools = new plugins.smartarchive.TarTools();
const newTarPack = await tartools.packDirectory(extractionDir);
const newTarPack = await tartools.getDirectoryPackStream(extractionDir);
const finalTarName = `${uniqueProcessingId}.processed.tar`;
const finalTarPath = plugins.path.join(
this.options.localDirPath,
finalTarName,
);
const finalWriteStream =
plugins.smartfile.fsStream.createWriteStream(finalTarPath);
await new Promise((resolve, reject) => {
newTarPack.finalize();
const finalWriteStream = plugins.fs.createWriteStream(finalTarPath);
await new Promise<void>((resolve, reject) => {
newTarPack.pipe(finalWriteStream);
finalWriteStream.on('finish', resolve);
finalWriteStream.on('finish', () => resolve());
finalWriteStream.on('error', reject);
});
logger.log('ok', `Repackaged image ${imageName} for s3.`);
await plugins.smartfile.fs.remove(extractionDir);
const finalTarReadStream =
plugins.smartfile.fsStream.createReadStream(finalTarPath);
await plugins.fs.promises.rm(extractionDir, { recursive: true, force: true });
// Remove existing file in bucket if it exists (smartbucket v4 no longer silently overwrites)
try {
await this.options.bucketDir.fastRemove({ path: `${imageName}.tar` });
} catch (e) {
// File may not exist, which is fine
}
const finalTarReadStream = plugins.fs.createReadStream(finalTarPath);
await this.options.bucketDir.fastPutStream({
stream: finalTarReadStream,
path: `${imageName}.tar`,
});
await plugins.smartfile.fs.remove(finalTarPath);
await plugins.fs.promises.rm(finalTarPath, { force: true });
}
public async start() {
await plugins.smartfile.fs.ensureEmptyDir(this.options.localDirPath);
// Ensure the local directory exists and is empty
await plugins.fs.promises.rm(this.options.localDirPath, { recursive: true, force: true });
await plugins.fs.promises.mkdir(this.options.localDirPath, { recursive: true });
}
// Lifecycle teardown hook; intentionally a no-op — the store holds no open
// handles between operations (streams are closed per-call in the methods above).
public async stop() {}
@@ -154,10 +171,10 @@ export class DockerImageStore {
`${imageName}.tar`,
);
if (!(await plugins.smartfile.fs.fileExists(imagePath))) {
if (!plugins.fs.existsSync(imagePath)) {
throw new Error(`Image ${imageName} does not exist.`);
}
return plugins.smartfile.fsStream.createReadStream(imagePath);
return plugins.fs.createReadStream(imagePath);
}
}