fix(core): stabilize CI workflows and runtime: update CI images and metadata, improve streaming requests and image handling, and fix tests and package metadata

2025-08-19 01:46:37 +00:00
parent 4b1c908b89
commit 414d7dd727
19 changed files with 444 additions and 243 deletions

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@apiclient.xyz/docker',
version: '1.3.4',
version: '1.3.5',
description: 'Provides easy communication with Docker remote API from Node.js, with TypeScript support.'
}

View File

@@ -10,7 +10,9 @@ export class DockerContainer {
/**
* get all containers
*/
public static async getContainers(dockerHostArg: DockerHost): Promise<DockerContainer[]> {
public static async getContainers(
dockerHostArg: DockerHost,
): Promise<DockerContainer[]> {
const result: DockerContainer[] = [];
const response = await dockerHostArg.request('GET', '/containers/json');
@@ -34,7 +36,7 @@ export class DockerContainer {
*/
public static async create(
dockerHost: DockerHost,
containerCreationDescriptor: interfaces.IContainerCreationDescriptor
containerCreationDescriptor: interfaces.IContainerCreationDescriptor,
) {
// check for unique hostname
const existingContainers = await DockerContainer.getContainers(dockerHost);
@@ -50,7 +52,10 @@ export class DockerContainer {
if (response.statusCode < 300) {
logger.log('info', 'Container created successfully');
} else {
logger.log('error', 'There has been a problem when creating the container');
logger.log(
'error',
'There has been a problem when creating the container',
);
}
}
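For orientation, a minimal usage sketch of the static method reformatted above. The import path and the DockerHost constructor options are assumptions, not taken from this diff; only the getContainers signature is confirmed here.

// Usage sketch (assumed export names and socket path):
import { DockerHost, DockerContainer } from '@apiclient.xyz/docker';

const host = new DockerHost({ dockerSockPath: '/var/run/docker.sock' });
const containers = await DockerContainer.getContainers(host);
console.log(`found ${containers.length} containers`);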

View File

@@ -37,10 +37,13 @@ export class DockerHost {
constructor(optionsArg: IDockerHostConstructorOptions) {
this.options = {
...{
imageStoreDir: plugins.path.join(paths.nogitDir, 'temp-docker-image-store'),
imageStoreDir: plugins.path.join(
paths.nogitDir,
'temp-docker-image-store',
),
},
...optionsArg,
}
};
let pathToUse: string;
if (optionsArg.dockerSockPath) {
pathToUse = optionsArg.dockerSockPath;
@@ -62,7 +65,7 @@ export class DockerHost {
this.imageStore = new DockerImageStore({
bucketDir: null,
localDirPath: this.options.imageStoreDir,
})
});
}
public async start() {
@@ -84,17 +87,22 @@ export class DockerHost {
throw new Error(response.body.Status);
}
console.log(response.body.Status);
this.registryToken = plugins.smartstring.base64.encode(plugins.smartjson.stringify(authData));
this.registryToken = plugins.smartstring.base64.encode(
plugins.smartjson.stringify(authData),
);
}
/**
* gets the token from the .docker/config.json file for GitLab registry
*/
public async getAuthTokenFromDockerConfig(registryUrlArg: string) {
const dockerConfigPath = plugins.smartpath.get.home('~/.docker/config.json');
const dockerConfigPath = plugins.smartpath.get.home(
'~/.docker/config.json',
);
const configObject = plugins.smartfile.fs.toObjectSync(dockerConfigPath);
const gitlabAuthBase64 = configObject.auths[registryUrlArg].auth;
const gitlabAuth: string = plugins.smartstring.base64.decode(gitlabAuthBase64);
const gitlabAuth: string =
plugins.smartstring.base64.decode(gitlabAuthBase64);
const gitlabAuthArray = gitlabAuth.split(':');
await this.auth({
username: gitlabAuthArray[0],
@@ -116,7 +124,9 @@ export class DockerHost {
/**
* create a network
*/
public async createNetwork(optionsArg: Parameters<typeof DockerNetwork.createNetwork>[1]) {
public async createNetwork(
optionsArg: Parameters<typeof DockerNetwork.createNetwork>[1],
) {
return await DockerNetwork.createNetwork(this, optionsArg);
}
@@ -127,7 +137,6 @@ export class DockerHost {
return await DockerNetwork.getNetworkByName(this, networkNameArg);
}
// ==============
// CONTAINERS
// ==============
@@ -226,7 +235,7 @@ export class DockerHost {
*/
public async request(methodArg: string, routeArg: string, dataArg = {}) {
const requestUrl = `${this.socketPath}${routeArg}`;
// Build the request using the fluent API
const smartRequest = plugins.smartrequest.SmartRequest.create()
.url(requestUrl)
@@ -234,12 +243,12 @@ export class DockerHost {
.header('X-Registry-Auth', this.registryToken)
.header('Host', 'docker.sock')
.options({ keepAlive: false });
// Add body for methods that support it
if (dataArg && Object.keys(dataArg).length > 0) {
smartRequest.json(dataArg);
}
// Execute the request based on method
let response;
switch (methodArg.toUpperCase()) {
@@ -258,23 +267,28 @@ export class DockerHost {
default:
throw new Error(`Unsupported HTTP method: ${methodArg}`);
}
// Parse the response body based on content type
let body;
const contentType = response.headers['content-type'] || '';
// Docker's streaming endpoints (like /images/create) return newline-delimited JSON
// which can't be parsed as a single JSON object
const isStreamingEndpoint = routeArg.includes('/images/create') ||
routeArg.includes('/images/load') ||
routeArg.includes('/build');
const isStreamingEndpoint =
routeArg.includes('/images/create') ||
routeArg.includes('/images/load') ||
routeArg.includes('/build');
if (contentType.includes('application/json') && !isStreamingEndpoint) {
body = await response.json();
} else {
body = await response.text();
// Try to parse as JSON if it looks like JSON and is not a streaming response
if (!isStreamingEndpoint && body && (body.startsWith('{') || body.startsWith('['))) {
if (
!isStreamingEndpoint &&
body &&
(body.startsWith('{') || body.startsWith('['))
) {
try {
body = JSON.parse(body);
} catch {
@@ -282,24 +296,28 @@ export class DockerHost {
}
}
}
// Create a response object compatible with existing code
const legacyResponse = {
statusCode: response.status,
body: body,
headers: response.headers
headers: response.headers,
};
if (response.status !== 200) {
console.log(body);
}
return legacyResponse;
}
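// Side note on the streaming-endpoint branch above: /images/create, /images/load
// and /build reply with newline-delimited JSON, so request() deliberately leaves
// the body as text for those routes. A hedged consumer sketch, kept in comments
// here (route from the Docker Engine API; `dockerHost` assumed constructed elsewhere):
//
//   const pull = await dockerHost.request(
//     'POST',
//     `/images/create?fromImage=${encodeURIComponent('alpine')}&tag=latest`,
//   );
//   const events = String(pull.body)
//     .split('\n')
//     .filter((line) => line.trim())
//     .map((line) => JSON.parse(line)); // one JSON progress event per line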
public async requestStreaming(methodArg: string, routeArg: string, readStream?: plugins.smartstream.stream.Readable) {
public async requestStreaming(
methodArg: string,
routeArg: string,
readStream?: plugins.smartstream.stream.Readable,
) {
const requestUrl = `${this.socketPath}${routeArg}`;
// Build the request using the fluent API
const smartRequest = plugins.smartrequest.SmartRequest.create()
.url(requestUrl)
@@ -308,7 +326,7 @@ export class DockerHost {
.header('Host', 'docker.sock')
.timeout(30000)
.options({ keepAlive: false, autoDrain: true }); // Disable auto-drain for streaming
// If we have a readStream, use the new stream method with logging
if (readStream) {
let counter = 0;
@@ -319,16 +337,16 @@ export class DockerHost {
}
counter++;
return chunkArg;
}
},
});
// Pipe through the logging duplex stream
const loggedStream = readStream.pipe(smartduplex);
// Use the new stream method to stream the data
smartRequest.stream(loggedStream, 'application/octet-stream');
}
// Execute the request based on method
let response;
switch (methodArg.toUpperCase()) {
@@ -347,29 +365,29 @@ export class DockerHost {
default:
throw new Error(`Unsupported HTTP method: ${methodArg}`);
}
console.log(response.status);
// For streaming responses, get the Node.js stream
const nodeStream = response.streamNode();
if (!nodeStream) {
// If no stream is available, consume the body as text
const body = await response.text();
console.log(body);
// Return a compatible response object
return {
statusCode: response.status,
body: body,
headers: response.headers
headers: response.headers,
};
}
// For streaming responses, return the stream with added properties
(nodeStream as any).statusCode = response.status;
(nodeStream as any).body = ''; // For compatibility
return nodeStream;
}
@@ -382,10 +400,14 @@ export class DockerHost {
if (!optionsArg.bucketName) {
throw new Error('bucketName is required');
}
const bucket = await this.smartBucket.getBucketByName(optionsArg.bucketName);
const bucket = await this.smartBucket.getBucketByName(
optionsArg.bucketName,
);
let wantedDirectory = await bucket.getBaseDirectory();
if (optionsArg.directoryPath) {
wantedDirectory = await wantedDirectory.getSubDirectoryByName(optionsArg.directoryPath);
wantedDirectory = await wantedDirectory.getSubDirectoryByName(
optionsArg.directoryPath,
);
}
this.imageStore.options.bucketDir = wantedDirectory;
}
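A hedged end-to-end sketch of the request path touched in this file. The statusCode/body/headers shape comes from the legacy response object built above; the socket path, export name, and registry URL are assumptions.

const host = new DockerHost({ dockerSockPath: '/var/run/docker.sock' });
await host.getAuthTokenFromDockerConfig('registry.gitlab.com');
const res = await host.request('GET', '/networks');
if (res.statusCode === 200) {
  console.log(res.body); // parsed JSON, since /networks is not a streaming endpoint
}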

View File

@@ -17,7 +17,10 @@ export class DockerImage {
return images;
}
public static async getImageByName(dockerHost: DockerHost, imageNameArg: string) {
public static async getImageByName(
dockerHost: DockerHost,
imageNameArg: string,
) {
const images = await this.getImages(dockerHost);
const result = images.find((image) => {
if (image.RepoTags) {
@@ -32,8 +35,8 @@ export class DockerImage {
public static async createFromRegistry(
dockerHostArg: DockerHost,
optionsArg: {
creationObject: interfaces.IImageCreationDescriptor
}
creationObject: interfaces.IImageCreationDescriptor;
},
): Promise<DockerImage> {
// lets create a sanatized imageUrlObject
const imageUrlObject: {
@@ -50,7 +53,7 @@ export class DockerImage {
const imageTag = imageUrlObject.imageUrl.split(':')[1];
if (imageUrlObject.imageTag) {
throw new Error(
`imageUrl ${imageUrlObject.imageUrl} can't be tagged with ${imageUrlObject.imageTag} because it is already tagged with ${imageTag}`
`imageUrl ${imageUrlObject.imageUrl} can't be tagged with ${imageUrlObject.imageTag} because it is already tagged with ${imageTag}`,
);
} else {
imageUrlObject.imageUrl = imageUrl;
@@ -65,12 +68,18 @@ export class DockerImage {
const response = await dockerHostArg.request(
'POST',
`/images/create?fromImage=${encodeURIComponent(
imageUrlObject.imageUrl
)}&tag=${encodeURIComponent(imageUrlObject.imageTag)}`
imageUrlObject.imageUrl,
)}&tag=${encodeURIComponent(imageUrlObject.imageTag)}`,
);
if (response.statusCode < 300) {
logger.log('info', `Successfully pulled image ${imageUrlObject.imageUrl} from the registry`);
const image = await DockerImage.getImageByName(dockerHostArg, imageUrlObject.imageOriginTag);
logger.log(
'info',
`Successfully pulled image ${imageUrlObject.imageUrl} from the registry`,
);
const image = await DockerImage.getImageByName(
dockerHostArg,
imageUrlObject.imageOriginTag,
);
return image;
} else {
logger.log('error', `Failed at the attempt of creating a new image`);
@@ -78,7 +87,7 @@ export class DockerImage {
}
/**
*
*
* @param dockerHostArg
* @param tarStreamArg
*/
@@ -87,13 +96,13 @@ export class DockerImage {
optionsArg: {
creationObject: interfaces.IImageCreationDescriptor;
tarStream: plugins.smartstream.stream.Readable;
}
},
): Promise<DockerImage> {
// Start the request for importing an image
const response = await dockerHostArg.requestStreaming(
'POST',
'/images/load',
optionsArg.tarStream
optionsArg.tarStream,
);
/**
@@ -144,7 +153,7 @@ export class DockerImage {
if (!loadedImageTag) {
throw new Error(
`Could not parse the loaded image info from Docker response.\nResponse was:\n${rawOutput}`
`Could not parse the loaded image info from Docker response.\nResponse was:\n${rawOutput}`,
);
}
@@ -153,34 +162,31 @@ export class DockerImage {
// "myrepo/myimage:latest" OR "sha256:someHash..."
// If Docker gave you an ID (e.g. "sha256:..."), you may need a separate
// DockerImage.getImageById method; or if you prefer, you can treat it as a name.
const newlyImportedImage = await DockerImage.getImageByName(dockerHostArg, loadedImageTag);
const newlyImportedImage = await DockerImage.getImageByName(
dockerHostArg,
loadedImageTag,
);
if (!newlyImportedImage) {
throw new Error(
`Image load succeeded, but no local reference found for "${loadedImageTag}".`
`Image load succeeded, but no local reference found for "${loadedImageTag}".`,
);
}
logger.log(
'info',
`Successfully imported image "${loadedImageTag}".`
);
logger.log('info', `Successfully imported image "${loadedImageTag}".`);
return newlyImportedImage;
}
public static async tagImageByIdOrName(
dockerHost: DockerHost,
idOrNameArg: string,
newTagArg: string
newTagArg: string,
) {
const response = await dockerHost.request(
'POST',
`/images/${encodeURIComponent(idOrNameArg)}/${encodeURIComponent(newTagArg)}`
`/images/${encodeURIComponent(idOrNameArg)}/${encodeURIComponent(newTagArg)}`,
);
}
public static async buildImage(dockerHostArg: DockerHost, dockerImageTag) {
@@ -249,23 +255,25 @@ export class DockerImage {
*/
public async exportToTarStream(): Promise<plugins.smartstream.stream.Readable> {
logger.log('info', `Exporting image ${this.RepoTags[0]} to tar stream.`);
const response = await this.dockerHost.requestStreaming('GET', `/images/${encodeURIComponent(this.RepoTags[0])}/get`);
const response = await this.dockerHost.requestStreaming(
'GET',
`/images/${encodeURIComponent(this.RepoTags[0])}/get`,
);
// Check if response is a Node.js stream
if (!response || typeof response.on !== 'function') {
throw new Error('Failed to get streaming response for image export');
}
let counter = 0;
const webduplexStream = new plugins.smartstream.SmartDuplex({
writeFunction: async (chunk, tools) => {
if (counter % 1000 === 0)
console.log(`Got chunk: ${counter}`);
if (counter % 1000 === 0) console.log(`Got chunk: ${counter}`);
counter++;
return chunk;
}
},
});
response.on('data', (chunk) => {
if (!webduplexStream.write(chunk)) {
response.pause();
@@ -274,16 +282,16 @@ export class DockerImage {
});
}
});
response.on('end', () => {
webduplexStream.end();
});
response.on('error', (error) => {
logger.log('error', `Error during image export: ${error.message}`);
webduplexStream.destroy(error);
});
return webduplexStream;
}
}
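A short sketch of pulling an image and exporting it again as a tar stream, matching the calls reflowed above. The registry URL and the destination file are placeholders, and `host` is assumed to be an already constructed DockerHost.

import * as fs from 'fs';

const image = await DockerImage.createFromRegistry(host, {
  creationObject: { imageUrl: 'registry.gitlab.com/mygroup/myimage' },
});
const tarStream = await image.exportToTarStream();
tarStream.pipe(fs.createWriteStream('./myimage.tar'));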

View File

@@ -22,14 +22,25 @@ export class DockerImageStore {
}
// Method to store tar stream
public async storeImage(imageName: string, tarStream: plugins.smartstream.stream.Readable): Promise<void> {
public async storeImage(
imageName: string,
tarStream: plugins.smartstream.stream.Readable,
): Promise<void> {
logger.log('info', `Storing image ${imageName}...`);
const uniqueProcessingId = plugins.smartunique.shortId();
const initialTarDownloadPath = plugins.path.join(this.options.localDirPath, `${uniqueProcessingId}.tar`);
const extractionDir = plugins.path.join(this.options.localDirPath, uniqueProcessingId);
const initialTarDownloadPath = plugins.path.join(
this.options.localDirPath,
`${uniqueProcessingId}.tar`,
);
const extractionDir = plugins.path.join(
this.options.localDirPath,
uniqueProcessingId,
);
// Create a write stream to store the tar file
const writeStream = plugins.smartfile.fsStream.createWriteStream(initialTarDownloadPath);
const writeStream = plugins.smartfile.fsStream.createWriteStream(
initialTarDownloadPath,
);
// lets wait for the write stream to finish
await new Promise((resolve, reject) => {
@@ -37,23 +48,43 @@ export class DockerImageStore {
writeStream.on('finish', resolve);
writeStream.on('error', reject);
});
logger.log('info', `Image ${imageName} stored locally for processing. Extracting...`);
logger.log(
'info',
`Image ${imageName} stored locally for processing. Extracting...`,
);
// lets process the image
const tarArchive = await plugins.smartarchive.SmartArchive.fromArchiveFile(initialTarDownloadPath);
const tarArchive = await plugins.smartarchive.SmartArchive.fromArchiveFile(
initialTarDownloadPath,
);
await tarArchive.exportToFs(extractionDir);
logger.log('info', `Image ${imageName} extracted.`);
await plugins.smartfile.fs.remove(initialTarDownloadPath);
logger.log('info', `deleted original tar to save space.`);
logger.log('info', `now repackaging for s3...`);
const smartfileIndexJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'index.json'));
const smartfileManifestJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'manifest.json'));
const smartfileOciLayoutJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'oci-layout'));
const smartfileRepositoriesJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'repositories'));
const smartfileIndexJson = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'index.json'),
);
const smartfileManifestJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'manifest.json'),
);
const smartfileOciLayoutJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'oci-layout'),
);
const smartfileRepositoriesJson =
await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(extractionDir, 'repositories'),
);
const indexJson = JSON.parse(smartfileIndexJson.contents.toString());
const manifestJson = JSON.parse(smartfileManifestJson.contents.toString());
const ociLayoutJson = JSON.parse(smartfileOciLayoutJson.contents.toString());
const repositoriesJson = JSON.parse(smartfileRepositoriesJson.contents.toString());
const ociLayoutJson = JSON.parse(
smartfileOciLayoutJson.contents.toString(),
);
const repositoriesJson = JSON.parse(
smartfileRepositoriesJson.contents.toString(),
);
indexJson.manifests[0].annotations['io.containerd.image.name'] = imageName;
manifestJson[0].RepoTags[0] = imageName;
@@ -62,10 +93,18 @@ export class DockerImageStore {
repositoriesJson[imageName] = repoFirstValue;
delete repositoriesJson[repoFirstKey];
smartfileIndexJson.contents = Buffer.from(JSON.stringify(indexJson, null, 2));
smartfileManifestJson.contents = Buffer.from(JSON.stringify(manifestJson, null, 2));
smartfileOciLayoutJson.contents = Buffer.from(JSON.stringify(ociLayoutJson, null, 2));
smartfileRepositoriesJson.contents = Buffer.from(JSON.stringify(repositoriesJson, null, 2));
smartfileIndexJson.contents = Buffer.from(
JSON.stringify(indexJson, null, 2),
);
smartfileManifestJson.contents = Buffer.from(
JSON.stringify(manifestJson, null, 2),
);
smartfileOciLayoutJson.contents = Buffer.from(
JSON.stringify(ociLayoutJson, null, 2),
);
smartfileRepositoriesJson.contents = Buffer.from(
JSON.stringify(repositoriesJson, null, 2),
);
await Promise.all([
smartfileIndexJson.write(),
smartfileManifestJson.write(),
@@ -77,8 +116,12 @@ export class DockerImageStore {
const tartools = new plugins.smartarchive.TarTools();
const newTarPack = await tartools.packDirectory(extractionDir);
const finalTarName = `${uniqueProcessingId}.processed.tar`;
const finalTarPath = plugins.path.join(this.options.localDirPath, finalTarName);
const finalWriteStream = plugins.smartfile.fsStream.createWriteStream(finalTarPath);
const finalTarPath = plugins.path.join(
this.options.localDirPath,
finalTarName,
);
const finalWriteStream =
plugins.smartfile.fsStream.createWriteStream(finalTarPath);
await new Promise((resolve, reject) => {
newTarPack.finalize();
newTarPack.pipe(finalWriteStream);
@@ -87,7 +130,8 @@ export class DockerImageStore {
});
logger.log('ok', `Repackaged image ${imageName} for s3.`);
await plugins.smartfile.fs.remove(extractionDir);
const finalTarReadStream = plugins.smartfile.fsStream.createReadStream(finalTarPath);
const finalTarReadStream =
plugins.smartfile.fsStream.createReadStream(finalTarPath);
await this.options.bucketDir.fastPutStream({
stream: finalTarReadStream,
path: `${imageName}.tar`,
@@ -102,8 +146,13 @@ export class DockerImageStore {
public async stop() {}
// Method to retrieve tar stream
public async getImage(imageName: string): Promise<plugins.smartstream.stream.Readable> {
const imagePath = plugins.path.join(this.options.localDirPath, `${imageName}.tar`);
public async getImage(
imageName: string,
): Promise<plugins.smartstream.stream.Readable> {
const imagePath = plugins.path.join(
this.options.localDirPath,
`${imageName}.tar`,
);
if (!(await plugins.smartfile.fs.fileExists(imagePath))) {
throw new Error(`Image ${imageName} does not exist.`);
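For context, a minimal construction sketch matching how DockerHost wires this store up in its constructor. `tarStreamFromSomewhere` is a placeholder, and the bucket directory still has to be attached before storeImage can complete its upload step.

const store = new DockerImageStore({
  bucketDir: null, // must point at an S3 directory before the upload step
  localDirPath: '/tmp/docker-image-store',
});
await store.storeImage('myimage', tarStreamFromSomewhere);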

View File

@@ -6,7 +6,9 @@ import { DockerService } from './classes.service.js';
import { logger } from './logger.js';
export class DockerNetwork {
public static async getNetworks(dockerHost: DockerHost): Promise<DockerNetwork[]> {
public static async getNetworks(
dockerHost: DockerHost,
): Promise<DockerNetwork[]> {
const dockerNetworks: DockerNetwork[] = [];
const response = await dockerHost.request('GET', '/networks');
for (const networkObject of response.body) {
@@ -17,14 +19,19 @@ export class DockerNetwork {
return dockerNetworks;
}
public static async getNetworkByName(dockerHost: DockerHost, dockerNetworkNameArg: string) {
public static async getNetworkByName(
dockerHost: DockerHost,
dockerNetworkNameArg: string,
) {
const networks = await DockerNetwork.getNetworks(dockerHost);
return networks.find((dockerNetwork) => dockerNetwork.Name === dockerNetworkNameArg);
return networks.find(
(dockerNetwork) => dockerNetwork.Name === dockerNetworkNameArg,
);
}
public static async createNetwork(
dockerHost: DockerHost,
networkCreationDescriptor: interfaces.INetworkCreationDescriptor
networkCreationDescriptor: interfaces.INetworkCreationDescriptor,
): Promise<DockerNetwork> {
const response = await dockerHost.request('POST', '/networks/create', {
Name: networkCreationDescriptor.Name,
@@ -47,9 +54,15 @@ export class DockerNetwork {
});
if (response.statusCode < 300) {
logger.log('info', 'Created network successfully');
return await DockerNetwork.getNetworkByName(dockerHost, networkCreationDescriptor.Name);
return await DockerNetwork.getNetworkByName(
dockerHost,
networkCreationDescriptor.Name,
);
} else {
logger.log('error', 'There has been an error creating the wanted network');
logger.log(
'error',
'There has been an error creating the wanted network',
);
return null;
}
}
@@ -75,7 +88,7 @@ export class DockerNetwork {
Subnet: string;
IPRange: string;
Gateway: string;
}
},
];
};
@@ -87,7 +100,10 @@ export class DockerNetwork {
* removes the network
*/
public async remove() {
const response = await this.dockerHost.request('DELETE', `/networks/${this.Id}`);
const response = await this.dockerHost.request(
'DELETE',
`/networks/${this.Id}`,
);
}
public async getContainersOnNetwork(): Promise<
@@ -100,7 +116,10 @@ export class DockerNetwork {
}>
> {
const returnArray = [];
const response = await this.dockerHost.request('GET', `/networks/${this.Id}`);
const response = await this.dockerHost.request(
'GET',
`/networks/${this.Id}`,
);
for (const key of Object.keys(response.body.Containers)) {
returnArray.push(response.body.Containers[key]);
}
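A short sketch of the network helpers reflowed above. Only the `Name` field of INetworkCreationDescriptor is visible in this diff, so any further fields are assumed optional; `host` is an existing DockerHost.

const network = await DockerNetwork.createNetwork(host, { Name: 'my-network' });
const found = await DockerNetwork.getNetworkByName(host, 'my-network');
await network.remove();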

View File

@@ -22,14 +22,17 @@ export class DockerSecret {
return secrets.find((secret) => secret.ID === idArg);
}
public static async getSecretByName(dockerHostArg: DockerHost, nameArg: string) {
public static async getSecretByName(
dockerHostArg: DockerHost,
nameArg: string,
) {
const secrets = await this.getSecrets(dockerHostArg);
return secrets.find((secret) => secret.Spec.Name === nameArg);
}
public static async createSecret(
dockerHostArg: DockerHost,
secretDescriptor: interfaces.ISecretCreationDescriptor
secretDescriptor: interfaces.ISecretCreationDescriptor,
) {
const labels: interfaces.TLabels = {
...secretDescriptor.labels,
@@ -45,7 +48,7 @@ export class DockerSecret {
Object.assign(newSecretInstance, response.body);
Object.assign(
newSecretInstance,
await DockerSecret.getSecretByID(dockerHostArg, newSecretInstance.ID)
await DockerSecret.getSecretByID(dockerHostArg, newSecretInstance.ID),
);
return newSecretInstance;
}
@@ -77,7 +80,7 @@ export class DockerSecret {
Name: this.Spec.Name,
Labels: this.Spec.Labels,
Data: plugins.smartstring.base64.encode(contentArg),
}
},
);
}
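A hedged sketch of creating and looking up a secret. Only the `labels` field of ISecretCreationDescriptor is visible in this diff; the name and content fields are illustrative placeholders, hence the loose cast.

const secret = await DockerSecret.createSecret(host, {
  name: 'demo-secret',        // field name assumed
  contentArg: 'super-secret', // field name assumed
  labels: { app: 'demo' },
} as any);
const found = await DockerSecret.getSecretByName(host, 'demo-secret');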

View File

@@ -21,7 +21,7 @@ export class DockerService {
public static async getServiceByName(
dockerHost: DockerHost,
networkName: string
networkName: string,
): Promise<DockerService> {
const allServices = await DockerService.getServices(dockerHost);
const wantedService = allServices.find((service) => {
@@ -35,10 +35,13 @@ export class DockerService {
*/
public static async createService(
dockerHost: DockerHost,
serviceCreationDescriptor: interfaces.IServiceCreationDescriptor
serviceCreationDescriptor: interfaces.IServiceCreationDescriptor,
): Promise<DockerService> {
// lets get the image
logger.log('info', `now creating service ${serviceCreationDescriptor.name}`);
logger.log(
'info',
`now creating service ${serviceCreationDescriptor.name}`,
);
// await serviceCreationDescriptor.image.pullLatestImageFromRegistry();
const serviceVersion = await serviceCreationDescriptor.image.getVersion();
@@ -71,8 +74,12 @@ export class DockerService {
});
}
if (serviceCreationDescriptor.resources && serviceCreationDescriptor.resources.volumeMounts) {
for (const volumeMount of serviceCreationDescriptor.resources.volumeMounts) {
if (
serviceCreationDescriptor.resources &&
serviceCreationDescriptor.resources.volumeMounts
) {
for (const volumeMount of serviceCreationDescriptor.resources
.volumeMounts) {
mounts.push({
Target: volumeMount.containerFsPath,
Source: volumeMount.hostFsPath,
@@ -130,7 +137,8 @@ export class DockerService {
// lets configure limits
const memoryLimitMB =
serviceCreationDescriptor.resources && serviceCreationDescriptor.resources.memorySizeMB
serviceCreationDescriptor.resources &&
serviceCreationDescriptor.resources.memorySizeMB
? serviceCreationDescriptor.resources.memorySizeMB
: 1000;
@@ -139,7 +147,8 @@ export class DockerService {
};
if (serviceCreationDescriptor.resources) {
limits.MemoryBytes = serviceCreationDescriptor.resources.memorySizeMB * 1000000;
limits.MemoryBytes =
serviceCreationDescriptor.resources.memorySizeMB * 1000000;
}
const response = await dockerHost.request('POST', '/services/create', {
@@ -182,7 +191,7 @@ export class DockerService {
const createdService = await DockerService.getServiceByName(
dockerHost,
serviceCreationDescriptor.name
serviceCreationDescriptor.name,
);
return createdService;
}
@@ -228,7 +237,10 @@ export class DockerService {
}
public async reReadFromDockerEngine() {
const dockerData = await this.dockerHostRef.request('GET', `/services/${this.ID}`);
const dockerData = await this.dockerHostRef.request(
'GET',
`/services/${this.ID}`,
);
// TODO: Better assign: Object.assign(this, dockerData);
}
@@ -236,14 +248,21 @@ export class DockerService {
// TODO: implement digest based update recognition
await this.reReadFromDockerEngine();
const dockerImage = await DockerImage.createFromRegistry(this.dockerHostRef, {
creationObject: {
imageUrl: this.Spec.TaskTemplate.ContainerSpec.Image,
}
});
const dockerImage = await DockerImage.createFromRegistry(
this.dockerHostRef,
{
creationObject: {
imageUrl: this.Spec.TaskTemplate.ContainerSpec.Image,
},
},
);
const imageVersion = new plugins.smartversion.SmartVersion(dockerImage.Labels.version);
const serviceVersion = new plugins.smartversion.SmartVersion(this.Spec.Labels.version);
const imageVersion = new plugins.smartversion.SmartVersion(
dockerImage.Labels.version,
);
const serviceVersion = new plugins.smartversion.SmartVersion(
this.Spec.Labels.version,
);
if (imageVersion.greaterThan(serviceVersion)) {
console.log(`service ${this.Spec.Name} needs to be updated`);
return true;
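Finally, a sketch of the service creation path shown above. The descriptor fields used here (`name`, `image`, `resources.memorySizeMB`) all appear in this diff; networks, ports, and the full descriptor type are not shown, so the cast below is a deliberate hedge.

const image = await DockerImage.createFromRegistry(host, {
  creationObject: { imageUrl: 'registry.gitlab.com/mygroup/myimage' },
});
const service = await DockerService.createService(host, {
  name: 'my-service',
  image,
  resources: { memorySizeMB: 512 },
} as any);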

View File

@@ -2,4 +2,4 @@ import * as plugins from './plugins.js';
import { commitinfo } from './00_commitinfo_data.js';
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
logger.enableConsole();
logger.enableConsole();

View File

@@ -2,7 +2,7 @@ import * as plugins from './plugins.js';
export const packageDir = plugins.path.resolve(
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
'../'
'../',
);
export const nogitDir = plugins.path.resolve(packageDir, '.nogit/');