Compare commits
22 Commits
SHA1
- 606c82dafa
- 9fc4afe4b8
- 90689c2645
- 4a1d649e5e
- 66bd36dc4f
- 349d711cc5
- c74a4bcd5b
- ff835c4160
- 05eceeb056
- de55beda08
- 9aa2b0c7be
- a283bbfba0
- 8a4e300581
- 6b0d96b745
- a08c11838f
- 7c5225125c
- bc4778f7db
- 2e7e8ae5cf
- 054585c7f5
- c0cebbe614
- 740f83114c
- e48023d490
changelog.md (new file, 164 lines)
@@ -0,0 +1,164 @@

# Changelog

## 2024-12-23 - 1.3.0 - feat(core)
Initial release of Docker client with TypeScript support

- Provides easy communication with Docker's remote API from Node.js
- Includes implementations for managing Docker services, networks, secrets, containers, and images

## 2024-12-23 - 1.2.8 - fix(core)
Improved the image creation process from tar stream in DockerImage class.

- Enhanced `DockerImage.createFromTarStream` method to handle streamed response and parse imported image details.
- Fixed the dependency version for `@push.rocks/smartarchive` in package.json.

## 2024-10-13 - 1.2.7 - fix(core)
Prepare patch release with minor fixes and improvements

## 2024-10-13 - 1.2.6 - fix(core)
Minor refactoring and code quality improvements.

## 2024-10-13 - 1.2.5 - fix(dependencies)
Update dependencies for stability improvements

- Updated @push.rocks/smartstream to version ^3.0.46
- Updated @push.rocks/tapbundle to version ^5.3.0
- Updated @types/node to version 22.7.5

## 2024-10-13 - 1.2.4 - fix(core)
Refactored DockerImageStore constructor to remove DockerHost dependency

- Adjusted DockerImageStore constructor to remove dependency on DockerHost
- Updated ts/classes.host.ts to align with DockerImageStore's new constructor signature

## 2024-08-21 - 1.2.3 - fix(dependencies)
Update dependencies to the latest versions and fix image export test

- Updated several dependencies to their latest versions in package.json.
- Enabled the previously skipped 'should export images' test.

## 2024-06-10 - 1.2.1-1.2.2 - Core/General
General updates and fixes.

- Fix core update

## 2024-06-10 - 1.2.0 - Core
Core updates and bug fixes.

- Fix core update

## 2024-06-08 - 1.2.0 - General/Core
Major release with core enhancements.

- Processing images with extraction, retagging, repackaging, and long-term storage

## 2024-06-06 - 1.1.4 - General/Imagestore
Significant feature addition.

- Add feature to process images with extraction, retagging, repackaging, and long-term storage

## 2024-05-08 - 1.0.112 - Images
Add new functionality for image handling.

- Can now import and export images
- Start work on local 100% JS OCI image registry

## 2024-06-05 - 1.1.0-1.1.3 - Core
Regular updates and fixes.

- Fix core update

## 2024-02-02 - 1.0.105-1.0.110 - Core
Routine core updates and fixes.

- Fix core update

## 2022-10-17 - 1.0.103-1.0.104 - Core
Routine core updates.

- Fix core update

## 2020-10-01 - 1.0.99-1.0.102 - Core
Routine core updates.

- Fix core update

## 2019-09-22 - 1.0.73-1.0.78 - Core
Routine updates and core fixes.

- Fix core update

## 2019-09-13 - 1.0.60-1.0.72 - Core
Routine updates and core fixes.

- Fix core update

## 2019-08-16 - 1.0.43-1.0.59 - Core
Routine updates and core fixes.

- Fix core update

## 2019-08-15 - 1.0.37-1.0.42 - Core
Routine updates and core fixes.

- Fix core update

## 2019-08-14 - 1.0.31-1.0.36 - Core
Routine updates and core fixes.

- Fix core update

## 2019-01-10 - 1.0.27-1.0.30 - Core
Routine updates and core fixes.

- Fix core update

## 2018-07-16 - 1.0.23-1.0.24 - Core
Routine updates and core fixes.

- Fix core shift to new style

## 2017-07-16 - 1.0.20-1.0.22 - General
Routine updates and fixes.

- Update node_modules within npmdocker

## 2017-04-02 - 1.0.18-1.0.19 - General
Routine updates and fixes.

- Work with npmdocker and npmts 7.x.x
- CI updates

## 2016-07-31 - 1.0.17 - General
Enhancements and fixes.

- Now waiting for response to be stored before ending streaming request
- Cosmetic fix

## 2016-07-29 - 1.0.14-1.0.16 - General
Multiple updates and features added.

- Fix request for change observable and add npmdocker
- Add request typings

## 2016-07-28 - 1.0.13 - Core
Fixes and preparations.

- Fixed request for newer docker
- Prepare for npmdocker

## 2016-06-16 - 1.0.0-1.0.2 - General
Initial sequence of releases, significant feature additions and CI setups.

- Implement container start and stop
- Implement list containers and related functions
- Add tests with in docker environment

## 2016-04-12 - unknown - Initial Commit
Initial project setup.

- Initial commit
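As a quick orientation for the 1.3.0 entry above, here is a minimal usage sketch against the API surface visible in this compare (DockerHost with its new start/stop lifecycle and image/network helpers). The import path comes from package.json; everything else is illustrative.

```typescript
import * as docker from '@apiclient.xyz/docker';

const main = async () => {
  const dockerHost = new docker.DockerHost({});
  await dockerHost.start(); // prepares the local image store (new in 1.3.0)

  const images = await dockerHost.getImages();
  console.log(`found ${images.length} images on the host`);

  // 'bridge' is just an example network name
  const network = await dockerHost.getNetworkByName('bridge');
  console.log(network ? 'bridge network found' : 'bridge network not found');

  await dockerHost.stop();
};

main();
```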
package.json (25 lines changed)

@@ -1,6 +1,6 @@
 {
   "name": "@apiclient.xyz/docker",
-  "version": "1.1.3",
+  "version": "1.3.0",
   "description": "Provides easy communication with Docker remote API from Node.js, with TypeScript support.",
   "private": false,
   "main": "dist_ts/index.js",
@@ -34,26 +34,29 @@
   "homepage": "https://gitlab.com/mojoio/docker#readme",
   "dependencies": {
     "@push.rocks/lik": "^6.0.15",
-    "@push.rocks/smartarchive": "^4.0.22",
+    "@push.rocks/smartarchive": "^4.0.39",
-    "@push.rocks/smartfile": "^11.0.16",
+    "@push.rocks/smartbucket": "^3.0.22",
+    "@push.rocks/smartfile": "^11.0.21",
     "@push.rocks/smartjson": "^5.0.20",
-    "@push.rocks/smartlog": "^3.0.6",
+    "@push.rocks/smartlog": "^3.0.7",
     "@push.rocks/smartnetwork": "^3.0.0",
     "@push.rocks/smartpath": "^5.0.18",
-    "@push.rocks/smartpromise": "^4.0.3",
+    "@push.rocks/smartpromise": "^4.0.4",
     "@push.rocks/smartrequest": "^2.0.22",
-    "@push.rocks/smartstream": "^3.0.44",
+    "@push.rocks/smartstream": "^3.0.46",
     "@push.rocks/smartstring": "^4.0.15",
+    "@push.rocks/smartunique": "^3.0.9",
     "@push.rocks/smartversion": "^3.0.5",
-    "@tsclass/tsclass": "^4.0.54",
+    "@tsclass/tsclass": "^4.1.2",
     "rxjs": "^7.5.7"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.80",
+    "@git.zone/tsbuild": "^2.1.84",
-    "@git.zone/tsrun": "^1.2.12",
+    "@git.zone/tsrun": "^1.2.49",
     "@git.zone/tstest": "^1.0.90",
-    "@push.rocks/tapbundle": "^5.0.23",
-    "@types/node": "20.14.1"
+    "@push.rocks/qenv": "^6.0.5",
+    "@push.rocks/tapbundle": "^5.3.0",
+    "@types/node": "22.7.5"
   },
   "files": [
     "ts/**/*",
pnpm-lock.yaml (generated, 2373 lines changed)
File diff suppressed because it is too large.
qenv.yml (new file, 6 lines)
@@ -0,0 +1,6 @@

required:
- S3_ENDPOINT
- S3_ACCESSKEY
- S3_ACCESSSECRET
- S3_BUCKET
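The four required variables above feed the S3 descriptor that `DockerHost.addS3Storage()` expects, as the test diff below shows. A minimal sketch, assuming the values are supplied via plain environment variables rather than qenv:

```typescript
// illustrative only; the test below resolves the same names through @push.rocks/qenv
const s3Descriptor = {
  endpoint: process.env.S3_ENDPOINT,
  accessKey: process.env.S3_ACCESSKEY,
  accessSecret: process.env.S3_ACCESSSECRET,
  bucketName: process.env.S3_BUCKET,
};
```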
@@ -1,4 +1,7 @@
 import { expect, tap } from '@push.rocks/tapbundle';
+import { Qenv } from '@push.rocks/qenv';
+
+const testQenv = new Qenv('./', './.nogit/');
+
 import * as plugins from '../ts/plugins.js';
 import * as paths from '../ts/paths.js';
@@ -9,6 +12,7 @@ let testDockerHost: docker.DockerHost;
 tap.test('should create a new Dockersock instance', async () => {
   testDockerHost = new docker.DockerHost({});
+  await testDockerHost.start();
   return expect(testDockerHost).toBeInstanceOf(docker.DockerHost);
 });
 
@@ -146,6 +150,20 @@ tap.test('should import images', async (toolsArg) => {
     imageUrl: 'code.foss.global/host.today/ht-docker-node:latest',
   }
 })
+});
+
+tap.test('should expose a working DockerImageStore', async () => {
+  // lets first add am s3 target
+  const s3Descriptor = {
+    endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
+    accessKey: await testQenv.getEnvVarOnDemand('S3_ACCESSKEY'),
+    accessSecret: await testQenv.getEnvVarOnDemand('S3_ACCESSSECRET'),
+    bucketName: await testQenv.getEnvVarOnDemand('S3_BUCKET'),
+  };
+  await testDockerHost.addS3Storage(s3Descriptor);
+
+  //
+  await testDockerHost.imageStore.storeImage('hello', plugins.smartfile.fsStream.createReadStream(plugins.path.join(paths.nogitDir, 'testimage.tar')));
+})
 
 export default tap.start();
@@ -1,8 +1,8 @@
 /**
- * autocreated commitinfo by @pushrocks/commitinfo
+ * autocreated commitinfo by @push.rocks/commitinfo
 */
 export const commitinfo = {
   name: '@apiclient.xyz/docker',
-  version: '1.1.3',
+  version: '1.3.0',
   description: 'Provides easy communication with Docker remote API from Node.js, with TypeScript support.'
 }
@@ -2,7 +2,7 @@ import * as plugins from './plugins.js';
 import * as interfaces from './interfaces/index.js';
 
 import { DockerHost } from './classes.host.js';
-import { logger } from './logging.js';
+import { logger } from './logger.js';
 
 export class DockerContainer {
   // STATIC
@@ -1,10 +1,12 @@
 import * as plugins from './plugins.js';
+import * as paths from './paths.js';
 import { DockerContainer } from './classes.container.js';
 import { DockerNetwork } from './classes.network.js';
 import { DockerService } from './classes.service.js';
-import { logger } from './logging.js';
+import { logger } from './logger.js';
 import path from 'path';
-import type { DockerImageStore } from './classes.imagestore.js';
+import { DockerImageStore } from './classes.imagestore.js';
+import { DockerImage } from './classes.image.js';
 
 export interface IAuthData {
   serveraddress: string;
@@ -18,18 +20,27 @@ export interface IDockerHostConstructorOptions {
 }
 
 export class DockerHost {
+  public options: IDockerHostConstructorOptions;
+
   /**
    * the path where the docker sock can be found
    */
   public socketPath: string;
   private registryToken: string = '';
   public imageStore: DockerImageStore;
+  public smartBucket: plugins.smartbucket.SmartBucket;
 
   /**
    * the constructor to instantiate a new docker sock instance
    * @param pathArg
   */
   constructor(optionsArg: IDockerHostConstructorOptions) {
+    this.options = {
+      ...{
+        imageStoreDir: plugins.path.join(paths.nogitDir, 'temp-docker-image-store'),
+      },
+      ...optionsArg,
+    }
     let pathToUse: string;
     if (optionsArg.dockerSockPath) {
       pathToUse = optionsArg.dockerSockPath;
@@ -48,6 +59,17 @@ export class DockerHost {
     }
     console.log(`using docker sock at ${pathToUse}`);
     this.socketPath = pathToUse;
+    this.imageStore = new DockerImageStore({
+      bucketDir: null,
+      localDirPath: this.options.imageStoreDir,
+    })
+  }
+
+  public async start() {
+    await this.imageStore.start();
+  }
+  public async stop() {
+    await this.imageStore.stop();
   }
 
   /**
@@ -81,6 +103,9 @@ export class DockerHost {
     });
   }
 
+  // ==============
+  // NETWORKS
+  // ==============
   /**
    * gets all networks
    */
@@ -89,9 +114,23 @@ export class DockerHost {
   }
 
   /**
-   *
+   * create a network
   */
+  public async createNetwork(optionsArg: Parameters<typeof DockerNetwork.createNetwork>[1]) {
+    return await DockerNetwork.createNetwork(this, optionsArg);
+  }
+
+  /**
+   * get a network by name
+   */
+  public async getNetworkByName(networkNameArg: string) {
+    return await DockerNetwork.getNetworkByName(this, networkNameArg);
+  }
+
+
+  // ==============
+  // CONTAINERS
+  // ==============
   /**
    * gets all containers
    */
@@ -100,6 +139,10 @@ export class DockerHost {
     return containerArray;
   }
 
+  // ==============
+  // SERVICES
+  // ==============
+
   /**
    * gets all services
    */
@@ -108,6 +151,24 @@ export class DockerHost {
     return serviceArray;
   }
 
+  // ==============
+  // IMAGES
+  // ==============
+
+  /**
+   * get all images
+   */
+  public async getImages() {
+    return await DockerImage.getImages(this);
+  }
+
+  /**
+   * get an image by name
+   */
+  public async getImageByName(imageNameArg: string) {
+    return await DockerImage.getImageByName(this, imageNameArg);
+  }
+
   /**
    *
   */
@@ -214,4 +275,21 @@ export class DockerHost {
     console.log(response.body);
     return response;
   }
+
+  /**
+   * add s3 storage
+   * @param optionsArg
+   */
+  public async addS3Storage(optionsArg: plugins.tsclass.storage.IS3Descriptor) {
+    this.smartBucket = new plugins.smartbucket.SmartBucket(optionsArg);
+    if (!optionsArg.bucketName) {
+      throw new Error('bucketName is required');
+    }
+    const bucket = await this.smartBucket.getBucketByName(optionsArg.bucketName);
+    let wantedDirectory = await bucket.getBaseDirectory();
+    if (optionsArg.directoryPath) {
+      wantedDirectory = await wantedDirectory.getSubDirectoryByName(optionsArg.directoryPath);
+    }
+    this.imageStore.options.bucketDir = wantedDirectory;
+  }
 }
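A short, hedged sketch of the DockerHost additions in the hunks above (the start/stop lifecycle, the network/image convenience methods, and addS3Storage). The image tag, network name, and S3 values are placeholders, and the descriptor shape mirrors the test shown earlier.

```typescript
import { DockerHost } from './classes.host.js';

const demo = async () => {
  const host = new DockerHost({});
  await host.start(); // initializes the DockerImageStore working directory

  const someImage = await host.getImageByName('myrepo/myimage:latest'); // placeholder tag
  const defaultNetwork = await host.getNetworkByName('bridge');
  console.log(someImage?.RepoTags, defaultNetwork ? 'network found' : 'no such network');

  // route the image store's long-term storage to an S3 bucket (placeholder credentials)
  await host.addS3Storage({
    endpoint: 's3.example.com',
    accessKey: 'ACCESS_KEY',
    accessSecret: 'ACCESS_SECRET',
    bucketName: 'docker-image-store',
  });

  await host.stop();
};
```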
@@ -1,8 +1,11 @@
 import * as plugins from './plugins.js';
 import * as interfaces from './interfaces/index.js';
 import { DockerHost } from './classes.host.js';
-import { logger } from './logging.js';
+import { logger } from './logger.js';
 
+/**
+ * represents a docker image on the remote docker host
+ */
 export class DockerImage {
   // STATIC
   public static async getImages(dockerHost: DockerHost) {
@@ -14,7 +17,7 @@ export class DockerImage {
     return images;
   }
 
-  public static async findImageByName(dockerHost: DockerHost, imageNameArg: string) {
+  public static async getImageByName(dockerHost: DockerHost, imageNameArg: string) {
     const images = await this.getImages(dockerHost);
     const result = images.find((image) => {
       if (image.RepoTags) {
@@ -67,7 +70,7 @@ export class DockerImage {
     );
     if (response.statusCode < 300) {
       logger.log('info', `Successfully pulled image ${imageUrlObject.imageUrl} from the registry`);
-      const image = await DockerImage.findImageByName(dockerHostArg, imageUrlObject.imageOriginTag);
+      const image = await DockerImage.getImageByName(dockerHostArg, imageUrlObject.imageOriginTag);
       return image;
     } else {
       logger.log('error', `Failed at the attempt of creating a new image`);
@@ -79,14 +82,94 @@ export class DockerImage {
   * @param dockerHostArg
   * @param tarStreamArg
   */
-  public static async createFromTarStream(dockerHostArg: DockerHost, optionsArg: {
-    creationObject: interfaces.IImageCreationDescriptor,
-    tarStream: plugins.smartstream.stream.Readable,
-  }) {
-    const response = await dockerHostArg.requestStreaming('POST', '/images/load', optionsArg.tarStream);
-    return response;
+  public static async createFromTarStream(
+    dockerHostArg: DockerHost,
+    optionsArg: {
+      creationObject: interfaces.IImageCreationDescriptor;
+      tarStream: plugins.smartstream.stream.Readable;
+    }
+  ): Promise<DockerImage> {
+    // Start the request for importing an image
+    const response = await dockerHostArg.requestStreaming(
+      'POST',
+      '/images/load',
+      optionsArg.tarStream
+    );
+
+    /**
+     * Docker typically returns lines like:
+     *   {"stream":"Loaded image: myrepo/myimage:latest"}
+     *
+     * So we will collect those lines and parse out the final image name.
+     */
+    let rawOutput = '';
+    response.on('data', (chunk) => {
+      rawOutput += chunk.toString();
+    });
+
+    // Wrap the end event in a Promise for easier async/await usage
+    await new Promise<void>((resolve, reject) => {
+      response.on('end', () => {
+        resolve();
+      });
+      response.on('error', (err) => {
+        reject(err);
+      });
+    });
+
+    // Attempt to parse each line to find something like "Loaded image: ..."
+    let loadedImageTag: string | undefined;
+    const lines = rawOutput.trim().split('\n').filter(Boolean);
+
+    for (const line of lines) {
+      try {
+        const jsonLine = JSON.parse(line);
+        if (
+          jsonLine.stream &&
+          (jsonLine.stream.startsWith('Loaded image:') ||
+            jsonLine.stream.startsWith('Loaded image ID:'))
+        ) {
+          // Examples:
+          //   "Loaded image: your-image:latest"
+          //   "Loaded image ID: sha256:...."
+          loadedImageTag = jsonLine.stream
+            .replace('Loaded image: ', '')
+            .replace('Loaded image ID: ', '')
+            .trim();
+        }
+      } catch {
+        // not valid JSON, ignore
+      }
+    }
+
+    if (!loadedImageTag) {
+      throw new Error(
+        `Could not parse the loaded image info from Docker response.\nResponse was:\n${rawOutput}`
+      );
+    }
+
+    // Now try to look up that image by the "loadedImageTag".
+    // Depending on Docker’s response, it might be something like:
+    //   "myrepo/myimage:latest" OR "sha256:someHash..."
+    // If Docker gave you an ID (e.g. "sha256:..."), you may need a separate
+    // DockerImage.getImageById method; or if you prefer, you can treat it as a name.
+    const newlyImportedImage = await DockerImage.getImageByName(dockerHostArg, loadedImageTag);
+
+    if (!newlyImportedImage) {
+      throw new Error(
+        `Image load succeeded, but no local reference found for "${loadedImageTag}".`
+      );
+    }
+
+    logger.log(
+      'info',
+      `Successfully imported image "${loadedImageTag}".`
+    );
+
+    return newlyImportedImage;
   }
 
 
   public static async tagImageByIdOrName(
     dockerHost: DockerHost,
     idOrNameArg: string,
@@ -96,6 +179,8 @@ export class DockerImage {
       'POST',
       `/images/${encodeURIComponent(idOrNameArg)}/${encodeURIComponent(newTagArg)}`
     );
+
+
   }
 
   public static async buildImage(dockerHostArg: DockerHost, dockerImageTag) {
@@ -163,7 +248,7 @@ export class DockerImage {
   * exports an image to a tar ball
   */
   public async exportToTarStream(): Promise<plugins.smartstream.stream.Readable> {
-    console.log(`Exporting image ${this.RepoTags[0]} to tar stream.`);
+    logger.log('info', `Exporting image ${this.RepoTags[0]} to tar stream.`);
     const response = await this.dockerHost.requestStreaming('GET', `/images/${encodeURIComponent(this.RepoTags[0])}/get`);
     let counter = 0;
     const webduplexStream = new plugins.smartstream.SmartDuplex({
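To illustrate the reworked `createFromTarStream` above: it now consumes the streamed `/images/load` response, extracts the `Loaded image:` line, and resolves to a `DockerImage`. A hedged sketch; the tar path is illustrative and the `creationObject` fields are assumptions, since `IImageCreationDescriptor` is not shown in this diff.

```typescript
import * as fs from 'fs';

const importFromTar = async (dockerHost: DockerHost) => {
  const tarStream = fs.createReadStream('./.nogit/testimage.tar'); // illustrative path
  const importedImage = await DockerImage.createFromTarStream(dockerHost, {
    creationObject: { imageUrl: 'myrepo/myimage:latest' } as any, // shape assumed
    tarStream,
  });
  console.log(importedImage.RepoTags); // e.g. ['myrepo/myimage:latest']
};
```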
@@ -1,36 +1,109 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
+import { logger } from './logger.js';
 import type { DockerHost } from './classes.host.js';
 
 export interface IDockerImageStoreConstructorOptions {
-  dirPath: string;
+  /**
+   * used for preparing images for longer term storage
+   */
+  localDirPath: string;
+  /**
+   * a smartbucket dir for longer term storage.
+   */
+  bucketDir: plugins.smartbucket.Directory;
 }
 
 export class DockerImageStore {
   public options: IDockerImageStoreConstructorOptions;
 
-  constructor(dockerHost: DockerHost, optionsArg: IDockerImageStoreConstructorOptions) {
+  constructor(optionsArg: IDockerImageStoreConstructorOptions) {
     this.options = optionsArg;
   }
 
   // Method to store tar stream
   public async storeImage(imageName: string, tarStream: plugins.smartstream.stream.Readable): Promise<void> {
-    const imagePath = plugins.path.join(this.options.dirPath, `${imageName}.tar`);
+    logger.log('info', `Storing image ${imageName}...`);
+    const uniqueProcessingId = plugins.smartunique.shortId();
+
+    const initialTarDownloadPath = plugins.path.join(this.options.localDirPath, `${uniqueProcessingId}.tar`);
+    const extractionDir = plugins.path.join(this.options.localDirPath, uniqueProcessingId);
     // Create a write stream to store the tar file
-    const writeStream = plugins.smartfile.fsStream.createWriteStream(imagePath);
+    const writeStream = plugins.smartfile.fsStream.createWriteStream(initialTarDownloadPath);
 
-    return new Promise((resolve, reject) => {
+    // lets wait for the write stream to finish
+    await new Promise((resolve, reject) => {
       tarStream.pipe(writeStream);
 
       writeStream.on('finish', resolve);
       writeStream.on('error', reject);
     });
+    logger.log('info', `Image ${imageName} stored locally for processing. Extracting...`);
+
+    // lets process the image
+    const tarArchive = await plugins.smartarchive.SmartArchive.fromArchiveFile(initialTarDownloadPath);
+    await tarArchive.exportToFs(extractionDir);
+    logger.log('info', `Image ${imageName} extracted.`);
+    await plugins.smartfile.fs.remove(initialTarDownloadPath);
+    logger.log('info', `deleted original tar to save space.`);
+    logger.log('info', `now repackaging for s3...`);
+    const smartfileIndexJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'index.json'));
+    const smartfileManifestJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'manifest.json'));
+    const smartfileOciLayoutJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'oci-layout'));
+    const smartfileRepositoriesJson = await plugins.smartfile.SmartFile.fromFilePath(plugins.path.join(extractionDir, 'repositories'));
+    const indexJson = JSON.parse(smartfileIndexJson.contents.toString());
+    const manifestJson = JSON.parse(smartfileManifestJson.contents.toString());
+    const ociLayoutJson = JSON.parse(smartfileOciLayoutJson.contents.toString());
+    const repositoriesJson = JSON.parse(smartfileRepositoriesJson.contents.toString());
+
+    indexJson.manifests[0].annotations['io.containerd.image.name'] = imageName;
+    manifestJson[0].RepoTags[0] = imageName;
+    const repoFirstKey = Object.keys(repositoriesJson)[0];
+    const repoFirstValue = repositoriesJson[repoFirstKey];
+    repositoriesJson[imageName] = repoFirstValue;
+    delete repositoriesJson[repoFirstKey];
+
+    smartfileIndexJson.contents = Buffer.from(JSON.stringify(indexJson, null, 2));
+    smartfileManifestJson.contents = Buffer.from(JSON.stringify(manifestJson, null, 2));
+    smartfileOciLayoutJson.contents = Buffer.from(JSON.stringify(ociLayoutJson, null, 2));
+    smartfileRepositoriesJson.contents = Buffer.from(JSON.stringify(repositoriesJson, null, 2));
+    await Promise.all([
+      smartfileIndexJson.write(),
+      smartfileManifestJson.write(),
+      smartfileOciLayoutJson.write(),
+      smartfileRepositoriesJson.write(),
+    ]);
+
+    logger.log('info', 'repackaging archive for s3...');
+    const tartools = new plugins.smartarchive.TarTools();
+    const newTarPack = await tartools.packDirectory(extractionDir);
+    const finalTarName = `${uniqueProcessingId}.processed.tar`;
+    const finalTarPath = plugins.path.join(this.options.localDirPath, finalTarName);
+    const finalWriteStream = plugins.smartfile.fsStream.createWriteStream(finalTarPath);
+    await new Promise((resolve, reject) => {
+      newTarPack.finalize();
+      newTarPack.pipe(finalWriteStream);
+      finalWriteStream.on('finish', resolve);
+      finalWriteStream.on('error', reject);
+    });
+    logger.log('ok', `Repackaged image ${imageName} for s3.`);
+    await plugins.smartfile.fs.remove(extractionDir);
+    const finalTarReadStream = plugins.smartfile.fsStream.createReadStream(finalTarPath);
+    await this.options.bucketDir.fastPutStream({
+      stream: finalTarReadStream,
+      path: `${imageName}.tar`,
+    });
+    await plugins.smartfile.fs.remove(finalTarPath);
   }
 
+  public async start() {
+    await plugins.smartfile.fs.ensureEmptyDir(this.options.localDirPath);
+  }
+
+  public async stop() {}
+
   // Method to retrieve tar stream
   public async getImage(imageName: string): Promise<plugins.smartstream.stream.Readable> {
-    const imagePath = plugins.path.join(this.options.dirPath, `${imageName}.tar`);
+    const imagePath = plugins.path.join(this.options.localDirPath, `${imageName}.tar`);
 
     if (!(await plugins.smartfile.fs.fileExists(imagePath))) {
       throw new Error(`Image ${imageName} does not exist.`);
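And a sketch of using the refactored `DockerImageStore` directly (its constructor no longer takes a `DockerHost`). This assumes `bucketDir` is a `@push.rocks/smartbucket` Directory obtained elsewhere, which `DockerHost.addS3Storage()` wires up in practice; the paths are illustrative.

```typescript
import * as fs from 'fs';
import * as plugins from './plugins.js';
import { DockerImageStore } from './classes.imagestore.js';

const storeExample = async (bucketDir: plugins.smartbucket.Directory) => {
  const imageStore = new DockerImageStore({
    localDirPath: './.nogit/temp-docker-image-store', // illustrative path
    bucketDir,
  });
  await imageStore.start(); // ensures an empty local working directory

  // extracts the tar, rewrites the OCI metadata to the target name,
  // repackages it, and uploads the result to bucketDir as "hello.tar"
  await imageStore.storeImage('hello', fs.createReadStream('./.nogit/testimage.tar'));
};
```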
@@ -3,7 +3,7 @@ import * as interfaces from './interfaces/index.js';
 
 import { DockerHost } from './classes.host.js';
 import { DockerService } from './classes.service.js';
-import { logger } from './logging.js';
+import { logger } from './logger.js';
 
 export class DockerNetwork {
   public static async getNetworks(dockerHost: DockerHost): Promise<DockerNetwork[]> {
@@ -4,7 +4,7 @@ import * as interfaces from './interfaces/index.js';
 import { DockerHost } from './classes.host.js';
 import { DockerImage } from './classes.image.js';
 import { DockerSecret } from './classes.secret.js';
-import { logger } from './logging.js';
+import { logger } from './logger.js';
 
 export class DockerService {
   // STATIC
ts/logger.ts (new file, 5 lines)
@@ -0,0 +1,5 @@

import * as plugins from './plugins.js';
import { commitinfo } from './00_commitinfo_data.js';

export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
logger.enableConsole();
@@ -1,3 +0,0 @@
-import * as plugins from './plugins.js';
-
-export const logger = new plugins.smartlog.ConsoleLog();
@@ -5,6 +5,8 @@ export { path };
 
 // @pushrocks scope
 import * as lik from '@push.rocks/lik';
+import * as smartarchive from '@push.rocks/smartarchive';
+import * as smartbucket from '@push.rocks/smartbucket';
 import * as smartfile from '@push.rocks/smartfile';
 import * as smartjson from '@push.rocks/smartjson';
 import * as smartlog from '@push.rocks/smartlog';
@@ -14,10 +16,13 @@ import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrequest from '@push.rocks/smartrequest';
 import * as smartstring from '@push.rocks/smartstring';
 import * as smartstream from '@push.rocks/smartstream';
+import * as smartunique from '@push.rocks/smartunique';
 import * as smartversion from '@push.rocks/smartversion';
 
 export {
   lik,
+  smartarchive,
+  smartbucket,
   smartfile,
   smartjson,
   smartlog,
@@ -27,6 +32,7 @@ export {
   smartrequest,
   smartstring,
   smartstream,
+  smartunique,
   smartversion,
 };
|
Loading…
x
Reference in New Issue
Block a user