2 Commits

Author SHA1 Message Date
jkunz 2846e5fdb5 v1.1.1 2026-05-01 15:32:06 +00:00
jkunz a4950ef358 fix(mongodb): modernize MongoDB dump handling and filesystem integration 2026-05-01 15:32:05 +00:00
15 changed files with 2672 additions and 13639 deletions
+39
View File
@@ -0,0 +1,39 @@
{
"@git.zone/cli": {
"projectType": "npm",
"module": {
"githost": "code.foss.global",
"gitscope": "push.rocks",
"gitrepo": "mongodump",
"description": "A tool to create and manage dumps of MongoDB databases, supporting data export and import.",
"npmPackagename": "@push.rocks/mongodump",
"license": "MIT",
"keywords": [
"mongodb",
"database backup",
"data dump",
"database restore",
"mongodb export",
"mongodb import",
"database management",
"mongodb management",
"data backup",
"data recovery"
]
},
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
},
"@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@ship.zone/szci": {
"npmGlobalTools": [],
"npmRegistryUrl": "registry.npmjs.org"
}
}
+7
View File
@@ -1,5 +1,12 @@
# Changelog
## 2026-05-01 - 1.1.1 - fix(mongodb)
modernize MongoDB dump handling and filesystem integration
- replace smartdata-based test fixture generation with direct MongoDB client inserts
- migrate dump output from smartfile to smartfs and clean target directories safely before export
- improve descriptor handling and shutdown logic with optional credentials, stricter typing, and parallel client close
## 2025-08-18 - 1.1.0 - feat(MongoDumpTarget)
Implement core MongoDumpTarget methods and update documentation & project configs
+3 -1
View File
@@ -1,4 +1,6 @@
Copyright (c) 2022 Lossless GmbH (hello@lossless.com)
The MIT License (MIT)
Copyright (c) 2026 Task Venture Capital GmbH
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
+13 -7
View File
@@ -1,5 +1,5 @@
{
"gitzone": {
"@git.zone/cli": {
"projectType": "npm",
"module": {
"githost": "code.foss.global",
@@ -8,7 +8,6 @@
"description": "A tool to create and manage dumps of MongoDB databases, supporting data export and import.",
"npmPackagename": "@push.rocks/mongodump",
"license": "MIT",
"projectDomain": "push.rocks",
"keywords": [
"mongodb",
"database backup",
@@ -21,13 +20,20 @@
"data backup",
"data recovery"
]
},
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
},
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"tsdoc": {
"@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@ship.zone/szci": {
"npmGlobalTools": [],
"npmRegistryUrl": "registry.npmjs.org"
}
}
-10943
View File
File diff suppressed because it is too large. [Load Diff]
+46 -41
View File
@@ -1,49 +1,22 @@
{
"name": "@push.rocks/mongodump",
"version": "1.1.0",
"version": "1.1.1",
"private": false,
"description": "A tool to create and manage dumps of MongoDB databases, supporting data export and import.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
"author": "Lossless GmbH",
"author": "Task Venture Capital GmbH <hello@task.vc>",
"license": "MIT",
"scripts": {
"test": "(tstest test/ --verbose)",
"build": "(tsbuild --web --allowimplicitany)"
"test": "tstest test/ --verbose",
"build": "tsbuild --web",
"format": "gitzone format",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.1.25",
"@git.zone/tsbundle": "^2.0.5",
"@git.zone/tsrun": "^1.3.3",
"@git.zone/tstest": "^2.3.4",
"@push.rocks/smartdata": "^5.0.5",
"@push.rocks/smartmongo": "^2.0.3",
"@types/node": "^22.0.0"
},
"browserslist": [
"last 1 chrome versions"
],
"files": [
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
"readme.md"
],
"dependencies": {
"@push.rocks/lik": "^6.0.0",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartjson": "^5.0.6",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.0.2",
"@tsclass/tsclass": "^9.2.0",
"mongodb": "^6.18.0"
"repository": {
"type": "git",
"url": "https://code.foss.global/push.rocks/mongodump.git"
},
"keywords": [
"mongodb",
@@ -57,10 +30,42 @@
"data backup",
"data recovery"
],
"homepage": "https://code.foss.global/push.rocks/mongodump",
"repository": {
"type": "git",
"url": "https://code.foss.global/push.rocks/mongodump.git"
"bugs": {
"url": "https://gitlab.com/push.rocks/mongodump/issues"
},
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
"homepage": "https://code.foss.global/push.rocks/mongodump",
"devDependencies": {
"@git.zone/tsbuild": "^4.4.0",
"@git.zone/tsrun": "^2.0.3",
"@git.zone/tstest": "^3.6.3",
"@push.rocks/smartmongo": "^7.0.0",
"@types/lodash.clonedeep": "^4.5.9",
"@types/node": "^25.6.0"
},
"browserslist": [
"last 1 chrome versions"
],
"files": [
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
".smartconfig.json",
"license",
"npmextra.json",
"readme.md"
],
"dependencies": {
"@push.rocks/lik": "^6.4.1",
"@push.rocks/smartfs": "^1.5.1",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@tsclass/tsclass": "^9.5.1",
"mongodb": "^7.2.0"
},
"packageManager": "pnpm@10.28.2"
}
+2459 -2554
View File
File diff suppressed because it is too large. [Load Diff]
+2
View File
@@ -1,2 +1,4 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer
+44 -41
View File
@@ -1,49 +1,52 @@
import * as smartdata from '@push.rocks/smartdata';
import * as mongodb from 'mongodb';
import type * as tsclass from '@tsclass/tsclass';
let db: smartdata.SmartdataDb;
let mongoClient: mongodb.MongoClient | undefined;
export const stop = async () => {
await db.close();
}
await mongoClient?.close();
mongoClient = undefined;
};
export const generateTestData = async (mongoDescriptorArg: smartdata.IMongoDescriptor) => {
db = new smartdata.SmartdataDb(mongoDescriptorArg);
await db.init();
let counter = 0;
export const generateTestData = async (mongoDescriptorArg: tsclass.database.IMongoDescriptor) => {
const finalConnectionUrl = mongoDescriptorArg.mongoDbUrl
.replace('<USERNAME>', mongoDescriptorArg.mongoDbUser ?? '')
.replace('<username>', mongoDescriptorArg.mongoDbUser ?? '')
.replace('<USER>', mongoDescriptorArg.mongoDbUser ?? '')
.replace('<user>', mongoDescriptorArg.mongoDbUser ?? '')
.replace('<PASSWORD>', mongoDescriptorArg.mongoDbPass ?? '')
.replace('<password>', mongoDescriptorArg.mongoDbPass ?? '')
.replace('<DBNAME>', mongoDescriptorArg.mongoDbName ?? '')
.replace('<dbname>', mongoDescriptorArg.mongoDbName ?? '');
mongoClient = await mongodb.MongoClient.connect(finalConnectionUrl);
const db = mongoClient.db(mongoDescriptorArg.mongoDbName);
const houseCollection = db.collection('House');
const truckCollection = db.collection('Truck');
@smartdata.Collection(db)
class House extends smartdata.SmartDataDbDoc<House, House> {
@smartdata.unI()
id = `hello-${counter}`;
const houseDocs: mongodb.OptionalUnlessRequiredId<mongodb.Document>[] = [];
const truckDocs: mongodb.OptionalUnlessRequiredId<mongodb.Document>[] = [];
@smartdata.svDb()
data = {
'some' : {
'complex': 'structure',
more: 4
}
}
for (let counter = 0; counter < 100; counter++) {
houseDocs.push({
id: `hello-${counter}`,
data: {
some: {
complex: 'structure',
more: 4,
},
},
});
truckDocs.push({
id: `hello-${counter}`,
data: {
some: {
complex: 'structure',
more: 2,
},
},
});
}
@smartdata.Collection(db)
class Truck extends smartdata.SmartDataDbDoc<Truck, Truck> {
@smartdata.unI()
id = `hello-${counter}`;
@smartdata.svDb()
data = {
'some' : {
'complex': 'structure',
more: 2
}
}
}
while (counter < 100) {
const house = new House();
await house.save();
const truck = new Truck();
await truck.save();
counter++;
}
}
await houseCollection.insertMany(houseDocs);
await truckCollection.insertMany(truckDocs);
};
+4 -4
View File
@@ -17,8 +17,8 @@ tap.test('should create a mongodump instance', async () => {
});
tap.test('should deploy sample data', async () => {
await sampledata.generateTestData(await testSmartMongo.getMongoDescriptor())
})
await sampledata.generateTestData(await testSmartMongo.getMongoDescriptor());
});
tap.test('should add a mongotarget to mongodump instance', async () => {
const target = await testMongodump.addMongoTargetByMongoDescriptor(await testSmartMongo.getMongoDescriptor());
@@ -27,8 +27,8 @@ tap.test('should add a mongotarget to mongodump instance', async () => {
tap.test('should dump a collection to a directory', async () => {
const target = await testMongodump.addMongoTargetByMongoDescriptor(await testSmartMongo.getMongoDescriptor());
await target.dumpAllCollectionsToDir('.nogit', docArg => docArg.id, true);
})
await target.dumpAllCollectionsToDir('.nogit', (docArg) => String(docArg.id), true);
});
tap.test('should stop the smartmongo instance', async () => {
await sampledata.stop();
+1 -1
View File
@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/mongodump',
version: '1.1.0',
version: '1.1.1',
description: 'A tool to create and manage dumps of MongoDB databases, supporting data export and import.'
}
+3 -3
View File
@@ -15,8 +15,8 @@ export class MongoDump {
}
public async stop() {
await this.mongoTargetObjectMap.forEach(async (mongoTargetArg) => {
await mongoTargetArg.mongoDbClient.close();
})
await Promise.all(
this.mongoTargetObjectMap.getArray().map((mongoTargetArg) => mongoTargetArg.mongoDbClient.close())
);
}
}
+34 -25
View File
@@ -1,5 +1,8 @@
import * as plugins from './mongodump.plugins.js';
type TMongoDumpDocument = plugins.mongodb.WithId<plugins.mongodb.Document>;
export type TMongoDumpNameTransform = (docArg: TMongoDumpDocument) => string;
/**
* a MongoDump Target is a pointer to a database
* + exposes functions to interact with the dump target
@@ -11,10 +14,12 @@ export class MongoDumpTarget {
return mongoDumpTarget;
}
public readyDeferred = plugins.smartpromise.defer();
public readyDeferred = plugins.smartpromise.defer<void>();
public mongoDescriptor: plugins.tsclass.database.IMongoDescriptor;
public mongoDbClient: plugins.mongodb.MongoClient;
public mongoDb: plugins.mongodb.Db;
public mongoDbClient!: plugins.mongodb.MongoClient;
public mongoDb!: plugins.mongodb.Db;
public smartFs = new plugins.smartfs.SmartFs(new plugins.smartfs.SmartFsProviderNode());
constructor(mongoDescriptorArg: plugins.tsclass.database.IMongoDescriptor) {
this.mongoDescriptor = mongoDescriptorArg;
}
@@ -22,16 +27,16 @@ export class MongoDumpTarget {
/**
* connects to the database that was specified during instance creation
*/
public async init(): Promise<any> {
public async init(): Promise<void> {
const finalConnectionUrl = this.mongoDescriptor.mongoDbUrl
.replace('<USERNAME>', this.mongoDescriptor.mongoDbUser)
.replace('<username>', this.mongoDescriptor.mongoDbUser)
.replace('<USER>', this.mongoDescriptor.mongoDbUser)
.replace('<user>', this.mongoDescriptor.mongoDbUser)
.replace('<PASSWORD>', this.mongoDescriptor.mongoDbPass)
.replace('<password>', this.mongoDescriptor.mongoDbPass)
.replace('<DBNAME>', this.mongoDescriptor.mongoDbName)
.replace('<dbname>', this.mongoDescriptor.mongoDbName);
.replace('<USERNAME>', this.mongoDescriptor.mongoDbUser ?? '')
.replace('<username>', this.mongoDescriptor.mongoDbUser ?? '')
.replace('<USER>', this.mongoDescriptor.mongoDbUser ?? '')
.replace('<user>', this.mongoDescriptor.mongoDbUser ?? '')
.replace('<PASSWORD>', this.mongoDescriptor.mongoDbPass ?? '')
.replace('<password>', this.mongoDescriptor.mongoDbPass ?? '')
.replace('<DBNAME>', this.mongoDescriptor.mongoDbName ?? '')
.replace('<dbname>', this.mongoDescriptor.mongoDbName ?? '');
this.mongoDbClient = await plugins.mongodb.MongoClient.connect(finalConnectionUrl, {
maxPoolSize: 100,
@@ -60,47 +65,51 @@ export class MongoDumpTarget {
* dumps a collection to a directory
*/
public async dumpCollectionToDir(
collectionArg: plugins.mongodb.Collection,
collectionArg: plugins.mongodb.Collection<plugins.mongodb.Document>,
dirArg: string,
nameTransformFunction = (doc: any) => doc._id
nameTransformFunction: TMongoDumpNameTransform = (docArg) => docArg._id.toString()
) {
const dirPath = plugins.smartpath.transform.makeAbsolute(dirArg);
const collectionDir = plugins.path.join(dirPath, collectionArg.collectionName);
await plugins.smartfile.fs.ensureDir(collectionDir);
await this.smartFs.directory(collectionDir).create();
const cursor = collectionArg.find();
let value = await cursor.next();
while (value) {
await plugins.smartfile.memory.toFs(
JSON.stringify(value, null, 2),
plugins.path.join(collectionDir, `${nameTransformFunction(value)}.json`)
);
const targetPath = plugins.path.join(collectionDir, `${nameTransformFunction(value)}.json`);
await this.smartFs.file(targetPath).encoding('utf8').write(JSON.stringify(value, null, 2));
value = await cursor.next();
}
}
public async dumpCollectionToTarArchiveStream(collectionArg: plugins.mongodb.Collection) {}
public async dumpCollectionToTarArchiveStream(
collectionArg: plugins.mongodb.Collection<plugins.mongodb.Document>
) {}
public async dumpCollectionToTarArchiveFile(
collectionArg: plugins.mongodb.Collection,
collectionArg: plugins.mongodb.Collection<plugins.mongodb.Document>,
filePathArg: string
) {}
public async dumpAllCollectionsToDir(
dirArg: string,
nameFunctionArg?: (docArg: any) => string,
nameFunctionArg?: TMongoDumpNameTransform,
cleanDirArg = false
) {
const dirPath = plugins.smartpath.transform.makeAbsolute(dirArg);
if (cleanDirArg) {
await plugins.smartfile.fs.ensureEmptyDir(dirArg);
if (await this.smartFs.directory(dirPath).exists()) {
await this.smartFs.directory(dirPath).recursive().delete();
}
await this.smartFs.directory(dirPath).create();
}
const collections = await this.getCollections();
for (const collection of collections) {
await this.dumpCollectionToDir(collection, dirArg, nameFunctionArg);
await this.dumpCollectionToDir(collection, dirPath, nameFunctionArg);
}
}
public async dumpAllCollectionsToTarArchiveStream(
collectionArg: plugins.mongodb.Collection,
collectionArg: plugins.mongodb.Collection<plugins.mongodb.Document>,
filePathArg: string
) {}
}
+10 -12
View File
@@ -2,34 +2,32 @@
import * as path from 'path';
export {
path
}
path,
};
// pushrocks scope
import * as lik from '@push.rocks/lik';
import * as smartfile from '@push.rocks/smartfile';
import * as smartjson from '@push.rocks/smartjson';
import * as smartfs from '@push.rocks/smartfs';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
export {
lik,
smartfile,
smartjson,
smartfs,
smartpath,
smartpromise
}
smartpromise,
};
// tsclass
import * as tsclass from '@tsclass/tsclass';
export {
tsclass
}
tsclass,
};
// third party scope
import * as mongodb from 'mongodb';
export {
mongodb
}
mongodb,
};
+4 -4
View File
@@ -5,10 +5,10 @@
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"noImplicitAny": true,
"esModuleInterop": true,
"verbatimModuleSyntax": true
"verbatimModuleSyntax": true,
"types": ["node"]
},
"exclude": [
"dist_*/**/*.d.ts"
]
"exclude": ["dist_*/**/*.d.ts"]
}