Compare commits
62 Commits
9755522bba
de8736e99e
c430627a21
0bfebaf5b9
4733982d03
368dc27607
938b25c925
ab251858ba
24371ccf78
ed1eecbab8
0d2dcec3e2
9426a21a2a
4fac974fc9
cad2decf59
0f61bdc455
408b2cce4a
7a08700451
ebaf3e685c
c8d51a30d8
d957e911de
fee936c75f
ac867401de
c066464526
0105aa2a18
4c2477c269
ea0d2bb251
b3e30a8711
64621dd38f
117c257a27
b30522c505
57d2d56d00
90751002aa
7606e074a5
7ec39e397e
21d8d3dc32
6d456955d8
d08544c782
bda9ac8a07
d27dafba2b
b6594de18c
d9246cbeac
9a5864656e
307f0c7277
62dc897e73
552b344914
5a2cc2406c
73a11370b6
162265f353
06776d74c8
b4cd6b0fe1
b282f69b35
203a284c88
30ae641a9c
cfe733621f
1f76e2478e
7d668bee05
bef7f68360
56e9754725
30d81581cf
5e9db12955
ad2f422c86
17ce14bcb9
@@ -6,8 +6,8 @@ on:
       - '**'

 env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
   NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
   NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
   NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}

@@ -26,7 +26,7 @@ jobs:
       - name: Install pnpm and npmci
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci

       - name: Run npm prepare
         run: npmci npm prepare
@@ -6,8 +6,8 @@ on:
       - '*'

 env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
   NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
   NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
   NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}

@@ -26,7 +26,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare

       - name: Audit production dependencies

@@ -54,7 +54,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare

       - name: Test stable

@@ -82,7 +82,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare

       - name: Release

@@ -104,7 +104,7 @@ jobs:
       - name: Prepare
         run: |
           pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
           npmci npm prepare

       - name: Code quality

@@ -119,6 +119,6 @@ jobs:
         run: |
           npmci node install stable
           npmci npm install
-          pnpm install -g @gitzone/tsdoc
+          pnpm install -g @git.zone/tsdoc
           npmci command tsdoc
         continue-on-error: true
3 .gitignore vendored

@@ -3,7 +3,6 @@
 # artifacts
 coverage/
 public/
-pages/

 # installs
 node_modules/

@@ -17,4 +16,4 @@ node_modules/
 dist/
 dist_*/

-# custom
+#------# custom
237 changelog.md Normal file

@@ -0,0 +1,237 @@
# Changelog

## 2025-04-14 - 5.8.2 - fix(classes.doc.ts)
Ensure collection initialization before creating a cursor in getCursorExtended

- Added 'await collection.init()' to guarantee that the MongoDB collection is initialized before using the cursor
- Prevents potential runtime errors when accessing collection.mongoDbCollection

## 2025-04-14 - 5.8.1 - fix(cursor, doc)
Add explicit return types and casts to SmartdataDbCursor methods and update getCursorExtended signature in SmartDataDbDoc.

- Specify Promise<T> as return type for next() in SmartdataDbCursor and cast return value to T.
- Specify Promise<T[]> as return type for toArray() in SmartdataDbCursor and cast return value to T[].
- Update getCursorExtended to return Promise<SmartdataDbCursor<T>> for clearer type safety.

## 2025-04-14 - 5.8.0 - feat(cursor)
Add toArray method to SmartdataDbCursor to convert raw MongoDB documents into initialized class instances

- Introduced asynchronous toArray method in SmartdataDbCursor which retrieves all documents from the MongoDB cursor
- Maps each native document to a SmartDataDbDoc instance using createInstanceFromMongoDbNativeDoc for consistent API usage

## 2025-04-14 - 5.7.0 - feat(SmartDataDbDoc)
Add extended cursor method getCursorExtended for flexible cursor modifications

- Introduces getCursorExtended in classes.doc.ts to allow modifier functions for MongoDB cursors
- Wraps the modified cursor with SmartdataDbCursor for improved API consistency
- Enhances querying capabilities by enabling customized cursor transformations
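
The exact call signature is not part of this diff, so the following is only a minimal sketch: it assumes getCursorExtended is a static method that takes a filter plus a modifier callback receiving the native MongoDB cursor, and it reuses the Product document class defined later in this compare (readme.md and test/test.search.ts).

```typescript
// Assumption: getCursorExtended(filter, modifier) — the modifier receives the native
// MongoDB cursor and may chain driver methods; the result comes back wrapped in a
// SmartdataDbCursor (typed as Promise<SmartdataDbCursor<T>> since 5.8.1).
const cursor = await Product.getCursorExtended(
  { category: 'Electronics' },
  (nativeCursor) => nativeCursor.sort({ price: -1 }).limit(10),
);

// toArray() (added in 5.8.0) maps the raw MongoDB documents back to Product instances.
const topProducts = await cursor.toArray();
```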

## 2025-04-07 - 5.6.0 - feat(indexing)
Add support for regular index creation in documents and collections

- Implement new index decorator in classes.doc.ts to mark properties with regular indexing options
- Update SmartdataCollection to create regular indexes if defined on a document during insert
- Enhance document structure to store and utilize regular index configurations
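
A condensed sketch of the decorator described above, modelled on the Automatic Indexing section of the updated readme further down in this compare; `db` is assumed to be an already initialized SmartdataDb instance.

```typescript
import { Collection, SmartDataDbDoc, SmartdataDb, unI, svDb, index } from '@push.rocks/smartdata';

declare const db: SmartdataDb; // assumed: an initialized database instance

@Collection(() => db)
class Product extends SmartDataDbDoc<Product, Product> {
  @unI()
  public id: string = 'product-id';

  @svDb()
  @index() // regular (non-unique) index, created on document insert per 5.6.0
  public category: string;

  @svDb()
  @index({ sparse: true }) // index options are passed through to MongoDB
  public optionalField?: string;
}
```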

## 2025-04-06 - 5.5.1 - fix(ci & formatting)
Minor fixes: update CI workflow image and npmci package references, adjust package.json and readme URLs, and apply consistent code formatting.

- Update image and repo URL in Gitea workflows from GitLab to code.foss.global
- Replace '@shipzone/npmci' with '@ship.zone/npmci' throughout CI scripts
- Adjust homepage and bugs URL in package.json and readme
- Apply trailing commas and consistent formatting in TypeScript source files
- Minor update to .gitignore custom section label

## 2025-04-06 - 5.5.0 - feat(search)
Enhance search functionality with robust Lucene query transformation and reliable fallback mechanisms

- Improve Lucene adapter to properly structure $or queries for term, phrase, wildcard, and fuzzy search
- Implement and document a robust searchWithLucene method with fallback to in-memory filtering
- Update readme and tests with extensive examples for @searchable fields and Lucene-based queries
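
For illustration, the searchWithLucene calls documented in the updated readme look like this (assuming a Product class with @searchable() fields as defined in test/test.search.ts):

```typescript
// Field-specific, wildcard, and boolean queries all go through the same method;
// if MongoDB text search is not available it falls back to regex / in-memory filtering.
const electronics = await Product.searchWithLucene('category:Electronics');
const macProducts = await Product.searchWithLucene('Mac*');
const combined = await Product.searchWithLucene('(wireless OR bluetooth) AND Electronics');
```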

## 2025-04-06 - 5.4.0 - feat(core)
Refactor file structure and update dependency versions

- Renamed files and modules from 'smartdata.classes.*' to 'classes.*' and adjusted corresponding import paths.
- Updated dependency versions: '@push.rocks/smartmongo' to ^2.0.11, '@tsclass/tsclass' to ^8.2.0, and 'mongodb' to ^6.15.0.
- Renamed dev dependency packages from '@gitzone/...' to '@git.zone/...' and updated '@push.rocks/tapbundle' and '@types/node'.
- Fixed YAML workflow command: replaced 'pnpm install -g @gitzone/tsdoc' with 'pnpm install -g @git.zone/tsdoc'.
- Added package manager configuration and pnpm-workspace.yaml for built dependencies.

## 2025-03-10 - 5.3.0 - feat(docs)
Enhance documentation with updated installation instructions and comprehensive usage examples covering advanced features such as deep queries, automatic indexing, and distributed coordination.

- Added pnpm installation command
- Updated User model example to include ObjectId, Binary, and custom serialization
- Expanded CRUD operations examples with cursor methods and deep query support
- Enhanced sections on EasyStore, real-time data watching with RxJS integration, and managed collections
- Included detailed examples for transactions, deep object queries, and document lifecycle hooks
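
As a taste of the documented features, here is the EasyStore usage condensed from the updated readme (`db` is assumed to be an initialized SmartdataDb):

```typescript
// Type-safe key-value storage persisted through the database (see the EasyStore readme section).
const store = await db.createEasyStore<{ apiKey: string }>('app-config');
await store.writeKey('apiKey', 'secret-api-key-123');
const apiKey = await store.readKey('apiKey'); // typed as string
```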

## 2025-02-03 - 5.2.12 - fix(documentation)
Remove license badge from README

- Removed the license badge from the README file, ensuring compliance with branding guidelines.

## 2025-02-03 - 5.2.11 - fix(documentation)
Updated project documentation for accuracy and added advanced feature details

- Added details for EasyStore, Distributed Coordination, and Real-time Data Watching features.
- Updated database connection setup instructions to include user authentication.
- Re-organized advanced usage section to showcase additional features separately.

## 2024-09-05 - 5.2.10 - fix(smartdata.classes.doc)
Fix issue with array handling in convertFilterForMongoDb function

- Corrected the logic to properly handle array filters in the convertFilterForMongoDb function to avoid incorrect assignments.

## 2024-09-05 - 5.2.9 - fix(smartdata.classes.doc)
Fixed issue with convertFilterForMongoDb to handle array operators.

- Updated the convertFilterForMongoDb function in smartdata.classes.doc.ts to properly handle array operators like $in and $all.

## 2024-09-05 - 5.2.8 - fix(smartdata.classes.doc)
Fix key handling in convertFilterForMongoDb function

- Fixed an issue in convertFilterForMongoDb that allowed keys with dots which could cause errors.

## 2024-09-05 - 5.2.7 - fix(core)
Fixed issue with handling filter keys containing dots in smartdata.classes.doc.ts

- Fixed an error in the convertFilterForMongoDb function which previously threw an error when keys contained dots.
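
The four entries above all touch convertFilterForMongoDb, the internal filter translation step. The kind of filter they make safe is what the updated readme documents as deep queries, for example (a sketch reusing the UserProfile shape from the readme's Deep Object Queries section):

```typescript
// Dot-notation keys and array operators are translated for MongoDB by convertFilterForMongoDb.
const profiles = await UserProfile.getInstances({
  'user.details.address.country': 'USA',
  'user.details.address.city': { $in: ['New York', 'Los Angeles'] },
});
```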

## 2024-06-18 - 5.2.6 - Chore
Maintenance Release

- Release version 5.2.6

## 2024-05-31 - 5.2.2 - Bug Fixes
Fixes and Maintenance

- Fixed issue where `_createdAt` and `_updatedAt` registered saveableProperties for all document types

## 2024-04-15 - 5.1.2 - New Feature
Enhancements and Bug Fixes

- Added static `.getCount({})` method to `SmartDataDbDoc`
- Changed fields `_createdAt` and `_updatedAt` to ISO format
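
A brief sketch of the counting helper mentioned above; only the `.getCount({})` form is stated in this changelog, so the filtered variant is an assumption based on the filter shape used by getInstances.

```typescript
// Count documents without loading them (added in 5.1.2).
const allUsers = await User.getCount({});

// Assumption: the same filter shape as getInstances() is accepted.
const activeUsers = await User.getCount({ active: true });
```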

## 2024-04-14 - 5.0.43 - New Feature
New Feature Addition

- Added default `_createdAt` and `_updatedAt` fields, fixes #1

## 2024-03-30 - 5.0.41 - Bug Fixes
Improvements and Fixes

- Improved `tsconfig.json` for ES Module use

## 2023-07-10 - 5.0.20 - Chore
Organizational Changes

- Switched to new org scheme

## 2023-07-21 - 5.0.21 to 5.0.26 - Fixes
Multiple Fix Releases

- Various core updates and bug fixes

## 2023-07-21 - 5.0.20 - Chore
Organizational Changes

- Switch to the new org scheme

## 2023-06-25 - 5.0.14 to 5.0.19 - Fixes
Multiple Fix Releases

- Various core updates and bug fixes

## 2022-05-17 - 5.0.0 - Major Update
Breaking Changes

- Switched to ESM

## 2022-05-18 - 5.0.2 - Bug Fixes
Bug Fixes

- The `watcher.changeSubject` now emits the correct type into observer functions

## 2022-05-17 - 5.0.1 - Chore
Testing Improvements

- Tests now use `@pushrocks/smartmongo` backed by `wiredTiger`

## 2022-05-17 to 2022-11-08 - 5.0.8 to 5.0.10
Multiple Fix Releases

- Various core updates and bug fixes

## 2021-11-12 - 4.0.17 to 4.0.20
Multiple Fix Releases

- Various core updates and bug fixes

## 2021-09-17 - 4.0.10 to 4.0.16
Multiple Fix Releases

- Various core updates and bug fixes

## 2021-06-09 - 4.0.1 to 4.0.9
Multiple Fix Releases

- Various core updates and bug fixes

## 2021-06-06 - 4.0.0 - Major Update
Major Release

- Maintenance and core updates

## 2021-05-17 - 3.1.56 - Chore
Maintenance Release

- Release version 3.1.56

## 2020-09-09 - 3.1.44 to 3.1.52
Multiple Fix Releases

- Various core updates and bug fixes

## 2020-06-12 - 3.1.26 to 3.1.28
Multiple Fix Releases

- Various core updates and bug fixes

## 2020-02-18 - 3.1.23 to 3.1.25
Multiple Fix Releases

- Various core updates and bug fixes

## 2019-09-11 - 3.1.20 to 3.1.22
Multiple Fix Releases

- Various core updates and bug fixes

## 2018-07-10 - 3.0.5 - New Feature
Added Feature

- Added custom unique indexes to `SmartdataDoc`

## 2018-07-08 - 3.0.1 - Chore
Dependencies Update

- Updated mongodb dependencies

## 2018-07-08 - 3.0.0 - Major Update
Refactor and Cleanup

- Cleaned project structure

## 2018-01-16 - 2.0.7 - Breaking Change
Big Changes

- Switched to `@pushrocks` scope and moved from `rethinkdb` to `mongodb`

## 2018-01-12 - 2.0.0 - Major Release
Core Updates

- Updated CI configurations
@@ -12,12 +12,25 @@
   "gitzone": {
     "projectType": "npm",
     "module": {
-      "githost": "gitlab.com",
+      "githost": "code.foss.global",
       "gitscope": "push.rocks",
       "gitrepo": "smartdata",
-      "description": "do more with data",
+      "description": "An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.",
       "npmPackagename": "@push.rocks/smartdata",
-      "license": "MIT"
+      "license": "MIT",
+      "keywords": [
+        "data manipulation",
+        "NoSQL",
+        "MongoDB",
+        "TypeScript",
+        "data validation",
+        "collections",
+        "custom data types",
+        "ODM"
+      ]
     }
   },
   "tsdoc": {
     "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
   }
 }
42 package.json

@@ -1,8 +1,8 @@
 {
   "name": "@push.rocks/smartdata",
-  "version": "5.0.39",
+  "version": "5.8.2",
   "private": false,
-  "description": "do more with data",
+  "description": "An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",

@@ -13,35 +13,35 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+ssh://git@gitlab.com/pushrocks/smartdata.git"
+    "url": "https://code.foss.global/push.rocks/smartdata.git"
   },
   "author": "Lossless GmbH",
   "license": "MIT",
   "bugs": {
-    "url": "https://gitlab.com/pushrocks/smartdata/issues"
+    "url": "https://code.foss.global/push.rocks/smartdata/issues"
   },
-  "homepage": "https://gitlab.com/pushrocks/smartdata#README",
+  "homepage": "https://code.foss.global/push.rocks/smartdata#readme",
   "dependencies": {
     "@push.rocks/lik": "^6.0.14",
     "@push.rocks/smartdelay": "^3.0.1",
     "@push.rocks/smartlog": "^3.0.2",
-    "@push.rocks/smartmongo": "^2.0.10",
+    "@push.rocks/smartmongo": "^2.0.11",
     "@push.rocks/smartpromise": "^4.0.2",
     "@push.rocks/smartrx": "^3.0.7",
     "@push.rocks/smartstring": "^4.0.15",
     "@push.rocks/smarttime": "^4.0.6",
     "@push.rocks/smartunique": "^3.0.8",
     "@push.rocks/taskbuffer": "^3.1.7",
-    "@tsclass/tsclass": "^4.0.52",
-    "mongodb": "^6.5.0"
+    "@tsclass/tsclass": "^8.2.0",
+    "mongodb": "^6.15.0"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.66",
-    "@gitzone/tsrun": "^1.2.44",
-    "@gitzone/tstest": "^1.0.77",
+    "@git.zone/tsbuild": "^2.3.2",
+    "@git.zone/tsrun": "^1.2.44",
+    "@git.zone/tstest": "^1.0.77",
     "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/tapbundle": "^5.0.22",
-    "@types/node": "^20.11.30"
+    "@push.rocks/tapbundle": "^5.6.2",
+    "@types/node": "^22.14.0"
   },
   "files": [
     "ts/**/*",

@@ -57,5 +57,19 @@
   ],
   "browserslist": [
     "last 1 chrome versions"
-  ]
+  ],
+  "keywords": [
+    "data manipulation",
+    "NoSQL",
+    "MongoDB",
+    "TypeScript",
+    "data validation",
+    "collections",
+    "custom data types",
+    "ODM"
+  ],
+  "packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6",
+  "pnpm": {
+    "overrides": {}
+  }
 }
13559 pnpm-lock.yaml generated

File diff suppressed because it is too large
4 pnpm-workspace.yaml Normal file

@@ -0,0 +1,4 @@
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
0 readme.hints.md Normal file

680 readme.md

@@ -1,154 +1,608 @@
|
||||
# @push.rocks/smartdata
|
||||
do more with data
|
||||
|
||||
## Availabililty and Links
|
||||
* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartdata)
|
||||
* [gitlab.com (source)](https://gitlab.com/push.rocks/smartdata)
|
||||
* [github.com (source mirror)](https://github.com/push.rocks/smartdata)
|
||||
* [docs (typedoc)](https://push.rocks.gitlab.io/smartdata/)
|
||||
[](https://www.npmjs.com/package/@push.rocks/smartdata)
|
||||
|
||||
## Status for master
|
||||
A powerful TypeScript-first MongoDB wrapper that provides advanced features for distributed systems, real-time data synchronization, and easy data management.
|
||||
|
||||
Status Category | Status Badge
|
||||
-- | --
|
||||
GitLab Pipelines | [](https://lossless.cloud)
|
||||
GitLab Pipline Test Coverage | [](https://lossless.cloud)
|
||||
npm | [](https://lossless.cloud)
|
||||
Snyk | [](https://lossless.cloud)
|
||||
TypeScript Support | [](https://lossless.cloud)
|
||||
node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
||||
Code Style | [](https://lossless.cloud)
|
||||
PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
|
||||
PackagePhobia (package size on registry) | [](https://lossless.cloud)
|
||||
BundlePhobia (total size when bundled) | [](https://lossless.cloud)
|
||||
## Features
|
||||
|
||||
- **Type-Safe MongoDB Integration**: Full TypeScript support with decorators for schema definition
|
||||
- **Document Management**: Type-safe CRUD operations with automatic timestamp tracking
|
||||
- **EasyStore**: Simple key-value storage with automatic persistence and sharing between instances
|
||||
- **Distributed Coordination**: Built-in support for leader election and distributed task management
|
||||
- **Real-time Data Sync**: Watchers for real-time data changes with RxJS integration
|
||||
- **Connection Management**: Automatic connection handling with connection pooling
|
||||
- **Collection Management**: Type-safe collection operations with automatic indexing
|
||||
- **Deep Query Type Safety**: Fully type-safe queries for nested object properties with `DeepQuery<T>`
|
||||
- **MongoDB Compatibility**: Support for all MongoDB query operators and advanced features
|
||||
- **Enhanced Cursors**: Chainable, type-safe cursor API with memory efficient data processing
|
||||
- **Type Conversion**: Automatic handling of MongoDB types like ObjectId and Binary data
|
||||
- **Serialization Hooks**: Custom serialization and deserialization of document properties
|
||||
- **Powerful Search Capabilities**: Lucene-like query syntax with field-specific search, advanced operators, and fallback mechanisms
|
||||
|
||||
## Requirements
|
||||
|
||||
- Node.js >= 16.x
|
||||
- MongoDB >= 4.4
|
||||
- TypeScript >= 4.x (for development)
|
||||
|
||||
## Install
|
||||
|
||||
To install `@push.rocks/smartdata`, use npm:
|
||||
|
||||
```bash
|
||||
npm install @push.rocks/smartdata --save
|
||||
```
|
||||
|
||||
Or with pnpm:
|
||||
|
||||
```bash
|
||||
pnpm add @push.rocks/smartdata
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Use TypeScript for best in class instellisense.
|
||||
`@push.rocks/smartdata` enables efficient data handling and operation management with a focus on using MongoDB. It leverages TypeScript for strong typing and ESM syntax for modern JavaScript usage.
|
||||
|
||||
smartdata is an ODM that adheres to TypeScript practices and uses classes to organize data.
|
||||
It uses RethinkDB as persistent storage.
|
||||
### Setting Up and Connecting to the Database
|
||||
|
||||
## Intention
|
||||
|
||||
There are many ODMs out there, however when we searched for an ODM that uses TypeScript,
|
||||
acts smart while still embracing the NoSQL idea we didn't find a matching solution.
|
||||
This is why we started smartdata.
|
||||
|
||||
How RethinkDB's terms map to the ones of smartdata:
|
||||
|
||||
| MongoDb term | smartdata class |
|
||||
| ------------ | ----------------------------- |
|
||||
| Database | smartdata.SmartdataDb |
|
||||
| Collection | smartdata.SmartdataCollection |
|
||||
| Document | smartdata.SmartadataDoc |
|
||||
|
||||
### class Db
|
||||
|
||||
represents a Database. Naturally it has .connect() etc. methods on it.
|
||||
Before interacting with the database, you need to set up and establish a connection. The `SmartdataDb` class handles connection pooling and automatic reconnection.
|
||||
|
||||
```typescript
|
||||
// Assuming toplevel await
|
||||
import * as smartdata from 'smartdata';
|
||||
import { SmartdataDb } from '@push.rocks/smartdata';
|
||||
|
||||
const smartdataDb = new smartdata.SmartdataDb({
|
||||
mongoDbUrl: '//someurl',
|
||||
mongoDbName: 'myDatabase',
|
||||
mongoDbPass: 'mypassword',
|
||||
// Create a new instance of SmartdataDb with MongoDB connection details
|
||||
const db = new SmartdataDb({
|
||||
mongoDbUrl: 'mongodb://<USERNAME>:<PASSWORD>@localhost:27017/<DBNAME>',
|
||||
mongoDbName: 'your-database-name',
|
||||
mongoDbUser: 'your-username',
|
||||
mongoDbPass: 'your-password',
|
||||
});
|
||||
|
||||
await smartdataDb.connect();
|
||||
// Initialize and connect to the database
|
||||
// This sets up a connection pool with max 100 connections
|
||||
await db.init();
|
||||
```
|
||||
|
||||
### class DbCollection
|
||||
### Defining Data Models
|
||||
|
||||
represents a collection of objects.
|
||||
A collection is defined by the object class (that is extending smartdata.dbdoc) it respresents
|
||||
|
||||
So to get to get access to a specific collection you document
|
||||
Data models in `@push.rocks/smartdata` are classes that represent collections and documents in your MongoDB database. Use decorators such as `@Collection`, `@unI`, and `@svDb` to define your data models.
|
||||
|
||||
```typescript
|
||||
// Assuming toplevel await
|
||||
// continues from the block before...
|
||||
import {
|
||||
SmartDataDbDoc,
|
||||
Collection,
|
||||
unI,
|
||||
svDb,
|
||||
oid,
|
||||
bin,
|
||||
index,
|
||||
searchable,
|
||||
} from '@push.rocks/smartdata';
|
||||
import { ObjectId } from 'mongodb';
|
||||
|
||||
@smartdata.Collection(smartdataDb)
|
||||
class MyObject extends smartdata.DbDoc<MyObject /* ,[an optional interface to implement] */> {
|
||||
// read the next block about DbDoc
|
||||
@smartdata.svDb()
|
||||
property1: string; // @smartdata.svDb() marks the property for db save
|
||||
@Collection(() => db) // Associate this model with the database instance
|
||||
class User extends SmartDataDbDoc<User, User> {
|
||||
@unI()
|
||||
public id: string = 'unique-user-id'; // Mark 'id' as a unique index
|
||||
|
||||
property2: number; // this one is not marked, so it won't be save upon calling this.save()
|
||||
@svDb()
|
||||
@searchable() // Mark 'username' as searchable
|
||||
public username: string; // Mark 'username' to be saved in DB
|
||||
|
||||
constructor() {
|
||||
super(); // the super call is important ;) But you probably know that.
|
||||
@svDb()
|
||||
@searchable() // Mark 'email' as searchable
|
||||
@index() // Create a regular index for this field
|
||||
public email: string; // Mark 'email' to be saved in DB
|
||||
|
||||
@svDb()
|
||||
@oid() // Automatically handle as ObjectId type
|
||||
public organizationId: ObjectId; // Will be automatically converted to/from ObjectId
|
||||
|
||||
@svDb()
|
||||
@bin() // Automatically handle as Binary data
|
||||
public profilePicture: Buffer; // Will be automatically converted to/from Binary
|
||||
|
||||
@svDb({
|
||||
serialize: (data) => JSON.stringify(data), // Custom serialization
|
||||
deserialize: (data) => JSON.parse(data), // Custom deserialization
|
||||
})
|
||||
public preferences: Record<string, any>;
|
||||
|
||||
constructor(username: string, email: string) {
|
||||
super();
|
||||
this.username = username;
|
||||
this.email = email;
|
||||
}
|
||||
}
|
||||
|
||||
// start to instantiate instances of classes from scratch or database
|
||||
|
||||
const localObject = new MyObject({
|
||||
property1: 'hi',
|
||||
property2: {
|
||||
deep: 3,
|
||||
},
|
||||
});
|
||||
await localObject.save(); // saves the object to the database
|
||||
|
||||
// start retrieving instances
|
||||
|
||||
// .getInstance is staticly inheritied, yet fully typed static function to get instances with fully typed filters
|
||||
const myInstance = await MyObject.getInstance({
|
||||
property1: 'hi',
|
||||
property2: {
|
||||
deep: {
|
||||
$gt: 2,
|
||||
} as any,
|
||||
},
|
||||
}); // outputs a new instance of MyObject with the values from db assigned
|
||||
```
|
||||
|
||||
### class DbDoc
|
||||
### CRUD Operations
|
||||
|
||||
represents a individual document in a collection
|
||||
and thereby is ideally suited to extend the class you want to actually store.
|
||||
`@push.rocks/smartdata` simplifies CRUD operations with intuitive methods on model instances.
|
||||
|
||||
### CRUD operations
|
||||
#### Create
|
||||
|
||||
smartdata supports full CRUD operations
|
||||
```typescript
|
||||
const newUser = new User('myUsername', 'myEmail@example.com');
|
||||
await newUser.save(); // Save the new user to the database
|
||||
```
|
||||
|
||||
**Store** or **Update** instances of classes to MongoDB:
|
||||
DbDoc extends your class with the following methods:
|
||||
#### Read
|
||||
|
||||
- async `.save()` will save (or update) the object you call it on only. Any referenced non-savable objects will not get stored.
|
||||
- async `.saveDeep()` does the same like `.save()`.
|
||||
In addition it will look for properties that reference an object
|
||||
that extends DbDoc as well and call .saveDeep() on them as well.
|
||||
Loops are prevented
|
||||
```typescript
|
||||
// Fetch a single user by a unique attribute
|
||||
const user = await User.getInstance({ username: 'myUsername' });
|
||||
|
||||
**Get** a new class instance from MongoDB:
|
||||
DbDoc exposes a static method that allows you specify a filter to retrieve a cloned class of the one you used to that doc at some point later in time:
|
||||
// Fetch multiple users that match criteria
|
||||
const users = await User.getInstances({ email: 'myEmail@example.com' });
|
||||
|
||||
- static async `.getInstance({ /* filter props here */ })` gets you an instance that has the data of the first matched document as properties.
|
||||
- static async `getInstances({ /* filter props here */ })` get you an array instances (one instance for every matched document).
|
||||
// Using a cursor for large collections
|
||||
const cursor = await User.getCursor({ active: true });
|
||||
|
||||
**Delete** instances from MongoDb:
|
||||
smartdata extends your class with a method to easily delete the doucment from DB:
|
||||
// Process documents one at a time (memory efficient)
|
||||
await cursor.forEach(async (user, index) => {
|
||||
// Process each user with its position
|
||||
console.log(`Processing user ${index}: ${user.username}`);
|
||||
});
|
||||
|
||||
- async `.delete()`will delete the document from DB.
|
||||
// Chain cursor methods like in the MongoDB native driver
|
||||
const paginatedCursor = await User.getCursor({ active: true })
|
||||
.limit(10) // Limit results
|
||||
.skip(20) // Skip first 20 results
|
||||
.sort({ createdAt: -1 }); // Sort by creation date descending
|
||||
|
||||
## TypeScript
|
||||
// Convert cursor to array (when you know the result set is small)
|
||||
const userArray = await paginatedCursor.toArray();
|
||||
|
||||
How does TypeScript play into this?
|
||||
Since you define your classes in TypeScript and types flow through smartdata in a generic way
|
||||
you should get all the Intellisense and type checking you love when using smartdata.
|
||||
smartdata itself also bundles typings. You don't need to install any additional types for smartdata.
|
||||
// Other cursor operations
|
||||
const nextUser = await cursor.next(); // Get the next document
|
||||
const hasMoreUsers = await cursor.hasNext(); // Check if more documents exist
|
||||
const count = await cursor.count(); // Get the count of documents in the cursor
|
||||
|
||||
## Contribution
|
||||
// Always close cursors when done with them
|
||||
await cursor.close();
|
||||
```
|
||||
|
||||
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
|
||||
#### Update
|
||||
|
||||
For further information read the linked docs at the top of this readme.
|
||||
```typescript
|
||||
// Assuming 'user' is an instance of User
|
||||
user.email = 'newEmail@example.com';
|
||||
await user.save(); // Update the user in the database
|
||||
|
||||
## Legal
|
||||
> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
|
||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
|
||||
// Upsert operations (insert if not exists, update if exists)
|
||||
const upsertedUser = await User.upsert(
|
||||
{ id: 'user-123' }, // Query to find the user
|
||||
{
|
||||
// Fields to update or insert
|
||||
username: 'newUsername',
|
||||
email: 'newEmail@example.com',
|
||||
},
|
||||
);
|
||||
```
|
||||
|
||||
#### Delete
|
||||
|
||||
```typescript
|
||||
// Assuming 'user' is an instance of User
|
||||
await user.delete(); // Delete the user from the database
|
||||
```
|
||||
|
||||
## Advanced Features
|
||||
|
||||
### Search Functionality
|
||||
|
||||
SmartData provides powerful search capabilities with a Lucene-like query syntax and robust fallback mechanisms:
|
||||
|
||||
```typescript
|
||||
// Define a model with searchable fields
|
||||
@Collection(() => db)
|
||||
class Product extends SmartDataDbDoc<Product, Product> {
|
||||
@unI()
|
||||
public id: string = 'product-id';
|
||||
|
||||
@svDb()
|
||||
@searchable() // Mark this field as searchable
|
||||
public name: string;
|
||||
|
||||
@svDb()
|
||||
@searchable() // Mark this field as searchable
|
||||
public description: string;
|
||||
|
||||
@svDb()
|
||||
@searchable() // Mark this field as searchable
|
||||
public category: string;
|
||||
|
||||
@svDb()
|
||||
public price: number;
|
||||
}
|
||||
|
||||
// Get all fields marked as searchable for a class
|
||||
const searchableFields = getSearchableFields('Product'); // ['name', 'description', 'category']
|
||||
|
||||
// Basic search across all searchable fields
|
||||
const iPhoneProducts = await Product.searchWithLucene('iPhone');
|
||||
|
||||
// Field-specific search
|
||||
const electronicsProducts = await Product.searchWithLucene('category:Electronics');
|
||||
|
||||
// Search with wildcards
|
||||
const macProducts = await Product.searchWithLucene('Mac*');
|
||||
|
||||
// Search in specific fields with partial words
|
||||
const laptopResults = await Product.searchWithLucene('description:laptop');
|
||||
|
||||
// Search is case-insensitive
|
||||
const results1 = await Product.searchWithLucene('electronics');
|
||||
const results2 = await Product.searchWithLucene('Electronics');
|
||||
// results1 and results2 will contain the same documents
|
||||
|
||||
// Using boolean operators (requires text index in MongoDB)
|
||||
const wirelessOrLaptop = await Product.searchWithLucene('wireless OR laptop');
|
||||
|
||||
// Negative searches
|
||||
const electronicsNotSamsung = await Product.searchWithLucene('Electronics NOT Samsung');
|
||||
|
||||
// Phrase searches
|
||||
const exactPhrase = await Product.searchWithLucene('"high-speed blender"');
|
||||
|
||||
// Grouping with parentheses
|
||||
const complexQuery = await Product.searchWithLucene('(wireless OR bluetooth) AND Electronics');
|
||||
```
|
||||
|
||||
The search functionality includes:
|
||||
|
||||
- `@searchable()` decorator for marking fields as searchable
|
||||
- `getSearchableFields()` to retrieve all searchable fields for a class
|
||||
- `search()` method for basic search (requires MongoDB text index)
|
||||
- `searchWithLucene()` method with robust fallback mechanisms
|
||||
- Support for field-specific searches, wildcards, and boolean operators
|
||||
- Automatic fallback to regex-based search if MongoDB text search fails
|
||||
|
||||
### EasyStore
|
||||
|
||||
EasyStore provides a simple key-value storage system with automatic persistence:
|
||||
|
||||
```typescript
|
||||
// Create an EasyStore instance with a specific type
|
||||
interface ConfigStore {
|
||||
apiKey: string;
|
||||
settings: {
|
||||
theme: string;
|
||||
notifications: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
// Create a type-safe EasyStore
|
||||
const store = await db.createEasyStore<ConfigStore>('app-config');
|
||||
|
||||
// Write and read data with full type safety
|
||||
await store.writeKey('apiKey', 'secret-api-key-123');
|
||||
await store.writeKey('settings', { theme: 'dark', notifications: true });
|
||||
|
||||
const apiKey = await store.readKey('apiKey'); // Type: string
|
||||
const settings = await store.readKey('settings'); // Type: { theme: string, notifications: boolean }
|
||||
|
||||
// Check if a key exists
|
||||
const hasKey = await store.hasKey('apiKey'); // true
|
||||
|
||||
// Delete a key
|
||||
await store.deleteKey('apiKey');
|
||||
```
|
||||
|
||||
### Distributed Coordination
|
||||
|
||||
Built-in support for distributed systems with leader election:
|
||||
|
||||
```typescript
|
||||
// Create a distributed coordinator
|
||||
const coordinator = new SmartdataDistributedCoordinator(db);
|
||||
|
||||
// Start coordination
|
||||
await coordinator.start();
|
||||
|
||||
// Handle leadership changes
|
||||
coordinator.on('leadershipChange', (isLeader) => {
|
||||
if (isLeader) {
|
||||
// This instance is now the leader
|
||||
// Run leader-specific tasks
|
||||
startPeriodicJobs();
|
||||
} else {
|
||||
// This instance is no longer the leader
|
||||
stopPeriodicJobs();
|
||||
}
|
||||
});
|
||||
|
||||
// Access leadership status anytime
|
||||
if (coordinator.isLeader) {
|
||||
// Run leader-only operations
|
||||
}
|
||||
|
||||
// Execute a task only on the leader
|
||||
await coordinator.executeIfLeader(async () => {
|
||||
// This code only runs on the leader instance
|
||||
await runImportantTask();
|
||||
});
|
||||
|
||||
// Stop coordination when shutting down
|
||||
await coordinator.stop();
|
||||
```
|
||||
|
||||
### Real-time Data Watching
|
||||
|
||||
Watch for changes in your collections with RxJS integration using MongoDB Change Streams:
|
||||
|
||||
```typescript
|
||||
// Create a watcher for a specific collection with a query filter
|
||||
const watcher = await User.watch(
|
||||
{
|
||||
active: true, // Only watch for changes to active users
|
||||
},
|
||||
{
|
||||
fullDocument: true, // Include the full document in change notifications
|
||||
bufferTimeMs: 100, // Buffer changes for 100ms to reduce notification frequency
|
||||
},
|
||||
);
|
||||
|
||||
// Subscribe to changes using RxJS
|
||||
watcher.changeSubject.subscribe((change) => {
|
||||
console.log('Change operation:', change.operationType); // 'insert', 'update', 'delete', etc.
|
||||
console.log('Document changed:', change.docInstance); // The full document instance
|
||||
|
||||
// Handle different types of changes
|
||||
if (change.operationType === 'insert') {
|
||||
console.log('New user created:', change.docInstance.username);
|
||||
} else if (change.operationType === 'update') {
|
||||
console.log('User updated:', change.docInstance.username);
|
||||
} else if (change.operationType === 'delete') {
|
||||
console.log('User deleted');
|
||||
}
|
||||
});
|
||||
|
||||
// Manual observation with event emitter pattern
|
||||
watcher.on('change', (change) => {
|
||||
console.log('Document changed:', change);
|
||||
});
|
||||
|
||||
// Stop watching when no longer needed
|
||||
await watcher.stop();
|
||||
```
|
||||
|
||||
### Managed Collections
|
||||
|
||||
For more complex data models that require additional context:
|
||||
|
||||
```typescript
|
||||
@Collection(() => db)
|
||||
class ManagedDoc extends SmartDataDbDoc<ManagedDoc, ManagedDoc> {
|
||||
@unI()
|
||||
public id: string = 'unique-id';
|
||||
|
||||
@svDb()
|
||||
public data: string;
|
||||
|
||||
@managed()
|
||||
public manager: YourCustomManager;
|
||||
|
||||
// The manager can provide additional functionality
|
||||
async specialOperation() {
|
||||
return this.manager.doSomethingSpecial(this);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Automatic Indexing
|
||||
|
||||
Define indexes directly in your model class:
|
||||
|
||||
```typescript
|
||||
@Collection(() => db)
|
||||
class Product extends SmartDataDbDoc<Product, Product> {
|
||||
@unI() // Unique index
|
||||
public id: string = 'product-id';
|
||||
|
||||
@svDb()
|
||||
@index() // Regular index for faster queries
|
||||
public category: string;
|
||||
|
||||
@svDb()
|
||||
@index({ sparse: true }) // Sparse index with options
|
||||
public optionalField?: string;
|
||||
|
||||
// Compound indexes can be defined in the collection decorator
|
||||
@Collection(() => db, {
|
||||
indexMap: {
|
||||
compoundIndex: {
|
||||
fields: { category: 1, name: 1 },
|
||||
options: { background: true }
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
### Transaction Support
|
||||
|
||||
Use MongoDB transactions for atomic operations:
|
||||
|
||||
```typescript
|
||||
const session = await db.startSession();
|
||||
try {
|
||||
await session.withTransaction(async () => {
|
||||
const user = await User.getInstance({ id: 'user-id' }, { session });
|
||||
user.balance -= 100;
|
||||
await user.save({ session });
|
||||
|
||||
const recipient = await User.getInstance({ id: 'recipient-id' }, { session });
|
||||
recipient.balance += 100;
|
||||
await user.save({ session });
|
||||
});
|
||||
} finally {
|
||||
await session.endSession();
|
||||
}
|
||||
```
|
||||
|
||||
### Deep Object Queries
|
||||
|
||||
SmartData provides fully type-safe deep property queries with the `DeepQuery` type:
|
||||
|
||||
```typescript
|
||||
// If your document has nested objects
|
||||
class UserProfile extends SmartDataDbDoc<UserProfile, UserProfile> {
|
||||
@unI()
|
||||
public id: string = 'profile-id';
|
||||
|
||||
@svDb()
|
||||
public user: {
|
||||
details: {
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
address: {
|
||||
city: string;
|
||||
country: string;
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
// Type-safe string literals for dot notation
|
||||
const usersInUSA = await UserProfile.getInstances({
|
||||
'user.details.address.country': 'USA',
|
||||
});
|
||||
|
||||
// Fully typed deep queries with the DeepQuery type
|
||||
import { DeepQuery } from '@push.rocks/smartdata';
|
||||
|
||||
const typedQuery: DeepQuery<UserProfile> = {
|
||||
id: 'profile-id',
|
||||
'user.details.firstName': 'John',
|
||||
'user.details.address.country': 'USA',
|
||||
};
|
||||
|
||||
// TypeScript will error if paths are incorrect
|
||||
const results = await UserProfile.getInstances(typedQuery);
|
||||
|
||||
// MongoDB query operators are supported
|
||||
const operatorQuery: DeepQuery<UserProfile> = {
|
||||
'user.details.address.country': 'USA',
|
||||
'user.details.address.city': { $in: ['New York', 'Los Angeles'] },
|
||||
};
|
||||
|
||||
const filteredResults = await UserProfile.getInstances(operatorQuery);
|
||||
```
|
||||
|
||||
### Document Lifecycle Hooks
|
||||
|
||||
Implement custom logic at different stages of a document's lifecycle:
|
||||
|
||||
```typescript
|
||||
@Collection(() => db)
|
||||
class Order extends SmartDataDbDoc<Order, Order> {
|
||||
@unI()
|
||||
public id: string = 'order-id';
|
||||
|
||||
@svDb()
|
||||
public total: number;
|
||||
|
||||
@svDb()
|
||||
public items: string[];
|
||||
|
||||
// Called before saving the document
|
||||
async beforeSave() {
|
||||
// Calculate total based on items
|
||||
this.total = await calculateTotal(this.items);
|
||||
|
||||
// Validate the document
|
||||
if (this.items.length === 0) {
|
||||
throw new Error('Order must have at least one item');
|
||||
}
|
||||
}
|
||||
|
||||
// Called after the document is saved
|
||||
async afterSave() {
|
||||
// Notify other systems about the saved order
|
||||
await notifyExternalSystems(this);
|
||||
}
|
||||
|
||||
// Called before deleting the document
|
||||
async beforeDelete() {
|
||||
// Check if order can be deleted
|
||||
const canDelete = await checkOrderDeletable(this.id);
|
||||
if (!canDelete) {
|
||||
throw new Error('Order cannot be deleted');
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Connection Management

- Always call `db.init()` before using any database features
- Use `db.disconnect()` when shutting down your application
- Set appropriate connection pool sizes based on your application's needs
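
A minimal lifecycle sketch for the bullets above; the connection options are placeholders taken from the setup section earlier in this readme, and the try/finally arrangement is just one way to guarantee the disconnect.

```typescript
import { SmartdataDb } from '@push.rocks/smartdata';

const db = new SmartdataDb({
  mongoDbUrl: 'mongodb://<USERNAME>:<PASSWORD>@localhost:27017/<DBNAME>', // placeholder values
  mongoDbName: 'your-database-name',
});

await db.init(); // open the connection pool before any queries run
try {
  // ... application work ...
} finally {
  await db.disconnect(); // release the pool on shutdown
}
```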
### Document Design
|
||||
|
||||
- Use appropriate decorators (`@svDb`, `@unI`, `@index`, `@searchable`) to optimize database operations
|
||||
- Implement type-safe models by properly extending `SmartDataDbDoc`
|
||||
- Consider using interfaces to define document structures separately from implementation
|
||||
- Mark fields that need to be searched with the `@searchable()` decorator
|
||||
|
||||
### Search Optimization
|
||||
|
||||
- Create MongoDB text indexes for collections that need advanced search operations
|
||||
- Use `searchWithLucene()` for robust searches with fallback mechanisms
|
||||
- Prefer field-specific searches when possible for better performance
|
||||
- Use simple term queries instead of boolean operators if you don't have text indexes
|
||||
|
||||
### Performance Optimization
|
||||
|
||||
- Use cursors for large datasets instead of loading all documents into memory
|
||||
- Create appropriate indexes for frequent query patterns
|
||||
- Use projections to limit the fields returned when you don't need the entire document
|
||||
|
||||
### Distributed Systems
|
||||
|
||||
- Implement proper error handling for leader election events
|
||||
- Ensure all instances have synchronized clocks when using time-based coordination
|
||||
- Use the distributed coordinator's task management features for coordinated operations
|
||||
|
||||
### Type Safety
|
||||
|
||||
- Take advantage of the `DeepQuery<T>` type for fully type-safe queries
|
||||
- Define proper types for your document models to enhance IDE auto-completion
|
||||
- Use generic type parameters to specify exact document types when working with collections
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome contributions to @push.rocks/smartdata! Here's how you can help:
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
||||
3. Commit your changes (`git commit -m 'Add amazing feature'`)
|
||||
4. Push to the branch (`git push origin feature/amazing-feature`)
|
||||
5. Open a Pull Request
|
||||
|
||||
Please make sure to update tests as appropriate and follow our coding standards.
|
||||
|
||||
## License and Legal Information
|
||||
|
||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
||||
|
||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||
|
||||
### Trademarks
|
||||
|
||||
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
|
||||
|
||||
### Company Information
|
||||
|
||||
Task Venture Capital GmbH
|
||||
Registered at District court Bremen HRB 35230 HB, Germany
|
||||
|
||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
||||
|
||||
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
||||
|
@ -3,7 +3,10 @@ import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import type * as taskbuffer from '@push.rocks/taskbuffer';
|
||||
|
||||
import * as smartdata from '../ts/index.js';
|
||||
import { SmartdataDistributedCoordinator, DistributedClass } from '../ts/smartdata.classes.distributedcoordinator.js'; // path might need adjusting
|
||||
import {
|
||||
SmartdataDistributedCoordinator,
|
||||
DistributedClass,
|
||||
} from '../ts/classes.distributedcoordinator.js'; // path might need adjusting
|
||||
const totalInstances = 10;
|
||||
|
||||
// =======================================
|
||||
@ -20,93 +23,100 @@ tap.test('should create a testinstance as database', async () => {
|
||||
});
|
||||
|
||||
tap.test('should instantiate DistributedClass', async (tools) => {
|
||||
const instance = new DistributedClass();
|
||||
expect(instance).toBeInstanceOf(DistributedClass);
|
||||
const instance = new DistributedClass();
|
||||
expect(instance).toBeInstanceOf(DistributedClass);
|
||||
});
|
||||
|
||||
tap.test('DistributedClass should update the time', async (tools) => {
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
const initialTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||
await distributedCoordinator.sendHeartbeat();
|
||||
const updatedTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||
expect(updatedTime).toBeGreaterThan(initialTime);
|
||||
await distributedCoordinator.stop();
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
const initialTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||
await distributedCoordinator.sendHeartbeat();
|
||||
const updatedTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||
expect(updatedTime).toBeGreaterThan(initialTime);
|
||||
await distributedCoordinator.stop();
|
||||
});
|
||||
|
||||
tap.test('should instantiate SmartdataDistributedCoordinator', async (tools) => {
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
expect(distributedCoordinator).toBeInstanceOf(SmartdataDistributedCoordinator);
|
||||
await distributedCoordinator.stop();
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
expect(distributedCoordinator).toBeInstanceOf(SmartdataDistributedCoordinator);
|
||||
await distributedCoordinator.stop();
|
||||
});
|
||||
|
||||
tap.test('SmartdataDistributedCoordinator should update leader status', async (tools) => {
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
await distributedCoordinator.checkAndMaybeLead();
|
||||
expect(distributedCoordinator.ownInstance.data.elected).toBeOneOf([true, false]);
|
||||
await distributedCoordinator.stop();
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
await distributedCoordinator.checkAndMaybeLead();
|
||||
expect(distributedCoordinator.ownInstance.data.elected).toBeOneOf([true, false]);
|
||||
await distributedCoordinator.stop();
|
||||
});
|
||||
|
||||
tap.test('SmartdataDistributedCoordinator should handle distributed task requests', async (tools) => {
|
||||
tap.test(
|
||||
'SmartdataDistributedCoordinator should handle distributed task requests',
|
||||
async (tools) => {
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
await distributedCoordinator.start();
|
||||
|
||||
const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
|
||||
submitterId: "mockSubmitter12345", // Some unique mock submitter ID
|
||||
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||
taskName: "SampleTask",
|
||||
taskVersion: "1.0.0", // Assuming it's a version string
|
||||
taskExecutionTime: Date.now(),
|
||||
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||
status: 'requesting'
|
||||
submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
|
||||
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||
taskName: 'SampleTask',
|
||||
taskVersion: '1.0.0', // Assuming it's a version string
|
||||
taskExecutionTime: Date.now(),
|
||||
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||
status: 'requesting',
|
||||
};
|
||||
|
||||
const response = await distributedCoordinator.fireDistributedTaskRequest(mockTaskRequest);
|
||||
console.log(response) // based on your expected structure for the response
|
||||
console.log(response); // based on your expected structure for the response
|
||||
await distributedCoordinator.stop();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
tap.test('SmartdataDistributedCoordinator should update distributed task requests', async (tools) => {
|
||||
tap.test(
|
||||
'SmartdataDistributedCoordinator should update distributed task requests',
|
||||
async (tools) => {
|
||||
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||
|
||||
|
||||
await distributedCoordinator.start();
|
||||
|
||||
const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
|
||||
submitterId: "mockSubmitter12345", // Some unique mock submitter ID
|
||||
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||
taskName: "SampleTask",
|
||||
taskVersion: "1.0.0", // Assuming it's a version string
|
||||
taskExecutionTime: Date.now(),
|
||||
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||
status: 'requesting'
|
||||
submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
|
||||
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||
taskName: 'SampleTask',
|
||||
taskVersion: '1.0.0', // Assuming it's a version string
|
||||
taskExecutionTime: Date.now(),
|
||||
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||
status: 'requesting',
|
||||
};
|
||||
|
||||
|
||||
await distributedCoordinator.updateDistributedTaskRequest(mockTaskRequest);
|
||||
// Here, we can potentially check if a DB entry got updated or some other side-effect of the update method.
|
||||
await distributedCoordinator.stop();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
tap.test('should elect only one leader amongst multiple instances', async (tools) => {
|
||||
const coordinators = Array.from({ length: totalInstances }).map(() => new SmartdataDistributedCoordinator(testDb));
|
||||
await Promise.all(coordinators.map(coordinator => coordinator.start()));
|
||||
const leaders = coordinators.filter(coordinator => coordinator.ownInstance.data.elected);
|
||||
for (const leader of leaders) {
|
||||
console.log(leader.ownInstance);
|
||||
}
|
||||
expect(leaders.length).toEqual(1);
|
||||
const coordinators = Array.from({ length: totalInstances }).map(
|
||||
() => new SmartdataDistributedCoordinator(testDb),
|
||||
);
|
||||
await Promise.all(coordinators.map((coordinator) => coordinator.start()));
|
||||
const leaders = coordinators.filter((coordinator) => coordinator.ownInstance.data.elected);
|
||||
for (const leader of leaders) {
|
||||
console.log(leader.ownInstance);
|
||||
}
|
||||
expect(leaders.length).toEqual(1);
|
||||
|
||||
// stopping clears a coordinator from being elected.
|
||||
await Promise.all(coordinators.map(coordinator => coordinator.stop()));
|
||||
// stopping clears a coordinator from being elected.
|
||||
await Promise.all(coordinators.map((coordinator) => coordinator.stop()));
|
||||
});
|
||||
|
||||
tap.test('should clean up', async () => {
|
||||
await smartmongoInstance.stopAndDumpToDir(`.nogit/testdata/`);
|
||||
setTimeout(() => process.exit(), 2000);
|
||||
})
|
||||
await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.distributedcoordinator.ts`);
|
||||
setTimeout(() => process.exit(), 2000);
|
||||
});
|
||||
|
||||
tap.start({ throwOnError: true });
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
||||
import { smartunique } from '../ts/plugins.js';
|
||||
|
||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||
|
||||
|
204
test/test.search.ts
Normal file
@ -0,0 +1,204 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import { smartunique } from '../ts/plugins.js';
|
||||
|
||||
// Import the smartdata library
|
||||
import * as smartdata from '../ts/index.js';
|
||||
import { searchable, getSearchableFields } from '../ts/classes.doc.js';
|
||||
|
||||
// Set up database connection
|
||||
let smartmongoInstance: smartmongo.SmartMongo;
|
||||
let testDb: smartdata.SmartdataDb;
|
||||
|
||||
// Define a test class with searchable fields using the standard SmartDataDbDoc
|
||||
@smartdata.Collection(() => testDb)
|
||||
class Product extends smartdata.SmartDataDbDoc<Product, Product> {
|
||||
@smartdata.unI()
|
||||
public id: string = smartunique.shortId();
|
||||
|
||||
@smartdata.svDb()
|
||||
@searchable()
|
||||
public name: string;
|
||||
|
||||
@smartdata.svDb()
|
||||
@searchable()
|
||||
public description: string;
|
||||
|
||||
@smartdata.svDb()
|
||||
@searchable()
|
||||
public category: string;
|
||||
|
||||
@smartdata.svDb()
|
||||
public price: number;
|
||||
|
||||
constructor(nameArg: string, descriptionArg: string, categoryArg: string, priceArg: number) {
|
||||
super();
|
||||
this.name = nameArg;
|
||||
this.description = descriptionArg;
|
||||
this.category = categoryArg;
|
||||
this.price = priceArg;
|
||||
}
|
||||
}
|
||||
|
||||
tap.test('should create a test database instance', async () => {
|
||||
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||
testDb = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
|
||||
await testDb.init();
|
||||
});
|
||||
|
||||
tap.test('should create test products with searchable fields', async () => {
|
||||
// Create several products with different fields to search
|
||||
const products = [
|
||||
new Product('iPhone 12', 'Latest iPhone with A14 Bionic chip', 'Electronics', 999),
|
||||
new Product('MacBook Pro', 'Powerful laptop for professionals', 'Electronics', 1999),
|
||||
new Product('AirPods', 'Wireless earbuds with noise cancellation', 'Electronics', 249),
|
||||
new Product('Galaxy S21', 'Samsung flagship phone with great camera', 'Electronics', 899),
|
||||
new Product('Kindle Paperwhite', 'E-reader with built-in light', 'Books', 129),
|
||||
new Product('Harry Potter', 'Fantasy book series about wizards', 'Books', 49),
|
||||
new Product('Coffee Maker', 'Automatic drip coffee machine', 'Kitchen', 89),
|
||||
new Product('Blender', 'High-speed blender for smoothies', 'Kitchen', 129),
|
||||
];
|
||||
|
||||
// Save all products to the database
|
||||
for (const product of products) {
|
||||
await product.save();
|
||||
}
|
||||
|
||||
// Verify that we can get all products
|
||||
const allProducts = await Product.getInstances({});
|
||||
expect(allProducts.length).toEqual(products.length);
|
||||
console.log(`Successfully created and saved ${allProducts.length} products`);
|
||||
});
|
||||
|
||||
tap.test('should retrieve searchable fields for a class', async () => {
|
||||
// Use the getSearchableFields function to verify our searchable fields
|
||||
const searchableFields = getSearchableFields('Product');
|
||||
console.log('Searchable fields:', searchableFields);
|
||||
|
||||
expect(searchableFields.length).toEqual(3);
|
||||
expect(searchableFields).toContain('name');
|
||||
expect(searchableFields).toContain('description');
|
||||
expect(searchableFields).toContain('category');
|
||||
});
|
||||
|
||||
tap.test('should search products by exact field match', async () => {
|
||||
// Basic field exact match search
|
||||
const electronicsProducts = await Product.getInstances({ category: 'Electronics' });
|
||||
console.log(`Found ${electronicsProducts.length} products in Electronics category`);
|
||||
|
||||
expect(electronicsProducts.length).toEqual(4);
|
||||
});
|
||||
|
||||
tap.test('should search products by basic search method', async () => {
|
||||
// Using the basic search method with simple Lucene query
|
||||
try {
|
||||
const iPhoneResults = await Product.search('iPhone');
|
||||
console.log(`Found ${iPhoneResults.length} products matching 'iPhone' using basic search`);
|
||||
|
||||
expect(iPhoneResults.length).toEqual(1);
|
||||
expect(iPhoneResults[0].name).toEqual('iPhone 12');
|
||||
} catch (error) {
|
||||
console.error('Basic search error:', error.message);
|
||||
// If basic search fails, we'll demonstrate the enhanced approach in later tests
|
||||
console.log('Will test with enhanced searchWithLucene method next');
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('should search products with searchWithLucene method', async () => {
|
||||
// Using the robust searchWithLucene method
|
||||
const wirelessResults = await Product.searchWithLucene('wireless');
|
||||
console.log(
|
||||
`Found ${wirelessResults.length} products matching 'wireless' using searchWithLucene`,
|
||||
);
|
||||
|
||||
expect(wirelessResults.length).toEqual(1);
|
||||
expect(wirelessResults[0].name).toEqual('AirPods');
|
||||
});
|
||||
|
||||
tap.test('should search products by category with searchWithLucene', async () => {
|
||||
// Using field-specific search with searchWithLucene
|
||||
const kitchenResults = await Product.searchWithLucene('category:Kitchen');
|
||||
console.log(`Found ${kitchenResults.length} products in Kitchen category using searchWithLucene`);
|
||||
|
||||
expect(kitchenResults.length).toEqual(2);
|
||||
expect(kitchenResults[0].category).toEqual('Kitchen');
|
||||
expect(kitchenResults[1].category).toEqual('Kitchen');
|
||||
});
|
||||
|
||||
tap.test('should search products with partial word matches', async () => {
|
||||
// Testing partial word matches
|
||||
const proResults = await Product.searchWithLucene('Pro');
|
||||
console.log(`Found ${proResults.length} products matching 'Pro'`);
|
||||
|
||||
// Should match both "MacBook Pro" and "professionals" in description
|
||||
expect(proResults.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should search across multiple searchable fields', async () => {
|
||||
// Test searching across all searchable fields
|
||||
const bookResults = await Product.searchWithLucene('book');
|
||||
console.log(`Found ${bookResults.length} products matching 'book' across all fields`);
|
||||
|
||||
// Should match "MacBook" in name and "Books" in category
|
||||
expect(bookResults.length).toBeGreaterThan(1);
|
||||
});
|
||||
|
||||
tap.test('should handle case insensitive searches', async () => {
|
||||
// Test case insensitivity
|
||||
const electronicsResults = await Product.searchWithLucene('electronics');
|
||||
const ElectronicsResults = await Product.searchWithLucene('Electronics');
|
||||
|
||||
console.log(`Found ${electronicsResults.length} products matching lowercase 'electronics'`);
|
||||
console.log(`Found ${ElectronicsResults.length} products matching capitalized 'Electronics'`);
|
||||
|
||||
// Both searches should return the same results
|
||||
expect(electronicsResults.length).toEqual(ElectronicsResults.length);
|
||||
});
|
||||
|
||||
tap.test('should demonstrate search fallback mechanisms', async () => {
|
||||
console.log('\n====== FALLBACK MECHANISM DEMONSTRATION ======');
|
||||
console.log('If MongoDB query fails, searchWithLucene will:');
|
||||
console.log('1. Try using basic MongoDB filters');
|
||||
console.log('2. Fall back to field-specific searches');
|
||||
console.log('3. As last resort, perform in-memory filtering');
|
||||
console.log('This ensures robust search even with complex queries');
|
||||
console.log('==============================================\n');
|
||||
|
||||
// Use a simpler term that should be found in descriptions
|
||||
// Avoid using "OR" operator which requires a text index
|
||||
const results = await Product.searchWithLucene('high');
|
||||
console.log(`Found ${results.length} products matching 'high'`);
|
||||
|
||||
// "High-speed blender" contains "high"
|
||||
expect(results.length).toBeGreaterThan(0);
|
||||
|
||||
// Try another fallback example that won't need $text
|
||||
const powerfulResults = await Product.searchWithLucene('powerful');
|
||||
console.log(`Found ${powerfulResults.length} products matching 'powerful'`);
|
||||
|
||||
// "Powerful laptop for professionals" contains "powerful"
|
||||
expect(powerfulResults.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should explain the advantages of the integrated approach', async () => {
|
||||
console.log('\n====== INTEGRATED SEARCH APPROACH BENEFITS ======');
|
||||
console.log('1. No separate class hierarchy - keeps code simple');
|
||||
console.log('2. Enhanced convertFilterForMongoDb handles MongoDB operators');
|
||||
console.log('3. Robust fallback mechanisms ensure searches always work');
|
||||
console.log('4. searchWithLucene provides powerful search capabilities');
|
||||
console.log('5. Backwards compatible with existing code');
|
||||
console.log('================================================\n');
|
||||
|
||||
expect(true).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('close database connection', async () => {
|
||||
await testDb.mongoDb.dropDatabase();
|
||||
await testDb.close();
|
||||
if (smartmongoInstance) {
|
||||
await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.search.ts`);
|
||||
}
|
||||
setTimeout(() => process.exit(), 2000);
|
||||
});
|
||||
|
||||
tap.start({ throwOnError: true });
|
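A minimal sketch of the pattern the new test file exercises (it reuses the imports shown above and assumes an initialized SmartdataDb named testDb; Note is a purely illustrative class, not part of the commit):

@smartdata.Collection(() => testDb)
class Note extends smartdata.SmartDataDbDoc<Note, Note> {
  @smartdata.unI()
  public id: string = smartunique.shortId();

  @smartdata.svDb()
  @searchable()
  public title: string;

  constructor(titleArg: string) {
    super();
    this.title = titleArg;
  }
}

await new Note('Wireless earbuds').save();
const hits = await Note.searchWithLucene('title:wireless'); // field-scoped, case-insensitive match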
26
test/test.ts
@ -1,7 +1,7 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
||||
import { smartunique } from '../ts/plugins.js';
|
||||
|
||||
import * as mongodb from 'mongodb';
|
||||
|
||||
@ -75,7 +75,7 @@ class Car extends smartdata.SmartDataDbDoc<Car, Car> {
|
||||
tap.test('should create a new id', async () => {
|
||||
const newid = await Car.getNewId();
|
||||
console.log(newid);
|
||||
})
|
||||
});
|
||||
|
||||
tap.test('should save the car to the db', async (toolsArg) => {
|
||||
const myCar = new Car('red', 'Volvo');
|
||||
@ -97,7 +97,7 @@ tap.test('should save the car to the db', async (toolsArg) => {
|
||||
console.log(
|
||||
`Filled database with ${counter} of ${totalCars} Cars and memory usage ${
|
||||
process.memoryUsage().rss / 1e6
|
||||
} MB`
|
||||
} MB`,
|
||||
);
|
||||
}
|
||||
} while (counter < totalCars);
|
||||
@ -116,7 +116,7 @@ tap.test('expect to get instance of Car with shallow match', async () => {
|
||||
console.log(
|
||||
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||
Date.now() - timeStart
|
||||
}ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
|
||||
}ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||
);
|
||||
}
|
||||
expect(myCars[0].deepData.sodeep).toEqual('yes');
|
||||
@ -139,7 +139,7 @@ tap.test('expect to get instance of Car with deep match', async () => {
|
||||
console.log(
|
||||
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||
Date.now() - timeStart
|
||||
}ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
|
||||
}ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||
);
|
||||
}
|
||||
expect(myCars2[0].deepData.sodeep).toEqual('yes');
|
||||
@ -199,10 +199,16 @@ tap.test('should store a new Truck', async () => {
|
||||
const truck = new Truck('blue', 'MAN');
|
||||
await truck.save();
|
||||
const myTruck2 = await Truck.getInstance({ color: 'blue' });
|
||||
expect(myTruck2.color).toEqual('blue');
|
||||
myTruck2.color = 'red';
|
||||
await myTruck2.save();
|
||||
const myTruck3 = await Truck.getInstance({ color: 'blue' });
|
||||
console.log(myTruck3);
|
||||
expect(myTruck3).toBeNull();
|
||||
});
|
||||
|
||||
tap.test('should return a count', async () => {
|
||||
const truckCount = await Truck.getCount();
|
||||
expect(truckCount).toEqual(1);
|
||||
});
|
||||
|
||||
tap.test('should use a cursor', async () => {
|
||||
@ -218,11 +224,13 @@ tap.test('should use a cursor', async () => {
|
||||
// close the database connection
|
||||
// =======================================
|
||||
tap.test('close', async () => {
|
||||
await testDb.mongoDb.dropDatabase();
|
||||
await testDb.close();
|
||||
if (smartmongoInstance) {
|
||||
await smartmongoInstance.stop();
|
||||
await smartmongoInstance.stopAndDumpToDir('./.nogit/dbdump/test.ts');
|
||||
} else {
|
||||
await testDb.mongoDb.dropDatabase();
|
||||
await testDb.close();
|
||||
}
|
||||
setTimeout(() => process.exit(), 2000);
|
||||
});
|
||||
|
||||
tap.start({ throwOnError: true });
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
||||
import { smartunique } from '../ts/plugins.js';
|
||||
|
||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
import * as smartmongo from '@push.rocks/smartmongo';
|
||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
||||
import { smartunique } from '../ts/plugins.js';
|
||||
|
||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
/**
|
||||
* autocreated commitinfo by @pushrocks/commitinfo
|
||||
* autocreated commitinfo by @push.rocks/commitinfo
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@push.rocks/smartdata',
|
||||
version: '5.0.39',
|
||||
description: 'do more with data'
|
||||
version: '5.8.2',
|
||||
description: 'An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.'
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
||||
import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
|
||||
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
|
||||
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
|
||||
import { CollectionFactory } from './smartdata.classes.collectionfactory.js';
|
||||
import * as plugins from './plugins.js';
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
import { SmartdataDbCursor } from './classes.cursor.js';
|
||||
import { SmartDataDbDoc, type IIndexOptions } from './classes.doc.js';
|
||||
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||
import { CollectionFactory } from './classes.collectionfactory.js';
|
||||
|
||||
export interface IFindOptions {
|
||||
limit?: number;
|
||||
@ -49,7 +49,7 @@ export interface IManager {
|
||||
db: SmartdataDb;
|
||||
}
|
||||
|
||||
export const setDefaultManagerForDoc = <T>(managerArg: IManager, dbDocArg: T): T => {
|
||||
export const setDefaultManagerForDoc = <T,>(managerArg: IManager, dbDocArg: T): T => {
|
||||
(dbDocArg as any).prototype.defaultManager = managerArg;
|
||||
return dbDocArg;
|
||||
};
|
||||
@ -127,6 +127,7 @@ export class SmartdataCollection<T> {
|
||||
public collectionName: string;
|
||||
public smartdataDb: SmartdataDb;
|
||||
public uniqueIndexes: string[] = [];
|
||||
public regularIndexes: Array<{field: string, options: IIndexOptions}> = [];
|
||||
|
||||
constructor(classNameArg: string, smartDataDbArg: SmartdataDb) {
|
||||
// tell the collection where it belongs
|
||||
@ -170,6 +171,24 @@ export class SmartdataCollection<T> {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* creates regular indexes for the collection
|
||||
*/
|
||||
public createRegularIndexes(indexesArg: Array<{field: string, options: IIndexOptions}> = []) {
|
||||
for (const indexDef of indexesArg) {
|
||||
// Check if we've already created this index
|
||||
const indexKey = indexDef.field;
|
||||
if (!this.regularIndexes.some(i => i.field === indexKey)) {
|
||||
this.mongoDbCollection.createIndex(
|
||||
{ [indexDef.field]: 1 }, // Simple single-field index
|
||||
indexDef.options
|
||||
);
|
||||
// Track that we've created this index
|
||||
this.regularIndexes.push(indexDef);
|
||||
}
|
||||
}
|
||||
}
|
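// Illustrative input (hypothetical values, not taken from the sources): indexesArg carries
// the entries collected by the index() decorator on the doc class, e.g.
//   [{ field: 'name', options: { background: true } }]
// Each listed field gets a single ascending index; entries already present in
// this.regularIndexes are skipped. createIndex() is not awaited, so the index is built
// in the background relative to the insert that triggered it.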
||||
|
||||
/**
|
||||
* adds a validation function that all newly inserted and updated objects have to pass
|
||||
*/
|
||||
@ -190,7 +209,7 @@ export class SmartdataCollection<T> {
|
||||
|
||||
public async getCursor(
|
||||
filterObjectArg: any,
|
||||
dbDocArg: typeof SmartDataDbDoc
|
||||
dbDocArg: typeof SmartDataDbDoc,
|
||||
): Promise<SmartdataDbCursor<any>> {
|
||||
await this.init();
|
||||
const cursor = this.mongoDbCollection.find(filterObjectArg);
|
||||
@ -213,7 +232,7 @@ export class SmartdataCollection<T> {
|
||||
*/
|
||||
public async watch(
|
||||
filterObject: any,
|
||||
smartdataDbDocArg: typeof SmartDataDbDoc
|
||||
smartdataDbDocArg: typeof SmartDataDbDoc,
|
||||
): Promise<SmartdataDbWatcher> {
|
||||
await this.init();
|
||||
const changeStream = this.mongoDbCollection.watch(
|
||||
@ -224,7 +243,7 @@ export class SmartdataCollection<T> {
|
||||
],
|
||||
{
|
||||
fullDocument: 'updateLookup',
|
||||
}
|
||||
},
|
||||
);
|
||||
const smartdataWatcher = new SmartdataDbWatcher(changeStream, smartdataDbDocArg);
|
||||
await smartdataWatcher.readyDeferred.promise;
|
||||
@ -238,6 +257,12 @@ export class SmartdataCollection<T> {
|
||||
await this.init();
|
||||
await this.checkDoc(dbDocArg);
|
||||
this.markUniqueIndexes(dbDocArg.uniqueIndexes);
|
||||
|
||||
// Create regular indexes if available
|
||||
if (dbDocArg.regularIndexes && dbDocArg.regularIndexes.length > 0) {
|
||||
this.createRegularIndexes(dbDocArg.regularIndexes);
|
||||
}
|
||||
|
||||
const saveableObject = await dbDocArg.createSavableObject();
|
||||
const result = await this.mongoDbCollection.insertOne(saveableObject);
|
||||
return result;
|
||||
@ -261,7 +286,7 @@ export class SmartdataCollection<T> {
|
||||
const result = await this.mongoDbCollection.updateOne(
|
||||
identifiableObject,
|
||||
{ $set: updateableObject },
|
||||
{ upsert: true }
|
||||
{ upsert: true },
|
||||
);
|
||||
return result;
|
||||
}
|
||||
@ -273,6 +298,11 @@ export class SmartdataCollection<T> {
|
||||
await this.mongoDbCollection.deleteOne(identifiableObject);
|
||||
}
|
||||
|
||||
public async getCount(filterObject: any) {
|
||||
await this.init();
|
||||
return this.mongoDbCollection.countDocuments(filterObject);
|
||||
}
|
||||
|
||||
/**
|
||||
* checks a Doc for constraints
|
||||
* if this.objectValidation is not set it passes.
|
||||
@ -290,4 +320,4 @@ export class SmartdataCollection<T> {
|
||||
}
|
||||
return done.promise;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,6 +1,6 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import { SmartdataCollection } from './smartdata.classes.collection.js';
|
||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
||||
import * as plugins from './plugins.js';
|
||||
import { SmartdataCollection } from './classes.collection.js';
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
|
||||
export class CollectionFactory {
|
||||
public collections: { [key: string]: SmartdataCollection<any> } = {};
|
@ -1,4 +1,4 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
export const getNewUniqueId = async (prefixArg?: string) => {
|
||||
return plugins.smartunique.uni(prefixArg);
|
@ -1,5 +1,5 @@
|
||||
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import { SmartDataDbDoc } from './classes.doc.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
/**
|
||||
* a wrapper for the native mongodb cursor. Exposes better
|
||||
@ -15,14 +15,14 @@ export class SmartdataDbCursor<T = any> {
|
||||
this.smartdataDbDoc = dbDocArg;
|
||||
}
|
||||
|
||||
public async next(closeAtEnd = true) {
|
||||
public async next(closeAtEnd = true): Promise<T> {
|
||||
const result = this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(
|
||||
await this.mongodbCursor.next()
|
||||
await this.mongodbCursor.next(),
|
||||
);
|
||||
if (!result && closeAtEnd) {
|
||||
await this.close();
|
||||
}
|
||||
return result;
|
||||
return result as T;
|
||||
}
|
||||
|
||||
public async forEach(forEachFuncArg: (itemArg: T) => Promise<any>, closeCursorAtEnd = true) {
|
||||
@ -40,6 +40,11 @@ export class SmartdataDbCursor<T = any> {
|
||||
}
|
||||
}
|
||||
|
||||
public async toArray(): Promise<T[]> {
|
||||
const result = await this.mongodbCursor.toArray();
|
||||
return result.map((itemArg) => this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(itemArg)) as T[];
|
||||
}
|
||||
|
||||
public async close() {
|
||||
await this.mongodbCursor.close();
|
||||
}
|
@ -1,9 +1,9 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
import { SmartdataCollection } from './smartdata.classes.collection.js';
|
||||
import { EasyStore } from './smartdata.classes.easystore.js';
|
||||
import { SmartdataCollection } from './classes.collection.js';
|
||||
import { EasyStore } from './classes.easystore.js';
|
||||
|
||||
import { logger } from './smartdata.logging.js';
|
||||
import { logger } from './logging.js';
|
||||
|
||||
/**
|
||||
* interface - indicates the connection status of the db
|
@ -1,8 +1,8 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
||||
import { managed, setDefaultManagerForDoc } from './smartdata.classes.collection.js';
|
||||
import { SmartDataDbDoc, svDb, unI } from './smartdata.classes.doc.js';
|
||||
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
|
||||
import * as plugins from './plugins.js';
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
import { managed, setDefaultManagerForDoc } from './classes.collection.js';
|
||||
import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';
|
||||
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||
|
||||
@managed()
|
||||
export class DistributedClass extends SmartDataDbDoc<DistributedClass, DistributedClass> {
|
||||
@ -139,7 +139,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
|
||||
const eligibleLeader = leaders.find(
|
||||
(leader) =>
|
||||
leader.data.lastUpdated >=
|
||||
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 20 })
|
||||
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 20 }),
|
||||
);
|
||||
return eligibleLeader;
|
||||
});
|
||||
@ -178,16 +178,14 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
|
||||
console.log('bidding code stored.');
|
||||
});
|
||||
console.log(`bidding for leadership...`);
|
||||
await plugins.smartdelay.delayFor(
|
||||
plugins.smarttime.getMilliSecondsFromUnits({ seconds: 5 })
|
||||
);
|
||||
await plugins.smartdelay.delayFor(plugins.smarttime.getMilliSecondsFromUnits({ seconds: 5 }));
|
||||
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||
let biddingInstances = await DistributedClass.getInstances({});
|
||||
biddingInstances = biddingInstances.filter(
|
||||
(instanceArg) =>
|
||||
instanceArg.data.status === 'bidding' &&
|
||||
instanceArg.data.lastUpdated >=
|
||||
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 10 })
|
||||
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 10 }),
|
||||
);
|
||||
console.log(`found ${biddingInstances.length} bidding instances...`);
|
||||
this.ownInstance.data.elected = true;
|
||||
@ -242,7 +240,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
|
||||
for (const instance of allInstances) {
|
||||
if (instance.data.status === 'stopped') {
|
||||
await instance.delete();
|
||||
};
|
||||
}
|
||||
}
|
||||
await plugins.smartdelay.delayFor(10000);
|
||||
}
|
||||
@ -250,7 +248,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
|
||||
|
||||
// abstract implemented methods
|
||||
public async fireDistributedTaskRequest(
|
||||
taskRequestArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest
|
||||
taskRequestArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
|
||||
): Promise<plugins.taskbuffer.distributedCoordination.IDistributedTaskRequestResult> {
|
||||
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||
if (!this.ownInstance) {
|
||||
@ -277,7 +275,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
|
||||
}
|
||||
|
||||
public async updateDistributedTaskRequest(
|
||||
infoBasisArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest
|
||||
infoBasisArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
|
||||
): Promise<void> {
|
||||
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||
const existingInfoBasis = this.ownInstance.data.taskRequests.find((infoBasisItem) => {
|
565
ts/classes.doc.ts
Normal file
@ -0,0 +1,565 @@
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
import { SmartdataDbCursor } from './classes.cursor.js';
|
||||
import { type IManager, SmartdataCollection } from './classes.collection.js';
|
||||
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||
import { SmartdataLuceneAdapter } from './classes.lucene.adapter.js';
|
||||
|
||||
export type TDocCreation = 'db' | 'new' | 'mixed';
|
||||
|
||||
// Set of searchable fields for each class
|
||||
const searchableFieldsMap = new Map<string, Set<string>>();
|
||||
|
||||
export function globalSvDb() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called svDb() on >${target.constructor.name}.${key}<`);
|
||||
if (!target.globalSaveableProperties) {
|
||||
target.globalSaveableProperties = [];
|
||||
}
|
||||
target.globalSaveableProperties.push(key);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* saveable - saveable decorator to be used on class properties
|
||||
*/
|
||||
export function svDb() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called svDb() on >${target.constructor.name}.${key}<`);
|
||||
if (!target.saveableProperties) {
|
||||
target.saveableProperties = [];
|
||||
}
|
||||
target.saveableProperties.push(key);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* searchable - marks a property as searchable with Lucene query syntax
|
||||
*/
|
||||
export function searchable() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called searchable() on >${target.constructor.name}.${key}<`);
|
||||
|
||||
// Initialize the set for this class if it doesn't exist
|
||||
const className = target.constructor.name;
|
||||
if (!searchableFieldsMap.has(className)) {
|
||||
searchableFieldsMap.set(className, new Set<string>());
|
||||
}
|
||||
|
||||
// Add the property to the searchable fields set
|
||||
searchableFieldsMap.get(className).add(key);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get searchable fields for a class
|
||||
*/
|
||||
export function getSearchableFields(className: string): string[] {
|
||||
if (!searchableFieldsMap.has(className)) {
|
||||
return [];
|
||||
}
|
||||
return Array.from(searchableFieldsMap.get(className));
|
||||
}
|
||||
|
||||
/**
|
||||
* unique index - decorator to mark a unique index
|
||||
*/
|
||||
export function unI() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called unI on >>${target.constructor.name}.${key}<<`);
|
||||
|
||||
// mark the index as unique
|
||||
if (!target.uniqueIndexes) {
|
||||
target.uniqueIndexes = [];
|
||||
}
|
||||
target.uniqueIndexes.push(key);
|
||||
|
||||
// and also save it
|
||||
if (!target.saveableProperties) {
|
||||
target.saveableProperties = [];
|
||||
}
|
||||
target.saveableProperties.push(key);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for MongoDB indexes
|
||||
*/
|
||||
export interface IIndexOptions {
|
||||
background?: boolean;
|
||||
unique?: boolean;
|
||||
sparse?: boolean;
|
||||
expireAfterSeconds?: number;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* index - decorator to mark a field for regular indexing
|
||||
*/
|
||||
export function index(options?: IIndexOptions) {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called index() on >${target.constructor.name}.${key}<`);
|
||||
|
||||
// Initialize regular indexes array if it doesn't exist
|
||||
if (!target.regularIndexes) {
|
||||
target.regularIndexes = [];
|
||||
}
|
||||
|
||||
// Add this field to regularIndexes with its options
|
||||
target.regularIndexes.push({
|
||||
field: key,
|
||||
options: options || {}
|
||||
});
|
||||
|
||||
// Also ensure it's marked as saveable
|
||||
if (!target.saveableProperties) {
|
||||
target.saveableProperties = [];
|
||||
}
|
||||
|
||||
if (!target.saveableProperties.includes(key)) {
|
||||
target.saveableProperties.push(key);
|
||||
}
|
||||
};
|
||||
}
|
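// Illustrative usage (hypothetical property, not taken from the sources): the decorators
// above are meant to be stacked on document properties, e.g.
//
//   @svDb()
//   @searchable()
//   @index({ background: true, sparse: true })
//   public title: string;
//
// svDb() marks the property for persistence, searchable() registers it for the
// Lucene-style search helpers below, and index() queues a regular MongoDB index with
// the given IIndexOptions, which the collection creates on the first insert.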
||||
|
||||
export const convertFilterForMongoDb = (filterArg: { [key: string]: any }) => {
|
||||
// Special case: detect MongoDB operators and pass them through directly
|
||||
const topLevelOperators = ['$and', '$or', '$nor', '$not', '$text', '$where', '$regex'];
|
||||
for (const key of Object.keys(filterArg)) {
|
||||
if (topLevelOperators.includes(key)) {
|
||||
return filterArg; // Return the filter as-is for MongoDB operators
|
||||
}
|
||||
}
|
||||
|
||||
// Original conversion logic for non-MongoDB query objects
|
||||
const convertedFilter: { [key: string]: any } = {};
|
||||
|
||||
const convertFilterArgument = (keyPathArg2: string, filterArg2: any) => {
|
||||
if (Array.isArray(filterArg2)) {
|
||||
// Directly assign arrays (they might be using operators like $in or $all)
|
||||
convertFilterArgument(keyPathArg2, filterArg2[0]);
|
||||
} else if (typeof filterArg2 === 'object' && filterArg2 !== null) {
|
||||
for (const key of Object.keys(filterArg2)) {
|
||||
if (key.startsWith('$')) {
|
||||
convertedFilter[keyPathArg2] = filterArg2;
|
||||
return;
|
||||
} else if (key.includes('.')) {
|
||||
throw new Error('keys cannot contain dots');
|
||||
}
|
||||
}
|
||||
for (const key of Object.keys(filterArg2)) {
|
||||
convertFilterArgument(`${keyPathArg2}.${key}`, filterArg2[key]);
|
||||
}
|
||||
} else {
|
||||
convertedFilter[keyPathArg2] = filterArg2;
|
||||
}
|
||||
};
|
||||
|
||||
for (const key of Object.keys(filterArg)) {
|
||||
convertFilterArgument(key, filterArg[key]);
|
||||
}
|
||||
return convertedFilter;
|
||||
};
|
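// Illustrative examples (values borrowed from test/test.ts): convertFilterForMongoDb
// flattens nested filters into dot notation and passes MongoDB operators through:
//
//   convertFilterForMongoDb({ color: 'red', deepData: { sodeep: 'yes' } })
//   // -> { color: 'red', 'deepData.sodeep': 'yes' }
//
//   convertFilterForMongoDb({ $or: [{ color: 'red' }, { color: 'blue' }] })
//   // -> returned unchanged, since $or is a recognized top-level operator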
||||
|
||||
export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends IManager = any> {
|
||||
/**
|
||||
* the collection object a Doc belongs to
|
||||
*/
|
||||
public static collection: SmartdataCollection<any>;
|
||||
public collection: SmartdataCollection<any>;
|
||||
public static defaultManager;
|
||||
public static manager;
|
||||
public manager: TManager;
|
||||
|
||||
// STATIC
|
||||
public static createInstanceFromMongoDbNativeDoc<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
mongoDbNativeDocArg: any,
|
||||
): T {
|
||||
const newInstance = new this();
|
||||
(newInstance as any).creationStatus = 'db';
|
||||
for (const key of Object.keys(mongoDbNativeDocArg)) {
|
||||
newInstance[key] = mongoDbNativeDocArg[key];
|
||||
}
|
||||
return newInstance;
|
||||
}
|
||||
|
||||
/**
|
||||
* gets all instances as array
|
||||
* @param this
|
||||
* @param filterArg
|
||||
* @returns
|
||||
*/
|
||||
public static async getInstances<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
): Promise<T[]> {
|
||||
const foundDocs = await (this as any).collection.findAll(convertFilterForMongoDb(filterArg));
|
||||
const returnArray = [];
|
||||
for (const foundDoc of foundDocs) {
|
||||
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
||||
returnArray.push(newInstance);
|
||||
}
|
||||
return returnArray;
|
||||
}
|
||||
|
||||
/**
|
||||
* gets the first matching instance
|
||||
* @param this
|
||||
* @param filterArg
|
||||
* @returns
|
||||
*/
|
||||
public static async getInstance<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
): Promise<T> {
|
||||
const foundDoc = await (this as any).collection.findOne(convertFilterForMongoDb(filterArg));
|
||||
if (foundDoc) {
|
||||
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
||||
return newInstance;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* get a unique id prefixed with the class name
|
||||
*/
|
||||
public static async getNewId<T = any>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
lengthArg: number = 20,
|
||||
) {
|
||||
return `${(this as any).className}:${plugins.smartunique.shortId(lengthArg)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* get cursor
|
||||
* @returns
|
||||
*/
|
||||
public static async getCursor<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
) {
|
||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||
const cursor: SmartdataDbCursor<T> = await collection.getCursor(
|
||||
convertFilterForMongoDb(filterArg),
|
||||
this as any as typeof SmartDataDbDoc,
|
||||
);
|
||||
return cursor;
|
||||
}
|
||||
|
||||
public static async getCursorExtended<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
modifierFunction = (cursorArg: plugins.mongodb.FindCursor<plugins.mongodb.WithId<plugins.mongodb.BSON.Document>>) => cursorArg,
|
||||
): Promise<SmartdataDbCursor<T>> {
|
||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||
await collection.init();
|
||||
let cursor: plugins.mongodb.FindCursor<any> = collection.mongoDbCollection.find(
|
||||
convertFilterForMongoDb(filterArg),
|
||||
);
|
||||
cursor = modifierFunction(cursor);
|
||||
return new SmartdataDbCursor<T>(cursor, this as any as typeof SmartDataDbDoc);
|
||||
}
|
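// Illustrative usage (hypothetical call; the class and filter are borrowed from test/test.ts):
// the modifier function exposes the raw MongoDB FindCursor before it is wrapped, e.g.
//   const cursor = await Car.getCursorExtended(
//     { color: 'red' },
//     (rawCursor) => rawCursor.sort({ _id: -1 }).limit(10),
//   );
//   const newestRedCars = await cursor.toArray();
// sort() and limit() are standard FindCursor methods from the mongodb driver.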
||||
|
||||
/**
|
||||
* watch the collection
|
||||
* @param this
|
||||
* @param filterArg
|
||||
* @param forEachFunction
|
||||
*/
|
||||
public static async watch<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
) {
|
||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||
const watcher: SmartdataDbWatcher<T> = await collection.watch(
|
||||
convertFilterForMongoDb(filterArg),
|
||||
this as any,
|
||||
);
|
||||
return watcher;
|
||||
}
|
||||
|
||||
/**
|
||||
* run a function for all instances
|
||||
* @returns
|
||||
*/
|
||||
public static async forEach<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
||||
forEachFunction: (itemArg: T) => Promise<any>,
|
||||
) {
|
||||
const cursor: SmartdataDbCursor<T> = await (this as any).getCursor(filterArg);
|
||||
await cursor.forEach(forEachFunction);
|
||||
}
|
||||
|
||||
/**
|
||||
* returns a count of the documents in the collection
|
||||
*/
|
||||
public static async getCount<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T> = {} as any,
|
||||
) {
|
||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||
return await collection.getCount(filterArg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a MongoDB filter from a Lucene query string
|
||||
* @param luceneQuery Lucene query string
|
||||
* @returns MongoDB query object
|
||||
*/
|
||||
public static createSearchFilter<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
luceneQuery: string,
|
||||
): any {
|
||||
const className = (this as any).className || this.name;
|
||||
const searchableFields = getSearchableFields(className);
|
||||
|
||||
if (searchableFields.length === 0) {
|
||||
throw new Error(`No searchable fields defined for class ${className}`);
|
||||
}
|
||||
|
||||
const adapter = new SmartdataLuceneAdapter(searchableFields);
|
||||
return adapter.convert(luceneQuery);
|
||||
}
|
||||
|
||||
/**
|
||||
* Search documents using Lucene query syntax
|
||||
* @param luceneQuery Lucene query string
|
||||
* @returns Array of matching documents
|
||||
*/
|
||||
public static async search<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
luceneQuery: string,
|
||||
): Promise<T[]> {
|
||||
const filter = (this as any).createSearchFilter(luceneQuery);
|
||||
return await (this as any).getInstances(filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Search documents using Lucene query syntax with robust error handling
|
||||
* @param luceneQuery The Lucene query string to search with
|
||||
* @returns Array of matching documents
|
||||
*/
|
||||
public static async searchWithLucene<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
luceneQuery: string,
|
||||
): Promise<T[]> {
|
||||
try {
|
||||
const className = (this as any).className || this.name;
|
||||
const searchableFields = getSearchableFields(className);
|
||||
|
||||
if (searchableFields.length === 0) {
|
||||
console.warn(
|
||||
`No searchable fields defined for class ${className}, falling back to simple search`,
|
||||
);
|
||||
return (this as any).searchByTextAcrossFields(luceneQuery);
|
||||
}
|
||||
|
||||
// Simple term search optimization
|
||||
if (
|
||||
!luceneQuery.includes(':') &&
|
||||
!luceneQuery.includes(' AND ') &&
|
||||
!luceneQuery.includes(' OR ') &&
|
||||
!luceneQuery.includes(' NOT ')
|
||||
) {
|
||||
return (this as any).searchByTextAcrossFields(luceneQuery);
|
||||
}
|
||||
|
||||
// Try to use the Lucene-to-MongoDB conversion
|
||||
const filter = (this as any).createSearchFilter(luceneQuery);
|
||||
return await (this as any).getInstances(filter);
|
||||
} catch (error) {
|
||||
console.error(`Error in searchWithLucene: ${error.message}`);
|
||||
return (this as any).searchByTextAcrossFields(luceneQuery);
|
||||
}
|
||||
}
|
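// Illustrative behaviour (queries borrowed from test/test.search.ts, using its Product class):
//   Product.searchWithLucene('wireless')         // plain term: case-insensitive $or regex
//                                                // across name/description/category
//   Product.searchWithLucene('category:Kitchen') // field query: parsed as Lucene syntax and
//                                                // converted via createSearchFilter()
// If parsing or the resulting MongoDB query fails, the catch block above falls back to
// searchByTextAcrossFields(), which itself degrades to in-memory filtering.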
||||
|
||||
/**
|
||||
* Search by text across all searchable fields (fallback method)
|
||||
* @param searchText The text to search for in all searchable fields
|
||||
* @returns Array of matching documents
|
||||
*/
|
||||
private static async searchByTextAcrossFields<T>(
|
||||
this: plugins.tsclass.typeFest.Class<T>,
|
||||
searchText: string,
|
||||
): Promise<T[]> {
|
||||
try {
|
||||
const className = (this as any).className || this.name;
|
||||
const searchableFields = getSearchableFields(className);
|
||||
|
||||
// Fallback to direct filter if we have searchable fields
|
||||
if (searchableFields.length > 0) {
|
||||
// Create a simple $or query with regex for each field
|
||||
const orConditions = searchableFields.map((field) => ({
|
||||
[field]: { $regex: searchText, $options: 'i' },
|
||||
}));
|
||||
|
||||
const filter = { $or: orConditions };
|
||||
|
||||
try {
|
||||
// Try with MongoDB filter first
|
||||
return await (this as any).getInstances(filter);
|
||||
} catch (error) {
|
||||
console.warn('MongoDB filter failed, falling back to in-memory search');
|
||||
}
|
||||
}
|
||||
|
||||
// Last resort: get all and filter in memory
|
||||
const allDocs = await (this as any).getInstances({});
|
||||
const lowerSearchText = searchText.toLowerCase();
|
||||
|
||||
return allDocs.filter((doc: any) => {
|
||||
for (const field of searchableFields) {
|
||||
const value = doc[field];
|
||||
if (value && typeof value === 'string' && value.toLowerCase().includes(lowerSearchText)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Error in searchByTextAcrossFields: ${error.message}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// INSTANCE
|
||||
|
||||
/**
|
||||
* how the Doc in memory was created, may prove useful later.
|
||||
*/
|
||||
public creationStatus: TDocCreation = 'new';
|
||||
|
||||
/**
|
||||
* updated from db in any case where doc comes from db
|
||||
*/
|
||||
@globalSvDb()
|
||||
_createdAt: string = new Date().toISOString();
|
||||
|
||||
/**
|
||||
* will be updated every time the doc is saved
|
||||
*/
|
||||
@globalSvDb()
|
||||
_updatedAt: string = new Date().toISOString();
|
||||
|
||||
/**
|
||||
* an array of saveable properties of ALL doc
|
||||
*/
|
||||
public globalSaveableProperties: string[];
|
||||
|
||||
/**
|
||||
* unique indexes
|
||||
*/
|
||||
public uniqueIndexes: string[];
|
||||
|
||||
/**
|
||||
* regular indexes with their options
|
||||
*/
|
||||
public regularIndexes: Array<{field: string, options: IIndexOptions}> = [];
|
||||
|
||||
/**
|
||||
* an array of saveable properties of a specific doc
|
||||
*/
|
||||
public saveableProperties: string[];
|
||||
|
||||
/**
|
||||
* name
|
||||
*/
|
||||
public name: string;
|
||||
|
||||
/**
|
||||
* primary id in the database
|
||||
*/
|
||||
public dbDocUniqueId: string;
|
||||
|
||||
/**
|
||||
* class constructor
|
||||
*/
|
||||
constructor() {}
|
||||
|
||||
/**
|
||||
* saves this instance but not any connected items
|
||||
* may lead to data inconsistencies, but is faster
|
||||
*/
|
||||
public async save() {
|
||||
// tslint:disable-next-line: no-this-assignment
|
||||
const self: any = this;
|
||||
let dbResult: any;
|
||||
|
||||
this._updatedAt = new Date().toISOString();
|
||||
|
||||
switch (this.creationStatus) {
|
||||
case 'db':
|
||||
dbResult = await this.collection.update(self);
|
||||
break;
|
||||
case 'new':
|
||||
dbResult = await this.collection.insert(self);
|
||||
this.creationStatus = 'db';
|
||||
break;
|
||||
default:
|
||||
console.error('neither new nor in db?');
|
||||
}
|
||||
return dbResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* deletes a document from the database
|
||||
*/
|
||||
public async delete() {
|
||||
await this.collection.delete(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* also store any referenced objects to DB
|
||||
* better for data consistency
|
||||
*/
|
||||
public saveDeep(savedMapArg: plugins.lik.ObjectMap<SmartDataDbDoc<any, any>> = null) {
|
||||
if (!savedMapArg) {
|
||||
savedMapArg = new plugins.lik.ObjectMap<SmartDataDbDoc<any, any>>();
|
||||
}
|
||||
savedMapArg.add(this);
|
||||
this.save();
|
||||
for (const propertyKey of Object.keys(this)) {
|
||||
const property: any = this[propertyKey];
|
||||
if (property instanceof SmartDataDbDoc && !savedMapArg.checkForObject(property)) {
|
||||
property.saveDeep(savedMapArg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* updates an object from db
|
||||
*/
|
||||
public async updateFromDb() {
|
||||
const mongoDbNativeDoc = await this.collection.findOne(await this.createIdentifiableObject());
|
||||
for (const key of Object.keys(mongoDbNativeDoc)) {
|
||||
this[key] = mongoDbNativeDoc[key];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* creates a saveable object so the instance can be persisted as json in the database
|
||||
*/
|
||||
public async createSavableObject(): Promise<TImplements> {
|
||||
const saveableObject: unknown = {}; // is not exposed to outside, so any is ok here
|
||||
const saveableProperties = [...this.globalSaveableProperties, ...this.saveableProperties];
|
||||
for (const propertyNameString of saveableProperties) {
|
||||
saveableObject[propertyNameString] = this[propertyNameString];
|
||||
}
|
||||
return saveableObject as TImplements;
|
||||
}
|
||||
|
||||
/**
|
||||
* creates an identifiable object for operations that require filtering
|
||||
*/
|
||||
public async createIdentifiableObject() {
|
||||
const identifiableObject: any = {}; // is not exposed to outside, so any is ok here
|
||||
for (const propertyNameString of this.uniqueIndexes) {
|
||||
identifiableObject[propertyNameString] = this[propertyNameString];
|
||||
}
|
||||
return identifiableObject;
|
||||
}
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import { Collection } from './smartdata.classes.collection.js';
|
||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
||||
import { SmartDataDbDoc, svDb, unI } from './smartdata.classes.doc.js';
|
||||
import * as plugins from './plugins.js';
|
||||
import { Collection } from './classes.collection.js';
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';
|
||||
|
||||
/**
|
||||
* EasyStore allows the storage of easy objects. It also allows easy sharing of the object between different instances
|
||||
@ -41,7 +41,7 @@ export class EasyStore<T> {
|
||||
private async getEasyStore(): Promise<InstanceType<typeof this.easyStoreClass>> {
|
||||
if (this.easyStorePromise) {
|
||||
return this.easyStorePromise;
|
||||
};
|
||||
}
|
||||
|
||||
// first run from here
|
||||
const deferred = plugins.smartpromise.defer<InstanceType<typeof this.easyStoreClass>>();
|
760
ts/classes.lucene.adapter.ts
Normal file
@ -0,0 +1,760 @@
|
||||
/**
|
||||
* Lucene to MongoDB query adapter for SmartData
|
||||
*/
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
// Types
|
||||
type NodeType =
|
||||
| 'TERM'
|
||||
| 'PHRASE'
|
||||
| 'FIELD'
|
||||
| 'AND'
|
||||
| 'OR'
|
||||
| 'NOT'
|
||||
| 'RANGE'
|
||||
| 'WILDCARD'
|
||||
| 'FUZZY'
|
||||
| 'GROUP';
|
||||
|
||||
interface QueryNode {
|
||||
type: NodeType;
|
||||
}
|
||||
|
||||
interface TermNode extends QueryNode {
|
||||
type: 'TERM';
|
||||
value: string;
|
||||
boost?: number;
|
||||
}
|
||||
|
||||
interface PhraseNode extends QueryNode {
|
||||
type: 'PHRASE';
|
||||
value: string;
|
||||
proximity?: number;
|
||||
}
|
||||
|
||||
interface FieldNode extends QueryNode {
|
||||
type: 'FIELD';
|
||||
field: string;
|
||||
value: AnyQueryNode;
|
||||
}
|
||||
|
||||
interface BooleanNode extends QueryNode {
|
||||
type: 'AND' | 'OR' | 'NOT';
|
||||
left: AnyQueryNode;
|
||||
right: AnyQueryNode;
|
||||
}
|
||||
|
||||
interface RangeNode extends QueryNode {
|
||||
type: 'RANGE';
|
||||
field: string;
|
||||
lower: string;
|
||||
upper: string;
|
||||
includeLower: boolean;
|
||||
includeUpper: boolean;
|
||||
}
|
||||
|
||||
interface WildcardNode extends QueryNode {
|
||||
type: 'WILDCARD';
|
||||
value: string;
|
||||
}
|
||||
|
||||
interface FuzzyNode extends QueryNode {
|
||||
type: 'FUZZY';
|
||||
value: string;
|
||||
maxEdits: number;
|
||||
}
|
||||
|
||||
interface GroupNode extends QueryNode {
|
||||
type: 'GROUP';
|
||||
value: AnyQueryNode;
|
||||
}
|
||||
|
||||
type AnyQueryNode =
|
||||
| TermNode
|
||||
| PhraseNode
|
||||
| FieldNode
|
||||
| BooleanNode
|
||||
| RangeNode
|
||||
| WildcardNode
|
||||
| FuzzyNode
|
||||
| GroupNode;
|
||||
|
||||
/**
|
||||
* Lucene query parser
|
||||
*/
|
||||
export class LuceneParser {
|
||||
private pos: number = 0;
|
||||
private input: string = '';
|
||||
private tokens: string[] = [];
|
||||
|
||||
constructor() {}
|
||||
|
||||
/**
|
||||
* Parse a Lucene query string into an AST
|
||||
*/
|
||||
parse(query: string): AnyQueryNode {
|
||||
this.input = query.trim();
|
||||
this.pos = 0;
|
||||
this.tokens = this.tokenize(this.input);
|
||||
|
||||
return this.parseQuery();
|
||||
}
|
||||
|
||||
/**
|
||||
* Tokenize the input string into tokens
|
||||
*/
|
||||
private tokenize(input: string): string[] {
|
||||
const specialChars = /[()\[\]{}"~^:]/;
|
||||
const operators = /AND|OR|NOT|TO/;
|
||||
|
||||
let tokens: string[] = [];
|
||||
let current = '';
|
||||
let inQuote = false;
|
||||
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
const char = input[i];
|
||||
|
||||
// Handle quoted strings
|
||||
if (char === '"') {
|
||||
if (inQuote) {
|
||||
tokens.push(current + char);
|
||||
current = '';
|
||||
inQuote = false;
|
||||
} else {
|
||||
if (current) tokens.push(current);
|
||||
current = char;
|
||||
inQuote = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (inQuote) {
|
||||
current += char;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle whitespace
|
||||
if (char === ' ' || char === '\t' || char === '\n') {
|
||||
if (current) {
|
||||
tokens.push(current);
|
||||
current = '';
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Handle special characters
|
||||
if (specialChars.test(char)) {
|
||||
if (current) {
|
||||
tokens.push(current);
|
||||
current = '';
|
||||
}
|
||||
tokens.push(char);
|
||||
continue;
|
||||
}
|
||||
|
||||
current += char;
|
||||
|
||||
// Check if current is an operator
|
||||
if (operators.test(current) && (i + 1 === input.length || /\s/.test(input[i + 1]))) {
|
||||
tokens.push(current);
|
||||
current = '';
|
||||
}
|
||||
}
|
||||
|
||||
if (current) tokens.push(current);
|
||||
|
||||
return tokens;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the main query expression
|
||||
*/
|
||||
private parseQuery(): AnyQueryNode {
|
||||
const left = this.parseBooleanOperand();
|
||||
|
||||
if (this.pos < this.tokens.length) {
|
||||
const token = this.tokens[this.pos];
|
||||
|
||||
if (token === 'AND' || token === 'OR') {
|
||||
this.pos++;
|
||||
const right = this.parseQuery();
|
||||
return {
|
||||
type: token as 'AND' | 'OR',
|
||||
left,
|
||||
right,
|
||||
};
|
||||
} else if (token === 'NOT' || token === '-') {
|
||||
this.pos++;
|
||||
const right = this.parseQuery();
|
||||
return {
|
||||
type: 'NOT',
|
||||
left,
|
||||
right,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return left;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse boolean operands (terms, phrases, fields, groups)
|
||||
*/
|
||||
private parseBooleanOperand(): AnyQueryNode {
|
||||
if (this.pos >= this.tokens.length) {
|
||||
throw new Error('Unexpected end of input');
|
||||
}
|
||||
|
||||
const token = this.tokens[this.pos];
|
||||
|
||||
// Handle grouping with parentheses
|
||||
if (token === '(') {
|
||||
this.pos++;
|
||||
const group = this.parseQuery();
|
||||
|
||||
if (this.pos < this.tokens.length && this.tokens[this.pos] === ')') {
|
||||
this.pos++;
|
||||
return { type: 'GROUP', value: group } as GroupNode;
|
||||
} else {
|
||||
throw new Error('Unclosed group');
|
||||
}
|
||||
}
|
||||
|
||||
// Handle fields (field:value)
|
||||
if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === ':') {
|
||||
const field = token;
|
||||
this.pos += 2; // Skip field and colon
|
||||
|
||||
if (this.pos < this.tokens.length) {
|
||||
const value = this.parseBooleanOperand();
|
||||
return { type: 'FIELD', field, value } as FieldNode;
|
||||
} else {
|
||||
throw new Error('Expected value after field');
|
||||
}
|
||||
}
|
||||
|
||||
// Handle range queries
|
||||
if (token === '[' || token === '{') {
|
||||
return this.parseRange();
|
||||
}
|
||||
|
||||
// Handle phrases ("term term")
|
||||
if (token.startsWith('"') && token.endsWith('"')) {
|
||||
const phrase = token.slice(1, -1);
|
||||
this.pos++;
|
||||
|
||||
// Check for proximity operator
|
||||
let proximity: number | undefined;
|
||||
if (this.pos < this.tokens.length && this.tokens[this.pos] === '~') {
|
||||
this.pos++;
|
||||
if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
|
||||
proximity = parseInt(this.tokens[this.pos], 10);
|
||||
this.pos++;
|
||||
} else {
|
||||
throw new Error('Expected number after proximity operator');
|
||||
}
|
||||
}
|
||||
|
||||
return { type: 'PHRASE', value: phrase, proximity } as PhraseNode;
|
||||
}
|
||||
|
||||
// Handle wildcards
|
||||
if (token.includes('*') || token.includes('?')) {
|
||||
this.pos++;
|
||||
return { type: 'WILDCARD', value: token } as WildcardNode;
|
||||
}
|
||||
|
||||
// Handle fuzzy searches
|
||||
if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === '~') {
|
||||
const term = token;
|
||||
this.pos += 2; // Skip term and tilde
|
||||
|
||||
let maxEdits = 2; // Default
|
||||
if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
|
||||
maxEdits = parseInt(this.tokens[this.pos], 10);
|
||||
this.pos++;
|
||||
}
|
||||
|
||||
return { type: 'FUZZY', value: term, maxEdits } as FuzzyNode;
|
||||
}
|
||||
|
||||
// Simple term
|
||||
this.pos++;
|
||||
return { type: 'TERM', value: token } as TermNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse range queries
|
||||
*/
|
||||
private parseRange(): RangeNode {
|
||||
const includeLower = this.tokens[this.pos] === '[';
|
||||
const includeUpper = this.tokens[this.pos + 4] === ']';
|
||||
|
||||
this.pos++; // Skip open bracket
|
||||
|
||||
if (this.pos + 4 >= this.tokens.length) {
|
||||
throw new Error('Invalid range query syntax');
|
||||
}
|
||||
|
||||
const lower = this.tokens[this.pos];
|
||||
this.pos++;
|
||||
|
||||
if (this.tokens[this.pos] !== 'TO') {
|
||||
throw new Error('Expected TO in range query');
|
||||
}
|
||||
this.pos++;
|
||||
|
||||
const upper = this.tokens[this.pos];
|
||||
this.pos++;
|
||||
|
||||
if (this.tokens[this.pos] !== (includeLower ? ']' : '}')) {
|
||||
throw new Error('Invalid range query closing bracket');
|
||||
}
|
||||
this.pos++;
|
||||
|
||||
// For simplicity, assuming the field is handled separately
|
||||
return {
|
||||
type: 'RANGE',
|
||||
field: '', // This will be filled by the field node
|
||||
lower,
|
||||
upper,
|
||||
includeLower,
|
||||
includeUpper,
|
||||
};
|
||||
}
|
||||
}
|
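// Illustrative walk-through (hypothetical query; field names borrowed from test/test.search.ts):
// for a query such as
//   category:Kitchen AND name:Blender
// the parser above produces roughly this AST:
//   { type: 'AND',
//     left:  { type: 'FIELD', field: 'category', value: { type: 'TERM', value: 'Kitchen' } },
//     right: { type: 'FIELD', field: 'name', value: { type: 'TERM', value: 'Blender' } } }
// which the transformer below converts into a MongoDB filter of the form:
//   { $and: [
//       { category: { $regex: 'Kitchen', $options: 'i' } },
//       { name: { $regex: 'Blender', $options: 'i' } } ] }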
||||
|
||||
/**
|
||||
* Transformer for Lucene AST to MongoDB query
|
||||
* FIXED VERSION - proper MongoDB query structure
|
||||
*/
|
||||
export class LuceneToMongoTransformer {
|
||||
constructor() {}
|
||||
|
||||
/**
|
||||
* Transform a Lucene AST node to a MongoDB query
|
||||
*/
|
||||
transform(node: AnyQueryNode, searchFields?: string[]): any {
|
||||
switch (node.type) {
|
||||
case 'TERM':
|
||||
return this.transformTerm(node, searchFields);
|
||||
case 'PHRASE':
|
||||
return this.transformPhrase(node, searchFields);
|
||||
case 'FIELD':
|
||||
return this.transformField(node);
|
||||
case 'AND':
|
||||
return this.transformAnd(node);
|
||||
case 'OR':
|
||||
return this.transformOr(node);
|
||||
case 'NOT':
|
||||
return this.transformNot(node);
|
||||
case 'RANGE':
|
||||
return this.transformRange(node);
|
||||
case 'WILDCARD':
|
||||
return this.transformWildcard(node, searchFields);
|
||||
case 'FUZZY':
|
||||
return this.transformFuzzy(node, searchFields);
|
||||
case 'GROUP':
|
||||
return this.transform(node.value, searchFields);
|
||||
default:
|
||||
throw new Error(`Unsupported node type: ${(node as any).type}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform a term to MongoDB query
|
||||
* FIXED: properly structured $or query for multiple fields
|
||||
*/
|
||||
private transformTerm(node: TermNode, searchFields?: string[]): any {
|
||||
// If specific fields are provided, search across those fields
|
||||
if (searchFields && searchFields.length > 0) {
|
||||
// Create an $or query to search across multiple fields
|
||||
const orConditions = searchFields.map((field) => ({
|
||||
[field]: { $regex: node.value, $options: 'i' },
|
||||
}));
|
||||
|
||||
return { $or: orConditions };
|
||||
}
|
||||
|
||||
// Otherwise, use text search (requires a text index on desired fields)
|
||||
return { $text: { $search: node.value } };
|
||||
}
|
||||
|
||||
  /**
   * Transform a phrase to MongoDB query
   * FIXED: properly structured $or query for multiple fields
   */
  private transformPhrase(node: PhraseNode, searchFields?: string[]): any {
    // If specific fields are provided, search the phrase across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: `${node.value.replace(/\s+/g, '\\s+')}`, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // Otherwise, fall back to a quoted $text search for the exact phrase
    return { $text: { $search: `"${node.value}"` } };
  }
  /**
   * Transform a field query to MongoDB query
   */
  private transformField(node: FieldNode): any {
    // Handle special case for range queries on fields
    if (node.value.type === 'RANGE') {
      const rangeNode = node.value as RangeNode;
      rangeNode.field = node.field;
      return this.transformRange(rangeNode);
    }

    // Handle special case for wildcards on fields
    if (node.value.type === 'WILDCARD') {
      return {
        [node.field]: {
          $regex: this.luceneWildcardToRegex((node.value as WildcardNode).value),
          $options: 'i',
        },
      };
    }

    // Handle special case for fuzzy searches on fields
    if (node.value.type === 'FUZZY') {
      return {
        [node.field]: {
          $regex: this.createFuzzyRegex((node.value as FuzzyNode).value),
          $options: 'i',
        },
      };
    }

    // Special case for exact term matches on fields
    if (node.value.type === 'TERM') {
      return { [node.field]: { $regex: (node.value as TermNode).value, $options: 'i' } };
    }

    // Special case for phrase matches on fields
    if (node.value.type === 'PHRASE') {
      return {
        [node.field]: {
          $regex: `${(node.value as PhraseNode).value.replace(/\s+/g, '\\s+')}`,
          $options: 'i',
        },
      };
    }

    // For other cases, we'll transform the value and apply it to the field
    const transformedValue = this.transform(node.value);

    // If the transformed value uses $text, we need to adapt it for the field
    if (transformedValue.$text) {
      return { [node.field]: { $regex: transformedValue.$text.$search, $options: 'i' } };
    }

    // Handle $or and $and cases
    if (transformedValue.$or || transformedValue.$and) {
      // This is a bit complex - we need to restructure the query to apply the field
      // For now, simplify by just using a regex on the field
      const term = this.extractTermFromBooleanQuery(transformedValue);
      if (term) {
        return { [node.field]: { $regex: term, $options: 'i' } };
      }
    }

    return { [node.field]: transformedValue };
  }
  /**
   * Extract a term from a boolean query (simplification)
   */
  private extractTermFromBooleanQuery(query: any): string | null {
    if (query.$or && Array.isArray(query.$or) && query.$or.length > 0) {
      const firstClause = query.$or[0];
      for (const field in firstClause) {
        if (firstClause[field].$regex) {
          return firstClause[field].$regex;
        }
      }
    }

    if (query.$and && Array.isArray(query.$and) && query.$and.length > 0) {
      const firstClause = query.$and[0];
      for (const field in firstClause) {
        if (firstClause[field].$regex) {
          return firstClause[field].$regex;
        }
      }
    }

    return null;
  }
  /**
   * Transform AND operator to MongoDB query
   * FIXED: $and must be an array
   */
  private transformAnd(node: BooleanNode): any {
    return { $and: [this.transform(node.left), this.transform(node.right)] };
  }

  /**
   * Transform OR operator to MongoDB query
   * FIXED: $or must be an array
   */
  private transformOr(node: BooleanNode): any {
    return { $or: [this.transform(node.left), this.transform(node.right)] };
  }
  /**
   * Transform NOT operator to MongoDB query
   * FIXED: $and must be an array and $not usage
   */
  private transformNot(node: BooleanNode): any {
    const leftQuery = this.transform(node.left);
    const rightQuery = this.transform(node.right);

    // Create a query that includes left but excludes right
    if (rightQuery.$text) {
      // For text searches, we need a different approach
      // We'll use a negated regex instead
      const searchTerm = rightQuery.$text.$search.replace(/"/g, '');

      // Determine the fields to apply the negation to
      const notConditions = [];

      for (const field in leftQuery) {
        if (field !== '$or' && field !== '$and') {
          notConditions.push({
            [field]: { $not: { $regex: searchTerm, $options: 'i' } },
          });
        }
      }

      // If left query has $or or $and, we need to handle it differently
      if (leftQuery.$or) {
        return {
          $and: [leftQuery, { $nor: [{ $or: notConditions }] }],
        };
      } else {
        // Simple case - just add $not to each field
        return {
          $and: [leftQuery, { $and: notConditions }],
        };
      }
    } else {
      // For other queries, we can use $not directly
      // We need to handle different structures based on the rightQuery
      let notQuery: any = {};

      if (rightQuery.$or) {
        notQuery = { $nor: rightQuery.$or };
      } else if (rightQuery.$and) {
        // Convert $and to $nor
        notQuery = { $nor: rightQuery.$and };
      } else {
        // Simple field condition
        for (const field in rightQuery) {
          notQuery[field] = { $not: rightQuery[field] };
        }
      }

      return { $and: [leftQuery, notQuery] };
    }
  }
  /**
   * Transform range query to MongoDB query
   */
  private transformRange(node: RangeNode): any {
    const range: any = {};

    if (node.lower !== '*') {
      range[node.includeLower ? '$gte' : '$gt'] = this.parseValue(node.lower);
    }

    if (node.upper !== '*') {
      range[node.includeUpper ? '$lte' : '$lt'] = this.parseValue(node.upper);
    }

    return { [node.field]: range };
  }
  /**
   * Transform wildcard query to MongoDB query
   * FIXED: properly structured for multiple fields
   */
  private transformWildcard(node: WildcardNode, searchFields?: string[]): any {
    // Convert Lucene wildcards to MongoDB regex
    const regex = this.luceneWildcardToRegex(node.value);

    // If specific fields are provided, search wildcard across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: regex, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // By default, apply to the default field
    return { $regex: regex, $options: 'i' };
  }
  /**
   * Transform fuzzy query to MongoDB query
   * FIXED: properly structured for multiple fields
   */
  private transformFuzzy(node: FuzzyNode, searchFields?: string[]): any {
    // MongoDB doesn't have built-in fuzzy search
    // This is a very basic approach using regex
    const regex = this.createFuzzyRegex(node.value);

    // If specific fields are provided, search fuzzy term across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: regex, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // By default, apply to the default field
    return { $regex: regex, $options: 'i' };
  }
  /**
   * Convert Lucene wildcards to MongoDB regex patterns
   */
  private luceneWildcardToRegex(wildcardPattern: string): string {
    // Replace Lucene wildcards with regex equivalents
    // * => .*
    // ? => .
    // Escape other regex special chars first (but not * and ?, which are rewritten below)
    return wildcardPattern
      .replace(/([.+^${}()|[\]\\])/g, '\\$1') // Escape regex special chars, including [ and ]
      .replace(/\*/g, '.*')
      .replace(/\?/g, '.');
  }
  /**
   * Create a simplified fuzzy search regex
   */
  private createFuzzyRegex(term: string): string {
    // For a very simple approach, we allow some characters to be optional
    let regex = '';
    for (let i = 0; i < term.length; i++) {
      // Make every other character optional (simplified fuzzy)
      if (i % 2 === 1) {
        regex += term[i] + '?';
      } else {
        regex += term[i];
      }
    }
    return regex;
  }
  /**
   * Parse string values to appropriate types (numbers, dates, etc.)
   */
  private parseValue(value: string): any {
    // Try to parse as number
    if (/^-?\d+$/.test(value)) {
      return parseInt(value, 10);
    }

    if (/^-?\d+\.\d+$/.test(value)) {
      return parseFloat(value);
    }

    // Try to parse as date (simplified)
    const date = new Date(value);
    if (!isNaN(date.getTime())) {
      return date;
    }

    // Default to string
    return value;
  }
}
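A quick illustration of the transformer in isolation (a minimal sketch; the node literals assume the AST shapes defined above and are cast loosely, and the import path is hypothetical):

import { LuceneToMongoTransformer } from './classes.lucene.adapter.js'; // hypothetical path

const transformer = new LuceneToMongoTransformer();

// A bare term searched across two fields should become an $or of case-insensitive regexes,
// roughly: { $or: [ { title: { $regex: 'mongo', $options: 'i' } }, { body: { ... } } ] }
const termQuery = transformer.transform({ type: 'TERM', value: 'mongo' } as any, ['title', 'body']);
console.log(termQuery);

// A fielded range should become a comparison object on that field,
// roughly: { year: { $gte: 2000, $lte: 2020 } }
const rangeQuery = transformer.transform({
  type: 'RANGE',
  field: 'year',
  lower: '2000',
  upper: '2020',
  includeLower: true,
  includeUpper: true,
} as any);
console.log(rangeQuery);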
/**
 * Main adapter class
 */
export class SmartdataLuceneAdapter {
  private parser: LuceneParser;
  private transformer: LuceneToMongoTransformer;
  private defaultSearchFields: string[] = [];

  /**
   * @param defaultSearchFields - Optional array of field names to search across when no field is specified
   */
  constructor(defaultSearchFields?: string[]) {
    this.parser = new LuceneParser();
    this.transformer = new LuceneToMongoTransformer();
    if (defaultSearchFields) {
      this.defaultSearchFields = defaultSearchFields;
    }
  }

  /**
   * Convert a Lucene query string to a MongoDB query object
   * @param luceneQuery - The Lucene query string to convert
   * @param searchFields - Optional array of field names to search across (overrides defaultSearchFields)
   */
  convert(luceneQuery: string, searchFields?: string[]): any {
    try {
      // For simple single term queries, create a simpler query structure
      if (
        !luceneQuery.includes(':') &&
        !luceneQuery.includes(' AND ') &&
        !luceneQuery.includes(' OR ') &&
        !luceneQuery.includes(' NOT ') &&
        !luceneQuery.includes('(') &&
        !luceneQuery.includes('[')
      ) {
        // This is a simple term, use a more direct approach
        const fieldsToSearch = searchFields || this.defaultSearchFields;

        if (fieldsToSearch && fieldsToSearch.length > 0) {
          return {
            $or: fieldsToSearch.map((field) => ({
              [field]: { $regex: luceneQuery, $options: 'i' },
            })),
          };
        }
      }

      // For more complex queries, use the full parser
      // Parse the Lucene query into an AST
      const ast = this.parser.parse(luceneQuery);

      // Use provided searchFields, fall back to defaultSearchFields
      const fieldsToSearch = searchFields || this.defaultSearchFields;

      // Transform the AST to a MongoDB query
      return this.transformWithFields(ast, fieldsToSearch);
    } catch (error) {
      console.error(`Failed to convert Lucene query "${luceneQuery}":`, error);
      throw new Error(`Failed to convert Lucene query: ${error}`);
    }
  }

  /**
   * Helper method to transform the AST with field information
   */
  private transformWithFields(node: AnyQueryNode, searchFields: string[]): any {
    // Special case for term nodes without a specific field
    if (
      node.type === 'TERM' ||
      node.type === 'PHRASE' ||
      node.type === 'WILDCARD' ||
      node.type === 'FUZZY'
    ) {
      return this.transformer.transform(node, searchFields);
    }

    // For other node types, use the standard transformation
    return this.transformer.transform(node);
  }
}
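Putting the pieces together, a hedged usage sketch of the adapter; the field names are illustrative and the import path is hypothetical, and the commented result shapes are what the code above should produce rather than guaranteed output:

import { SmartdataLuceneAdapter } from './classes.lucene.adapter.js'; // hypothetical path

// Search 'title' and 'description' by default when no field is given
const adapter = new SmartdataLuceneAdapter(['title', 'description']);

// Simple term -> $or of case-insensitive regexes over the default fields
const q1 = adapter.convert('typescript');

// Fielded range -> roughly { year: { $gte: 2020, $lte: 2024 } }
const q2 = adapter.convert('year:[2020 TO 2024]');

// Boolean query -> nested $and of the two field conditions
const q3 = adapter.convert('title:mongo AND description:adapter');

// The resulting objects can then be handed to a MongoDB find(), e.g. collection.find(q2)
console.log(q1, q2, q3);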
@@ -1,5 +1,5 @@
-import { SmartDataDbDoc } from './smartdata.classes.doc.js';
-import * as plugins from './smartdata.plugins.js';
+import { SmartDataDbDoc } from './classes.doc.js';
+import * as plugins from './plugins.js';
 
 /**
  * a wrapper for the native mongodb cursor. Exposes better
@@ -14,7 +14,7 @@ export class SmartdataDbWatcher<T = any> {
   public changeSubject = new plugins.smartrx.rxjs.Subject<T>();
   constructor(
     changeStreamArg: plugins.mongodb.ChangeStream<T>,
-    smartdataDbDocArg: typeof SmartDataDbDoc
+    smartdataDbDocArg: typeof SmartDataDbDoc,
   ) {
     this.changeStream = changeStreamArg;
     this.changeStream.on('change', async (item: any) => {
@@ -23,7 +23,7 @@ export class SmartdataDbWatcher<T = any> {
         return;
       }
       this.changeSubject.next(
-        smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(item.fullDocument) as any as T
+        smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(item.fullDocument) as any as T,
       );
     });
     plugins.smartdelay.delayFor(0).then(() => {
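For context, a hedged sketch of how the watcher's changeSubject is typically consumed; it assumes the static watch() helper shown in the removed doc file further down survives the rename to classes.doc.ts, and `Car` stands in for any class extending SmartDataDbDoc:

// subscribe to change stream events for matching documents
const watcher = await Car.watch({ color: 'red' } as any);
const subscription = watcher.changeSubject.subscribe((changedCar) => {
  console.log('car changed:', changedCar);
});
// later, stop listening:
subscription.unsubscribe();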
ts/index.ts

@@ -1,14 +1,14 @@
-export * from './smartdata.classes.db.js';
-export * from './smartdata.classes.collection.js';
-export * from './smartdata.classes.doc.js';
-export * from './smartdata.classes.easystore.js';
-export * from './smartdata.classes.cursor.js';
+export * from './classes.db.js';
+export * from './classes.collection.js';
+export * from './classes.doc.js';
+export * from './classes.easystore.js';
+export * from './classes.cursor.js';
 
-import * as convenience from './smartadata.convenience.js';
+import * as convenience from './classes.convenience.js';
 
 export { convenience };
 
 // to be removed with the next breaking update
-import type * as plugins from './smartdata.plugins.js';
+import type * as plugins from './plugins.js';
 type IMongoDescriptor = plugins.tsclass.database.IMongoDescriptor;
 export type { IMongoDescriptor };
@ -1,3 +1,3 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
export const logger = new plugins.smartlog.ConsoleLog();
|
@@ -1,294 +0,0 @@
import * as plugins from './smartdata.plugins.js';

import { SmartdataDb } from './smartdata.classes.db.js';
import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
import { type IManager, SmartdataCollection } from './smartdata.classes.collection.js';
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';

export type TDocCreation = 'db' | 'new' | 'mixed';

/**
 * saveable - saveable decorator to be used on class properties
 */
export function svDb() {
  return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
    console.log(`called svDb() on >${target.constructor.name}.${key}<`);
    if (!target.saveableProperties) {
      target.saveableProperties = [];
    }
    target.saveableProperties.push(key);
  };
}

/**
 * unique index - decorator to mark a unique index
 */
export function unI() {
  return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
    console.log(`called unI on >>${target.constructor.name}.${key}<<`);

    // mark the index as unique
    if (!target.uniqueIndexes) {
      target.uniqueIndexes = [];
    }
    target.uniqueIndexes.push(key);

    // and also save it
    if (!target.saveableProperties) {
      target.saveableProperties = [];
    }
    target.saveableProperties.push(key);
  };
}

export const convertFilterForMongoDb = (filterArg: { [key: string]: any }) => {
  const convertedFilter: { [key: string]: any } = {};
  const convertFilterArgument = (keyPathArg2: string, filterArg2: any) => {
    if (typeof filterArg2 === 'object') {
      for (const key of Object.keys(filterArg2)) {
        if (key.startsWith('$')) {
          convertedFilter[keyPathArg2] = filterArg2;
          return;
        } else if (key.includes('.')) {
          throw new Error('keys cannot contain dots');
        }
      }
      for (const key of Object.keys(filterArg2)) {
        convertFilterArgument(`${keyPathArg2}.${key}`, filterArg2[key]);
      }
    } else {
      convertedFilter[keyPathArg2] = filterArg2;
    }
  };
  for (const key of Object.keys(filterArg)) {
    convertFilterArgument(key, filterArg[key]);
  }
  return convertedFilter;
};

export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends IManager = any> {
  /**
   * the collection object an Doc belongs to
   */
  public static collection: SmartdataCollection<any>;
  public collection: SmartdataCollection<any>;
  public static defaultManager;
  public static manager;
  public manager: TManager;

  // STATIC
  public static createInstanceFromMongoDbNativeDoc<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    mongoDbNativeDocArg: any
  ): T {
    const newInstance = new this();
    (newInstance as any).creationStatus = 'db';
    for (const key of Object.keys(mongoDbNativeDocArg)) {
      newInstance[key] = mongoDbNativeDocArg[key];
    }
    return newInstance;
  }

  /**
   * gets all instances as array
   * @param this
   * @param filterArg
   * @returns
   */
  public static async getInstances<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
  ): Promise<T[]> {
    const foundDocs = await (this as any).collection.findAll(convertFilterForMongoDb(filterArg));
    const returnArray = [];
    for (const foundDoc of foundDocs) {
      const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
      returnArray.push(newInstance);
    }
    return returnArray;
  }

  /**
   * gets the first matching instance
   * @param this
   * @param filterArg
   * @returns
   */
  public static async getInstance<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
  ): Promise<T> {
    const foundDoc = await (this as any).collection.findOne(convertFilterForMongoDb(filterArg));
    if (foundDoc) {
      const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
      return newInstance;
    } else {
      return null;
    }
  }

  /**
   * get a unique id prefixed with the class name
   */
  public static async getNewId<T = any>(this: plugins.tsclass.typeFest.Class<T>, lengthArg: number = 20) {
    return `${(this as any).className}:${plugins.smartunique.shortId(lengthArg)}`;
  }

  /**
   * get cursor
   * @returns
   */
  public static async getCursor<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
  ) {
    const collection: SmartdataCollection<T> = (this as any).collection;
    const cursor: SmartdataDbCursor<T> = await collection.getCursor(
      convertFilterForMongoDb(filterArg),
      this as any as typeof SmartDataDbDoc
    );
    return cursor;
  }

  /**
   * watch the collection
   * @param this
   * @param filterArg
   * @param forEachFunction
   */
  public static async watch<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
  ) {
    const collection: SmartdataCollection<T> = (this as any).collection;
    const watcher: SmartdataDbWatcher<T> = await collection.watch(
      convertFilterForMongoDb(filterArg),
      this as any
    );
    return watcher;
  }

  /**
   * run a function for all instances
   * @returns
   */
  public static async forEach<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
    forEachFunction: (itemArg: T) => Promise<any>
  ) {
    const cursor: SmartdataDbCursor<T> = await (this as any).getCursor(filterArg);
    await cursor.forEach(forEachFunction);
  }

  // INSTANCE

  /**
   * how the Doc in memory was created, may prove useful later.
   */
  public creationStatus: TDocCreation = 'new';

  /**
   * unique indexes
   */
  public uniqueIndexes: string[];

  /**
   * an array of saveable properties of a doc
   */
  public saveableProperties: string[];

  /**
   * name
   */
  public name: string;

  /**
   * primary id in the database
   */
  public dbDocUniqueId: string;

  /**
   * class constructor
   */
  constructor() {}

  /**
   * saves this instance but not any connected items
   * may lead to data inconsistencies, but is faster
   */
  public async save() {
    // tslint:disable-next-line: no-this-assignment
    const self: any = this;
    let dbResult: any;
    switch (this.creationStatus) {
      case 'db':
        dbResult = await this.collection.update(self);
        break;
      case 'new':
        dbResult = await this.collection.insert(self);
        this.creationStatus = 'db';
        break;
      default:
        console.error('neither new nor in db?');
    }
    return dbResult;
  }

  /**
   * deletes a document from the database
   */
  public async delete() {
    await this.collection.delete(this);
  }

  /**
   * also store any referenced objects to DB
   * better for data consistency
   */
  public saveDeep(savedMapArg: plugins.lik.ObjectMap<SmartDataDbDoc<any, any>> = null) {
    if (!savedMapArg) {
      savedMapArg = new plugins.lik.ObjectMap<SmartDataDbDoc<any, any>>();
    }
    savedMapArg.add(this);
    this.save();
    for (const propertyKey of Object.keys(this)) {
      const property: any = this[propertyKey];
      if (property instanceof SmartDataDbDoc && !savedMapArg.checkForObject(property)) {
        property.saveDeep(savedMapArg);
      }
    }
  }

  /**
   * updates an object from db
   */
  public async updateFromDb() {
    const mongoDbNativeDoc = await this.collection.findOne(await this.createIdentifiableObject());
    for (const key of Object.keys(mongoDbNativeDoc)) {
      this[key] = mongoDbNativeDoc[key];
    }
  }

  /**
   * creates a saveable object so the instance can be persisted as json in the database
   */
  public async createSavableObject(): Promise<TImplements> {
    const saveableObject: unknown = {}; // is not exposed to outside, so any is ok here
    for (const propertyNameString of this.saveableProperties) {
      saveableObject[propertyNameString] = this[propertyNameString];
    }
    return saveableObject as TImplements;
  }

  /**
   * creates an identifiable object for operations that require filtering
   */
  public async createIdentifiableObject() {
    const identifiableObject: any = {}; // is not exposed to outside, so any is ok here
    for (const propertyNameString of this.uniqueIndexes) {
      identifiableObject[propertyNameString] = this[propertyNameString];
    }
    return identifiableObject;
  }
}
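For readers new to this base class, a hedged sketch of how the decorators and SmartDataDbDoc are typically combined; the @Collection decorator and the `db` instance are assumed to come from classes.collection.ts and classes.db.ts and are not shown in this diff, and the Car class is purely illustrative:

// Illustrative only: @Collection is assumed to bind the class to an existing SmartdataDb named `db`.
@Collection(() => db)
class Car extends SmartDataDbDoc<Car, Car> {
  @unI()
  public id: string = 'Car:' + Math.random().toString(36).slice(2); // unique index, also persisted

  @svDb()
  public brand: string; // persisted property

  @svDb()
  public color: string; // persisted property
}

const myCar = new Car();
myCar.brand = 'Volvo';
myCar.color = 'red';
await myCar.save(); // inserts, because creationStatus is still 'new'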
@@ -3,7 +3,14 @@
     "experimentalDecorators": true,
     "useDefineForClassFields": false,
     "target": "ES2022",
-    "module": "ES2022",
-    "moduleResolution": "nodenext"
-  }
-}
+    "module": "NodeNext",
+    "moduleResolution": "NodeNext",
+    "esModuleInterop": true,
+    "verbatimModuleSyntax": true,
+    "baseUrl": ".",
+    "paths": {}
+  },
+  "exclude": [
+    "dist_*/**/*.d.ts"
+  ]
+}