Compare commits
187 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 026f2acc89 | |||
| 1cd0f09598 | |||
| d254f58a05 | |||
| c5e7b6f982 | |||
| d30c9619c5 | |||
| 7344ae2db3 | |||
| 3b29a150a8 | |||
| 59186d84a9 | |||
| 7fab4e5dd0 | |||
| 0dbaa1bc5d | |||
| 8b37ebc8f9 | |||
| 5d757207c8 | |||
| c80df05fdf | |||
| 9be43a85ef | |||
| bf66209d3e | |||
| cdd1ae2c9b | |||
| f4290ae7f7 | |||
| e58c0fd215 | |||
| a91fac450a | |||
| 5cb043009c | |||
| 4a1f11b885 | |||
| 43f9033ccc | |||
| e7c0951786 | |||
| efc107907c | |||
| 2b8b0e5bdd | |||
| 3ae2a7fcf5 | |||
| 0806d3749b | |||
| f5d5e20a97 | |||
| db2767010d | |||
| e2dc094afd | |||
| 39d2957b7d | |||
| 490524516e | |||
| ccd4b9e1ec | |||
| 9c6d6d9f2c | |||
| e4d787096e | |||
| 2bf923b4f1 | |||
| 0ca1d452b4 | |||
| 436311ab06 | |||
| 498f586ddb | |||
| 6c50bd23ec | |||
| 419eb163f4 | |||
| 75aeb12e81 | |||
| c5a44da975 | |||
| 969b073939 | |||
| ac80f90ae0 | |||
| d0e769622e | |||
| eef758cabb | |||
| d0cc2a0ed2 | |||
| 87c930121c | |||
| 23b499b3a8 | |||
| 0834ec5c91 | |||
| 6a2a708ea1 | |||
| 1d977986f1 | |||
| e325b42906 | |||
| 1a359d355a | |||
| b5a9449d5e | |||
| 558f83a3d9 | |||
| 76ae454221 | |||
| 90cfc4644d | |||
| 0be279e5f5 | |||
| 9755522bba | |||
| de8736e99e | |||
| c430627a21 | |||
| 0bfebaf5b9 | |||
| 4733982d03 | |||
| 368dc27607 | |||
| 938b25c925 | |||
| ab251858ba | |||
| 24371ccf78 | |||
| ed1eecbab8 | |||
| 0d2dcec3e2 | |||
| 9426a21a2a | |||
| 4fac974fc9 | |||
| cad2decf59 | |||
| 0f61bdc455 | |||
| 408b2cce4a | |||
| 7a08700451 | |||
| ebaf3e685c | |||
| c8d51a30d8 | |||
| d957e911de | |||
| fee936c75f | |||
| ac867401de | |||
| c066464526 | |||
| 0105aa2a18 | |||
| 4c2477c269 | |||
| ea0d2bb251 | |||
| b3e30a8711 | |||
| 64621dd38f | |||
| 117c257a27 | |||
| b30522c505 | |||
| 57d2d56d00 | |||
| 90751002aa | |||
| 7606e074a5 | |||
| 7ec39e397e | |||
| 21d8d3dc32 | |||
| 6d456955d8 | |||
| d08544c782 | |||
| bda9ac8a07 | |||
| d27dafba2b | |||
| b6594de18c | |||
| d9246cbeac | |||
| 9a5864656e | |||
| 307f0c7277 | |||
| 62dc897e73 | |||
| 552b344914 | |||
| 5a2cc2406c | |||
| 73a11370b6 | |||
| 162265f353 | |||
| 06776d74c8 | |||
| b4cd6b0fe1 | |||
| b282f69b35 | |||
| 203a284c88 | |||
| 30ae641a9c | |||
| cfe733621f | |||
| 1f76e2478e | |||
| 7d668bee05 | |||
| bef7f68360 | |||
| 56e9754725 | |||
| 30d81581cf | |||
| 5e9db12955 | |||
| ad2f422c86 | |||
| 17ce14bcb9 | |||
| 32319e6e77 | |||
| 4cd284eaa9 | |||
| 00ec2e57c2 | |||
| 765356ce3d | |||
| 56b8581d2b | |||
| 37a9df9086 | |||
| 090fb668cd | |||
| a1c807261c | |||
| a2ccf15f69 | |||
| 84d48f1914 | |||
| 1e258e5ffb | |||
| 19d5f553b9 | |||
| 7a257ea925 | |||
| 2fa1e89f34 | |||
| d6b3896dd3 | |||
| 49b11b17ce | |||
| 4ac8a4c0cd | |||
| 7f9983382a | |||
| 54f529b0a7 | |||
| f542463bf6 | |||
| 1235ae2eb3 | |||
| 8166d2f7c2 | |||
| 7c9f27e02f | |||
| 842e4b280b | |||
| 009f3297b2 | |||
| 2ff3a4e0b7 | |||
| 0e55cd8876 | |||
| eccdf3f00a | |||
| c7544133d9 | |||
| c7c9acf5bd | |||
| c99ec50853 | |||
| 4dd9557e1d | |||
| 52b34a6da1 | |||
| 1bf74fe04d | |||
| fdd875ad31 | |||
| a7bf0c0298 | |||
| 59d6336e43 | |||
| e0fc81179a | |||
| 5aa81a56a2 | |||
| 9ae26177b8 | |||
| 26ac52d6c5 | |||
| fb39463b7d | |||
| 44acba80c1 | |||
| 8cf8315577 | |||
| 9b44b64a50 | |||
| 699e25201c | |||
| 2ef9aace68 | |||
| cc55a57dfd | |||
| b2df512552 | |||
| 23c62fbd69 | |||
| 5f70ea0b05 | |||
| 49a595876a | |||
| db38a1ef85 | |||
| 94854638dd | |||
| 902fab4cc0 | |||
| ed3b19abc5 | |||
| 5b88da7dce | |||
| df273e9efa | |||
| fd590e0be3 | |||
| ef97b390d4 | |||
| cd14eb8bf3 | |||
| f48443dcd3 | |||
| 3f28ff80cb | |||
| 64005a0b32 | |||
| 8a77bb3281 |
66
.gitea/workflows/default_nottags.yaml
Normal file
66
.gitea/workflows/default_nottags.yaml
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
name: Default (not tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags-ignore:
|
||||||
|
- '**'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Install pnpm and npmci
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
|
||||||
|
- name: Run npm prepare
|
||||||
|
run: npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
124
.gitea/workflows/default_tags.yaml
Normal file
124
.gitea/workflows/default_tags.yaml
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
name: Default (tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Release
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm publish
|
||||||
|
|
||||||
|
metadata:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Code quality
|
||||||
|
run: |
|
||||||
|
npmci command npm install -g typescript
|
||||||
|
npmci npm install
|
||||||
|
|
||||||
|
- name: Trigger
|
||||||
|
run: npmci trigger
|
||||||
|
|
||||||
|
- name: Build docs and upload artifacts
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
pnpm install -g @git.zone/tsdoc
|
||||||
|
npmci command tsdoc
|
||||||
|
continue-on-error: true
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -3,7 +3,6 @@
|
|||||||
# artifacts
|
# artifacts
|
||||||
coverage/
|
coverage/
|
||||||
public/
|
public/
|
||||||
pages/
|
|
||||||
|
|
||||||
# installs
|
# installs
|
||||||
node_modules/
|
node_modules/
|
||||||
@@ -17,4 +16,4 @@ node_modules/
|
|||||||
dist/
|
dist/
|
||||||
dist_*/
|
dist_*/
|
||||||
|
|
||||||
# custom
|
#------# custom
|
||||||
141
.gitlab-ci.yml
141
.gitlab-ci.yml
@@ -1,141 +0,0 @@
|
|||||||
# gitzone ci_default
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
|
|
||||||
cache:
|
|
||||||
paths:
|
|
||||||
- .npmci_cache/
|
|
||||||
key: '$CI_BUILD_STAGE'
|
|
||||||
|
|
||||||
stages:
|
|
||||||
- security
|
|
||||||
- test
|
|
||||||
- release
|
|
||||||
- metadata
|
|
||||||
|
|
||||||
before_script:
|
|
||||||
- npm install -g @shipzone/npmci
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# security stage
|
|
||||||
# ====================
|
|
||||||
mirror:
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci git mirror
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
auditProductionDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --production --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=prod --production
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
auditDevDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=dev
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# test stage
|
|
||||||
# ====================
|
|
||||||
|
|
||||||
testStable:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci npm test
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
testBuild:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command npm run build
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
release:
|
|
||||||
stage: release
|
|
||||||
script:
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm publish
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# metadata stage
|
|
||||||
# ====================
|
|
||||||
codequality:
|
|
||||||
stage: metadata
|
|
||||||
allow_failure: true
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
script:
|
|
||||||
- npmci command npm install -g tslint typescript
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command "tslint -c tslint.json ./ts/**/*.ts"
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- priv
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci trigger
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
pages:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci node install lts
|
|
||||||
- npmci command npm install -g @gitzone/tsdoc
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command tsdoc
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
artifacts:
|
|
||||||
expire_in: 1 week
|
|
||||||
paths:
|
|
||||||
- public
|
|
||||||
allow_failure: true
|
|
||||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@@ -22,5 +22,6 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
]
|
],
|
||||||
|
"deno.enable": false
|
||||||
}
|
}
|
||||||
|
|||||||
459
changelog.md
Normal file
459
changelog.md
Normal file
@@ -0,0 +1,459 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2025-11-17 - 6.0.0 - BREAKING CHANGE(decorators)
|
||||||
|
Migrate to TC39 Stage 3 decorators and refactor decorator metadata handling; update class initialization, lucene adapter fixes and docs
|
||||||
|
|
||||||
|
- Switch all decorators to TC39 Stage 3 signatures and metadata usage (use context.metadata and context.addInitializer) — affects svDb, globalSvDb, searchable, unI, index, Collection and managed.
|
||||||
|
- Refactor Collection/managed decorators to read and initialize prototype/constructor properties from context.metadata to ensure prototype properties are available before instance creation (ts/classes.collection.ts).
|
||||||
|
- Improve search implementation: add a Lucene parser and transformer with safer MongoDB query generation, wildcard/fuzzy handling and properly structured boolean operators (ts/classes.lucene.adapter.ts).
|
||||||
|
- Search integration updated to use the new adapter and handle advanced Lucene syntax and edge cases more robustly.
|
||||||
|
- Bump dev tooling versions: @git.zone/tsbuild -> ^3.1.0 and @git.zone/tsrun -> ^2.0.0.
|
||||||
|
- Documentation: update README and add readme.hints.md describing the TC39 decorator migration, minimum TypeScript (>=5.2) and Deno notes; tests adjusted accordingly.
|
||||||
|
- Clean up project memory/config files related to the previous decorator approach and Deno configuration adjustments.
|
||||||
|
|
||||||
|
## 2025-11-17 - 5.16.7 - fix(classes.collection)
|
||||||
|
Improve Deno and TypeScript compatibility: Collection decorator _svDbOptions forwarding and config cleanup
|
||||||
|
|
||||||
|
- Collection decorator: capture original constructor and forward _svDbOptions to ensure property decorator options (serialize/deserialize) remain accessible in Deno environments.
|
||||||
|
- Collection decorator: keep instance getter defined on prototype for Deno compatibility (no behavior change, clarifies forwarding logic).
|
||||||
|
- Build/config: removed experimentalDecorators and useDefineForClassFields from deno.json and tsconfig.json to avoid Deno/TS build issues and rely on default compilation settings.
|
||||||
|
|
||||||
|
## 2025-11-17 - 5.16.6 - fix(classes)
|
||||||
|
Add Deno compatibility, prototype-safe decorators and safe collection accessor; bump a few deps
|
||||||
|
|
||||||
|
- Add deno.json to enable experimentalDecorators and target ES2022/DOM for Deno builds.
|
||||||
|
- Introduce getCollectionSafe() on SmartDataDbDoc and use it for save/update/delete/findOne to avoid runtime errors when instance 'collection' is not present.
|
||||||
|
- Change several instance properties (globalSaveableProperties, uniqueIndexes, regularIndexes, saveableProperties) to 'declare' so decorator-set prototype properties are not shadowed (Deno compatibility).
|
||||||
|
- Enhance @Collection decorator: capture original constructor/prototype for Deno, define prototype getter for collection on decorated class, attach docCtor for searchableFields, and forward _svDbOptions to the original constructor to preserve serializer metadata.
|
||||||
|
- Improve text/search index handling by relying on docCtor.searchableFields and guarding text index creation.
|
||||||
|
- Bump dependencies/devDependencies: @push.rocks/smartmongo -> ^2.0.14, @git.zone/tsbuild -> ^2.7.1, @git.zone/tstest -> ^2.8.1.
|
||||||
|
- These are non-breaking runtime compatibility and developer-experience fixes; intended as a patch release.
|
||||||
|
|
||||||
|
## 2025-11-16 - 5.16.5 - fix(watcher)
|
||||||
|
Update dependencies, tooling and watcher import; add .serena cache ignore
|
||||||
|
|
||||||
|
- Bump runtime dependencies: @push.rocks/smartlog 3.1.8 → 3.1.10, @push.rocks/smartstring 4.0.15 → 4.1.0, @push.rocks/taskbuffer 3.1.7 → 3.4.0, @tsclass/tsclass 9.2.0 → 9.3.0, mongodb 6.18.0 → 6.20.0
|
||||||
|
- Bump devDependencies: @git.zone/tsbuild 2.6.7 → 2.6.8, @git.zone/tsrun 1.2.44 → 1.6.2, @git.zone/tstest 2.3.5 → 2.6.2
|
||||||
|
- Switch EventEmitter import to node:events in ts/classes.watcher.ts to use the namespaced Node import
|
||||||
|
- Add .serena/.gitignore to ignore /cache
|
||||||
|
|
||||||
|
## 2025-08-18 - 5.16.4 - fix(classes.doc (convertFilterForMongoDb))
|
||||||
|
Improve filter conversion: handle logical operators, merge operator objects, add nested filter tests and docs, and fix test script
|
||||||
|
|
||||||
|
- Fix package.json test script: remove stray dot in tstest --verbose argument to ensure tests run correctly
|
||||||
|
- Enhance convertFilterForMongoDb in ts/classes.doc.ts to properly handle logical operators ($and, $or, $nor, $not) and return them recursively
|
||||||
|
- Merge operator objects for the same field path (e.g. combining $gte and $lte) to avoid overwriting operator clauses when object and dot-notation are mixed
|
||||||
|
- Add validation/guards for operator argument types (e.g. $in, $nin, $all must be arrays; $size must be numeric) and preserve existing behavior blocking $where for security
|
||||||
|
- Add comprehensive nested filter tests in test/test.filters.ts to cover deep nested object queries, $elemMatch, array size, $all, $in on nested fields and more
|
||||||
|
- Expand README filtering section with detailed examples for basic filtering, deep nested filters, comparison operators, array operations, logical and element operators, and advanced patterns
|
||||||
|
|
||||||
|
## 2025-08-18 - 5.16.3 - fix(docs)
|
||||||
|
Add local Claude settings and remove outdated codex.md
|
||||||
|
|
||||||
|
- Added .claude/settings.local.json to store local Claude/assistant permissions and configuration.
|
||||||
|
- Removed codex.md (project overview) — documentation file deleted.
|
||||||
|
- No runtime/library code changes; documentation/configuration-only update, bump patch version.
|
||||||
|
|
||||||
|
## 2025-08-18 - 5.16.2 - fix(readme)
|
||||||
|
Update README: clarify examples, expand search/cursor/docs and add local Claude settings
|
||||||
|
|
||||||
|
- Refined README wording and structure: clearer Quick Start, improved examples and developer-focused phrasing
|
||||||
|
- Expanded documentation for search, cursors, change streams, distributed coordination, transactions and EasyStore with more concrete code examples
|
||||||
|
- Adjusted code examples to show safer defaults (ID generation, status/tags, connection pooling) and improved best-practices guidance
|
||||||
|
- Added .claude/settings.local.json to provide local assistant/CI permission configuration
|
||||||
|
|
||||||
|
## 2025-08-12 - 5.16.1 - fix(core)
|
||||||
|
Improve error handling and logging; enhance search query sanitization; update dependency versions and documentation
|
||||||
|
|
||||||
|
- Replaced console.log and console.warn with structured logger.log calls throughout the core modules
|
||||||
|
- Enhanced database initialization with try/catch and proper URI credential encoding
|
||||||
|
- Improved search query conversion by disallowing dangerous operators (e.g. $where) and securely escaping regex patterns
|
||||||
|
- Bumped dependency versions (smartlog, @tsclass/tsclass, mongodb, etc.) in package.json
|
||||||
|
- Added detailed project memories including code style, project overview, and suggested commands for developers
|
||||||
|
- Updated README with improved instructions, feature highlights, and quick start sections
|
||||||
|
|
||||||
|
## 2025-04-25 - 5.16.0 - feat(watcher)
|
||||||
|
Enhance change stream watchers with buffering and EventEmitter support; update dependency versions
|
||||||
|
|
||||||
|
- Bumped smartmongo from ^2.0.11 to ^2.0.12 and smartrx from ^3.0.7 to ^3.0.10
|
||||||
|
- Upgraded @tsclass/tsclass to ^9.0.0 and mongodb to ^6.16.0
|
||||||
|
- Refactored the watch API to accept additional options (bufferTimeMs, fullDocument) for improved change stream handling
|
||||||
|
- Modified SmartdataDbWatcher to extend EventEmitter and support event notifications
|
||||||
|
|
||||||
|
## 2025-04-24 - 5.15.1 - fix(cursor)
|
||||||
|
Improve cursor usage documentation and refactor getCursor API to support native cursor modifiers
|
||||||
|
|
||||||
|
- Updated examples in readme.md to demonstrate manual iteration using cursor.next() and proper cursor closing.
|
||||||
|
- Refactored the getCursor method in classes.doc.ts to accept session and modifier options, consolidating cursor handling.
|
||||||
|
- Added new tests in test/test.cursor.ts to verify cursor operations, including limits, sorting, and skipping.
|
||||||
|
|
||||||
|
## 2025-04-24 - 5.15.0 - feat(svDb)
|
||||||
|
Enhance svDb decorator to support custom serialization and deserialization options
|
||||||
|
|
||||||
|
- Added an optional options parameter to the svDb decorator to accept serialize/deserialize functions
|
||||||
|
- Updated instance creation logic (updateFromDb) to apply custom deserialization if provided
|
||||||
|
- Updated createSavableObject to use custom serialization when available
|
||||||
|
|
||||||
|
## 2025-04-23 - 5.14.1 - fix(db operations)
|
||||||
|
Update transaction API to consistently pass optional session parameters across database operations
|
||||||
|
|
||||||
|
- Revised transaction support in readme to use startSession without await and showcased session usage in getInstance and save calls
|
||||||
|
- Updated methods in classes.collection.ts to accept an optional session parameter for findOne, getCursor, findAll, insert, update, delete, and getCount
|
||||||
|
- Enhanced SmartDataDbDoc save and delete methods to propagate session parameters
|
||||||
|
- Improved overall consistency of transactional APIs across the library
|
||||||
|
|
||||||
|
## 2025-04-23 - 5.14.0 - feat(doc)
|
||||||
|
Implement support for beforeSave, afterSave, beforeDelete, and afterDelete lifecycle hooks in document save and delete operations to allow custom logic execution during these critical moments.
|
||||||
|
|
||||||
|
- Calls beforeSave hook if defined before performing insert or update.
|
||||||
|
- Calls afterSave hook after a document is saved.
|
||||||
|
- Calls beforeDelete hook before deletion and afterDelete hook afterward.
|
||||||
|
- Ensures _updatedAt timestamp is refreshed during save operations.
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.13.1 - fix(search)
|
||||||
|
Improve search query parsing for implicit AND queries by preserving quoted substrings and better handling free terms, quoted phrases, and field:value tokens.
|
||||||
|
|
||||||
|
- Replace previous implicit AND logic with tokenization that preserves quoted substrings
|
||||||
|
- Support both free term and field:value tokens with wildcards inside quotes
|
||||||
|
- Ensure errors are thrown for non-searchable fields in field-specific queries
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.13.0 - feat(search)
|
||||||
|
Improve search query handling and update documentation
|
||||||
|
|
||||||
|
- Added 'codex.md' providing a high-level project overview and detailed search API documentation.
|
||||||
|
- Enhanced search parsing in SmartDataDbDoc to support combined free-term and quoted field phrase queries.
|
||||||
|
- Introduced a new fallback branch in the search method to handle free term with quoted field input.
|
||||||
|
- Updated tests in test/test.search.ts to cover new combined query scenarios and ensure robust behavior.
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.12.2 - fix(search)
|
||||||
|
Fix handling of quoted wildcard patterns in field-specific search queries and add tests for location-based wildcard phrase searches
|
||||||
|
|
||||||
|
- Strip surrounding quotes from wildcard patterns in field queries to correctly transform them to regex
|
||||||
|
- Introduce new tests in test/test.search.ts to validate exact quoted and unquoted wildcard searches on a location field
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.12.1 - fix(search)
|
||||||
|
Improve implicit AND logic for mixed free term and field queries in search and enhance wildcard field handling.
|
||||||
|
|
||||||
|
- Updated regex for field:value parsing to capture full value with wildcards.
|
||||||
|
- Added explicit handling for free terms by converting to regex across searchable fields.
|
||||||
|
- Improved error messaging for attempts to search non-searchable fields.
|
||||||
|
- Extended tests to cover combined free term and wildcard field searches, including error cases.
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.12.0 - feat(doc/search)
|
||||||
|
Enhance search functionality with filter and validate options for advanced query control
|
||||||
|
|
||||||
|
- Added 'filter' option to merge additional MongoDB query constraints in search
|
||||||
|
- Introduced 'validate' hook to post-process and filter fetched documents
|
||||||
|
- Refactored underlying execQuery function to support additional search options
|
||||||
|
- Updated tests to cover new search scenarios and fallback mechanisms
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.11.4 - fix(search)
|
||||||
|
Implement implicit AND logic for mixed simple term and field:value queries in search
|
||||||
|
|
||||||
|
- Added a new branch to detect and handle search queries that mix field:value pairs with plain terms without explicit operators
|
||||||
|
- Builds an implicit $and filter when query parts contain colon(s) but lack explicit boolean operators or quotes
|
||||||
|
- Ensures proper parsing and improved robustness of search filters
|
||||||
|
|
||||||
|
## 2025-04-22 - 5.11.3 - fix(lucene adapter and search tests)
|
||||||
|
Improve range query parsing in Lucene adapter and expand search test coverage
|
||||||
|
|
||||||
|
- Added a new 'testSearch' script in package.json to run search tests.
|
||||||
|
- Introduced advanced search tests for range queries and combined field filters in test/search.advanced.ts.
|
||||||
|
- Enhanced robustness tests in test/search.ts for wildcard and empty query scenarios.
|
||||||
|
- Fixed token validation in the parseRange method of the Lucene adapter to ensure proper error handling.
|
||||||
|
|
||||||
|
## 2025-04-21 - 5.11.2 - fix(readme)
|
||||||
|
Update readme to clarify usage of searchable fields retrieval
|
||||||
|
|
||||||
|
- Replaced getSearchableFields('Product') with Product.getSearchableFields()
|
||||||
|
- Updated documentation to reference the static method Class.getSearchableFields()
|
||||||
|
|
||||||
|
## 2025-04-21 - 5.11.1 - fix(doc)
|
||||||
|
Refactor searchable fields API and improve collection registration.
|
||||||
|
|
||||||
|
- Removed the standalone getSearchableFields utility in favor of a static method on document classes.
|
||||||
|
- Updated tests to use the new static method (e.g., Product.getSearchableFields()).
|
||||||
|
- Ensured the Collection decorator attaches a docCtor property to correctly register searchable fields.
|
||||||
|
- Added try/catch in test cleanup to gracefully handle dropDatabase errors.
|
||||||
|
|
||||||
|
## 2025-04-21 - 5.11.0 - feat(ts/classes.lucene.adapter)
|
||||||
|
Expose luceneWildcardToRegex method to allow external usage and enhance regex transformation capabilities.
|
||||||
|
|
||||||
|
- Changed luceneWildcardToRegex from private to public in ts/classes.lucene.adapter.ts.
|
||||||
|
|
||||||
|
## 2025-04-21 - 5.10.0 - feat(search)
|
||||||
|
Improve search functionality: update documentation, refine Lucene query transformation, and add advanced search tests
|
||||||
|
|
||||||
|
- Updated readme.md with detailed Lucene‑style search examples and use cases
|
||||||
|
- Enhanced LuceneToMongoTransformer to properly handle wildcard conversion and regex escaping
|
||||||
|
- Improved search query parsing in SmartDataDbDoc for field-specific, multi-term, and advanced Lucene syntax
|
||||||
|
- Added new advanced search tests covering boolean operators, grouping, quoted phrases, and wildcard queries
|
||||||
|
|
||||||
|
## 2025-04-18 - 5.9.2 - fix(documentation)
|
||||||
|
Update search API documentation to replace deprecated searchWithLucene examples with the unified search(query) API and clarify its behavior.
|
||||||
|
|
||||||
|
- Replaced 'searchWithLucene' examples with 'search(query)' in the README.
|
||||||
|
- Updated explanation to detail field-specific exact match, partial word regex search, multi-word literal matching, and handling of empty queries.
|
||||||
|
- Clarified guidelines for creating MongoDB text indexes on searchable fields for optimized search performance.
|
||||||
|
|
||||||
|
## 2025-04-18 - 5.9.1 - fix(search)
|
||||||
|
Refactor search tests to use unified search API and update text index type casting
|
||||||
|
|
||||||
|
- Replaced all calls from searchWithLucene with search in test/search tests
|
||||||
|
- Updated text index specification in the collection class to use proper type casting
|
||||||
|
|
||||||
|
## 2025-04-18 - 5.9.0 - feat(collections/search)
|
||||||
|
Improve text index creation and search fallback mechanisms in collections and document search methods
|
||||||
|
|
||||||
|
- Auto-create a compound text index on all searchable fields in SmartdataCollection with a one-time flag to prevent duplicate index creation.
|
||||||
|
- Refine the search method in SmartDataDbDoc to support exact field matches and safe regex fallback for non-Lucene queries.
|
||||||
|
|
||||||
|
## 2025-04-17 - 5.8.4 - fix(core)
|
||||||
|
Update commit metadata with no functional code changes
|
||||||
|
|
||||||
|
- Commit info and documentation refreshed
|
||||||
|
- No code or test changes detected in the diff
|
||||||
|
|
||||||
|
## 2025-04-17 - 5.8.3 - fix(readme)
|
||||||
|
Improve readme documentation on data models and connection management
|
||||||
|
|
||||||
|
- Clarify that data models use @Collection, @unI, @svDb, @index, and @searchable decorators
|
||||||
|
- Document that ObjectId and Buffer fields are stored as BSON types natively without extra decorators
|
||||||
|
- Update connection management section to use 'db.close()' instead of 'db.disconnect()'
|
||||||
|
- Revise license section to reference the MIT License without including additional legal details
|
||||||
|
|
||||||
|
## 2025-04-14 - 5.8.2 - fix(classes.doc.ts)
|
||||||
|
Ensure collection initialization before creating a cursor in getCursorExtended
|
||||||
|
|
||||||
|
- Added 'await collection.init()' to guarantee that the MongoDB collection is initialized before using the cursor
|
||||||
|
- Prevents potential runtime errors when accessing collection.mongoDbCollection
|
||||||
|
|
||||||
|
## 2025-04-14 - 5.8.1 - fix(cursor, doc)
|
||||||
|
Add explicit return types and casts to SmartdataDbCursor methods and update getCursorExtended signature in SmartDataDbDoc.
|
||||||
|
|
||||||
|
- Specify Promise<T> as return type for next() in SmartdataDbCursor and cast return value to T.
|
||||||
|
- Specify Promise<T[]> as return type for toArray() in SmartdataDbCursor and cast return value to T[].
|
||||||
|
- Update getCursorExtended to return Promise<SmartdataDbCursor<T>> for clearer type safety.
|
||||||
|
|
||||||
|
## 2025-04-14 - 5.8.0 - feat(cursor)
|
||||||
|
Add toArray method to SmartdataDbCursor to convert raw MongoDB documents into initialized class instances
|
||||||
|
|
||||||
|
- Introduced asynchronous toArray method in SmartdataDbCursor which retrieves all documents from the MongoDB cursor
|
||||||
|
- Maps each native document to a SmartDataDbDoc instance using createInstanceFromMongoDbNativeDoc for consistent API usage
|
||||||
|
|
||||||
|
## 2025-04-14 - 5.7.0 - feat(SmartDataDbDoc)
|
||||||
|
Add extended cursor method getCursorExtended for flexible cursor modifications
|
||||||
|
|
||||||
|
- Introduces getCursorExtended in classes.doc.ts to allow modifier functions for MongoDB cursors
|
||||||
|
- Wraps the modified cursor with SmartdataDbCursor for improved API consistency
|
||||||
|
- Enhances querying capabilities by enabling customized cursor transformations
|
||||||
|
|
||||||
|
## 2025-04-07 - 5.6.0 - feat(indexing)
|
||||||
|
Add support for regular index creation in documents and collections
|
||||||
|
|
||||||
|
- Implement new index decorator in classes.doc.ts to mark properties with regular indexing options
|
||||||
|
- Update SmartdataCollection to create regular indexes if defined on a document during insert
|
||||||
|
- Enhance document structure to store and utilize regular index configurations
|
||||||
|
|
||||||
|
## 2025-04-06 - 5.5.1 - fix(ci & formatting)
|
||||||
|
Minor fixes: update CI workflow image and npmci package references, adjust package.json and readme URLs, and apply consistent code formatting.
|
||||||
|
|
||||||
|
- Update image and repo URL in Gitea workflows from GitLab to code.foss.global
|
||||||
|
- Replace '@shipzone/npmci' with '@ship.zone/npmci' throughout CI scripts
|
||||||
|
- Adjust homepage and bugs URL in package.json and readme
|
||||||
|
- Apply trailing commas and consistent formatting in TypeScript source files
|
||||||
|
- Minor update to .gitignore custom section label
|
||||||
|
|
||||||
|
## 2025-04-06 - 5.5.0 - feat(search)
|
||||||
|
Enhance search functionality with robust Lucene query transformation and reliable fallback mechanisms
|
||||||
|
|
||||||
|
- Improve Lucene adapter to properly structure $or queries for term, phrase, wildcard, and fuzzy search
|
||||||
|
- Implement and document a robust searchWithLucene method with fallback to in-memory filtering
|
||||||
|
- Update readme and tests with extensive examples for @searchable fields and Lucene-based queries
|
||||||
|
|
||||||
|
## 2025-04-06 - 5.4.0 - feat(core)
|
||||||
|
Refactor file structure and update dependency versions
|
||||||
|
|
||||||
|
- Renamed files and modules from 'smartdata.classes.*' to 'classes.*' and adjusted corresponding import paths.
|
||||||
|
- Updated dependency versions: '@push.rocks/smartmongo' to ^2.0.11, '@tsclass/tsclass' to ^8.2.0, and 'mongodb' to ^6.15.0.
|
||||||
|
- Renamed dev dependency packages from '@gitzone/...' to '@git.zone/...' and updated '@push.rocks/tapbundle' and '@types/node'.
|
||||||
|
- Fixed YAML workflow command: replaced 'pnpm install -g @gitzone/tsdoc' with 'pnpm install -g @git.zone/tsdoc'.
|
||||||
|
- Added package manager configuration and pnpm-workspace.yaml for built dependencies.
|
||||||
|
|
||||||
|
## 2025-03-10 - 5.3.0 - feat(docs)
|
||||||
|
Enhance documentation with updated installation instructions and comprehensive usage examples covering advanced features such as deep queries, automatic indexing, and distributed coordination.
|
||||||
|
|
||||||
|
- Added pnpm installation command
|
||||||
|
- Updated User model example to include ObjectId, Binary, and custom serialization
|
||||||
|
- Expanded CRUD operations examples with cursor methods and deep query support
|
||||||
|
- Enhanced sections on EasyStore, real-time data watching with RxJS integration, and managed collections
|
||||||
|
- Included detailed examples for transactions, deep object queries, and document lifecycle hooks
|
||||||
|
|
||||||
|
## 2025-02-03 - 5.2.12 - fix(documentation)
|
||||||
|
Remove license badge from README
|
||||||
|
|
||||||
|
- Removed the license badge from the README file, ensuring compliance with branding guidelines.
|
||||||
|
|
||||||
|
## 2025-02-03 - 5.2.11 - fix(documentation)
|
||||||
|
Updated project documentation for accuracy and added advanced feature details
|
||||||
|
|
||||||
|
- Added details for EasyStore, Distributed Coordination, and Real-time Data Watching features.
|
||||||
|
- Updated database connection setup instructions to include user authentication.
|
||||||
|
- Re-organized advanced usage section to showcase additional features separately.
|
||||||
|
|
||||||
|
## 2024-09-05 - 5.2.10 - fix(smartdata.classes.doc)
|
||||||
|
Fix issue with array handling in convertFilterForMongoDb function
|
||||||
|
|
||||||
|
- Corrected the logic to properly handle array filters in the convertFilterForMongoDb function to avoid incorrect assignments.
|
||||||
|
|
||||||
|
## 2024-09-05 - 5.2.9 - fix(smartdata.classes.doc)
|
||||||
|
Fixed issue with convertFilterForMongoDb to handle array operators.
|
||||||
|
|
||||||
|
- Updated the convertFilterForMongoDb function in smartdata.classes.doc.ts to properly handle array operators like $in and $all.
|
||||||
|
|
||||||
|
## 2024-09-05 - 5.2.8 - fix(smartdata.classes.doc)
|
||||||
|
Fix key handling in convertFilterForMongoDb function
|
||||||
|
|
||||||
|
- Fixed an issue in convertFilterForMongoDb that allowed keys with dots which could cause errors.
|
||||||
|
|
||||||
|
## 2024-09-05 - 5.2.7 - fix(core)
|
||||||
|
Fixed issue with handling filter keys containing dots in smartdata.classes.doc.ts
|
||||||
|
|
||||||
|
- Fixed an error in the convertFilterForMongoDb function which previously threw an error when keys contained dots.
|
||||||
|
|
||||||
|
## 2024-06-18 - 5.2.6 - Chore
|
||||||
|
Maintenance Release
|
||||||
|
|
||||||
|
- Release version 5.2.6
|
||||||
|
|
||||||
|
## 2024-05-31 - 5.2.2 - Bug Fixes
|
||||||
|
Fixes and Maintenance
|
||||||
|
|
||||||
|
- Fixed issue where `_createdAt` and `_updatedAt` registered saveableProperties for all document types
|
||||||
|
|
||||||
|
## 2024-04-15 - 5.1.2 - New Feature
|
||||||
|
Enhancements and Bug Fixes
|
||||||
|
|
||||||
|
- Added static `.getCount({})` method to `SmartDataDbDoc`
|
||||||
|
- Changed fields `_createdAt` and `_updatedAt` to ISO format
|
||||||
|
|
||||||
|
## 2024-04-14 - 5.0.43 - New Feature
|
||||||
|
New Feature Addition
|
||||||
|
|
||||||
|
- Added default `_createdAt` and `_updatedAt` fields, fixes #1
|
||||||
|
|
||||||
|
## 2024-03-30 - 5.0.41 - Bug Fixes
|
||||||
|
Improvements and Fixes
|
||||||
|
|
||||||
|
- Improved `tsconfig.json` for ES Module use
|
||||||
|
|
||||||
|
## 2023-07-10 - 5.0.20 - Chore
|
||||||
|
Organizational Changes
|
||||||
|
|
||||||
|
- Switched to new org scheme
|
||||||
|
|
||||||
|
## 2023-07-21 - 5.0.21 to 5.0.26 - Fixes
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2023-07-21 - 5.0.20 - Chore
|
||||||
|
Organizational Changes
|
||||||
|
|
||||||
|
- Switch to the new org scheme
|
||||||
|
|
||||||
|
## 2023-06-25 - 5.0.14 to 5.0.19 - Fixes
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2022-05-17 - 5.0.0 - Major Update
|
||||||
|
Breaking Changes
|
||||||
|
|
||||||
|
- Switched to ESM
|
||||||
|
|
||||||
|
## 2022-05-18 - 5.0.2 - Bug Fixes
|
||||||
|
Bug Fixes
|
||||||
|
|
||||||
|
- The `watcher.changeSubject` now emits the correct type into observer functions
|
||||||
|
|
||||||
|
## 2022-05-17 - 5.0.1 - Chore
|
||||||
|
Testing Improvements
|
||||||
|
|
||||||
|
- Tests now use `@pushrocks/smartmongo` backed by `wiredTiger`
|
||||||
|
|
||||||
|
## 2022-05-17 to 2022-11-08 - 5.0.8 to 5.0.10
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2021-11-12 - 4.0.17 to 4.0.20
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2021-09-17 - 4.0.10 to 4.0.16
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2021-06-09 - 4.0.1 to 4.0.9
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2021-06-06 - 4.0.0 - Major Update
|
||||||
|
Major Release
|
||||||
|
|
||||||
|
- Maintenance and core updates
|
||||||
|
|
||||||
|
## 2021-05-17 - 3.1.56 - Chore
|
||||||
|
Maintenance Release
|
||||||
|
|
||||||
|
- Release version 3.1.56
|
||||||
|
|
||||||
|
## 2020-09-09 - 3.1.44 to 3.1.52
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2020-06-12 - 3.1.26 to 3.1.28
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2020-02-18 - 3.1.23 to 3.1.25
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2019-09-11 - 3.1.20 to 3.1.22
|
||||||
|
Multiple Fix Releases
|
||||||
|
|
||||||
|
- Various core updates and bug fixes
|
||||||
|
|
||||||
|
## 2018-07-10 - 3.0.5 - New Feature
|
||||||
|
Added Feature
|
||||||
|
|
||||||
|
- Added custom unique indexes to `SmartdataDoc`
|
||||||
|
|
||||||
|
## 2018-07-08 - 3.0.1 - Chore
|
||||||
|
Dependencies Update
|
||||||
|
|
||||||
|
- Updated mongodb dependencies
|
||||||
|
|
||||||
|
## 2018-07-08 - 3.0.0 - Major Update
|
||||||
|
Refactor and Cleanup
|
||||||
|
|
||||||
|
- Cleaned project structure
|
||||||
|
|
||||||
|
## 2018-01-16 - 2.0.7 - Breaking Change
|
||||||
|
Big Changes
|
||||||
|
|
||||||
|
- Switched to `@pushrocks` scope and moved from `rethinkdb` to `mongodb`
|
||||||
|
|
||||||
|
## 2018-01-12 - 2.0.0 - Major Release
|
||||||
|
Core Updates
|
||||||
|
|
||||||
|
- Updated CI configurations
|
||||||
|
|
||||||
@@ -12,12 +12,25 @@
|
|||||||
"gitzone": {
|
"gitzone": {
|
||||||
"projectType": "npm",
|
"projectType": "npm",
|
||||||
"module": {
|
"module": {
|
||||||
"githost": "gitlab.com",
|
"githost": "code.foss.global",
|
||||||
"gitscope": "pushrocks",
|
"gitscope": "push.rocks",
|
||||||
"gitrepo": "smartdata",
|
"gitrepo": "smartdata",
|
||||||
"shortDescription": "do more with data",
|
"description": "An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.",
|
||||||
"npmPackagename": "@pushrocks/smartdata",
|
"npmPackagename": "@push.rocks/smartdata",
|
||||||
"license": "MIT"
|
"license": "MIT",
|
||||||
|
"keywords": [
|
||||||
|
"data manipulation",
|
||||||
|
"NoSQL",
|
||||||
|
"MongoDB",
|
||||||
|
"TypeScript",
|
||||||
|
"data validation",
|
||||||
|
"collections",
|
||||||
|
"custom data types",
|
||||||
|
"ODM"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"tsdoc": {
|
||||||
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
11294
package-lock.json
generated
11294
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
72
package.json
72
package.json
@@ -1,48 +1,48 @@
|
|||||||
{
|
{
|
||||||
"name": "@pushrocks/smartdata",
|
"name": "@push.rocks/smartdata",
|
||||||
"version": "5.0.7",
|
"version": "6.0.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "do more with data",
|
"description": "An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.",
|
||||||
"main": "dist_ts/index.js",
|
"main": "dist_ts/index.js",
|
||||||
"typings": "dist_ts/index.d.ts",
|
"typings": "dist_ts/index.d.ts",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "tstest test/",
|
"test": "tstest test/ --verbose --logfile --timeout 120",
|
||||||
"build": "tsbuild --web --allowimplicitany"
|
"testSearch": "tsx test/test.search.ts",
|
||||||
|
"build": "tsbuild --web --allowimplicitany",
|
||||||
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "git+ssh://git@gitlab.com/pushrocks/smartdata.git"
|
"url": "https://code.foss.global/push.rocks/smartdata.git"
|
||||||
},
|
},
|
||||||
"author": "Lossless GmbH",
|
"author": "Lossless GmbH",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://gitlab.com/pushrocks/smartdata/issues"
|
"url": "https://code.foss.global/push.rocks/smartdata/issues"
|
||||||
},
|
},
|
||||||
"homepage": "https://gitlab.com/pushrocks/smartdata#README",
|
"homepage": "https://code.foss.global/push.rocks/smartdata#readme",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@pushrocks/lik": "^6.0.0",
|
"@push.rocks/lik": "^6.2.2",
|
||||||
"@pushrocks/smartdelay": "^2.0.13",
|
"@push.rocks/smartdelay": "^3.0.1",
|
||||||
"@pushrocks/smartlog": "^2.0.44",
|
"@push.rocks/smartlog": "^3.1.10",
|
||||||
"@pushrocks/smartmongo": "^2.0.7",
|
"@push.rocks/smartmongo": "^2.0.14",
|
||||||
"@pushrocks/smartpromise": "^3.1.7",
|
"@push.rocks/smartpromise": "^4.0.2",
|
||||||
"@pushrocks/smartrx": "^2.0.25",
|
"@push.rocks/smartrx": "^3.0.10",
|
||||||
"@pushrocks/smartstring": "^4.0.2",
|
"@push.rocks/smartstring": "^4.1.0",
|
||||||
"@pushrocks/smartunique": "^3.0.3",
|
"@push.rocks/smarttime": "^4.0.6",
|
||||||
"@tsclass/tsclass": "^4.0.8",
|
"@push.rocks/smartunique": "^3.0.8",
|
||||||
"@types/lodash": "^4.14.182",
|
"@push.rocks/taskbuffer": "^3.4.0",
|
||||||
"@types/mongodb": "^4.0.7",
|
"@tsclass/tsclass": "^9.3.0",
|
||||||
"lodash": "^4.17.21",
|
"mongodb": "^6.20.0"
|
||||||
"mongodb": "^4.7.0"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@gitzone/tsbuild": "^2.1.63",
|
"@git.zone/tsbuild": "^3.1.0",
|
||||||
"@gitzone/tsrun": "^1.2.37",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
"@gitzone/tstest": "^1.0.71",
|
"@git.zone/tstest": "^2.8.1",
|
||||||
"@pushrocks/qenv": "^4.0.10",
|
"@push.rocks/qenv": "^6.1.3",
|
||||||
"@pushrocks/tapbundle": "^5.0.3",
|
"@push.rocks/tapbundle": "^6.0.3",
|
||||||
"@types/node": "^17.0.42",
|
"@types/node": "^22.15.2"
|
||||||
"@types/shortid": "0.0.29"
|
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"ts/**/*",
|
"ts/**/*",
|
||||||
@@ -58,5 +58,19 @@
|
|||||||
],
|
],
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
"last 1 chrome versions"
|
"last 1 chrome versions"
|
||||||
]
|
],
|
||||||
|
"keywords": [
|
||||||
|
"data manipulation",
|
||||||
|
"NoSQL",
|
||||||
|
"MongoDB",
|
||||||
|
"TypeScript",
|
||||||
|
"data validation",
|
||||||
|
"collections",
|
||||||
|
"custom data types",
|
||||||
|
"ODM"
|
||||||
|
],
|
||||||
|
"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6",
|
||||||
|
"pnpm": {
|
||||||
|
"overrides": {}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
12157
pnpm-lock.yaml
generated
Normal file
12157
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
4
pnpm-workspace.yaml
Normal file
4
pnpm-workspace.yaml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
onlyBuiltDependencies:
|
||||||
|
- esbuild
|
||||||
|
- mongodb-memory-server
|
||||||
|
- puppeteer
|
||||||
80
readme.hints.md
Normal file
80
readme.hints.md
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
# Project Memory - Smartdata
|
||||||
|
|
||||||
|
## TC39 Decorator Migration (v6.0.0) - ✅ COMPLETED
|
||||||
|
|
||||||
|
### Final Status: All Tests Passing (157/157)
|
||||||
|
Migration successfully completed on 2025-11-17.
|
||||||
|
|
||||||
|
### What Changed:
|
||||||
|
- ✅ Removed `experimentalDecorators` from tsconfig.json
|
||||||
|
- ✅ Refactored all 7 decorators to TC39 Stage 3 syntax
|
||||||
|
- 5 property decorators: @globalSvDb, @svDb, @unI, @index, @searchable
|
||||||
|
- 2 class decorators: @Collection, @managed
|
||||||
|
- ✅ Implemented context.metadata pattern for shared decorator state
|
||||||
|
- ✅ All tests passing across Node.js and Deno runtimes
|
||||||
|
|
||||||
|
### Critical Discovery: TC39 Metadata Access Pattern
|
||||||
|
**THE KEY INSIGHT**: In TC39 decorators, metadata is NOT accessed via `constructor[Symbol.metadata]`. Instead:
|
||||||
|
- **Field decorators**: Write to `context.metadata`
|
||||||
|
- **Class decorators**: Read from `context.metadata` (same shared object!)
|
||||||
|
- The `context.metadata` object is shared between all decorators on the same class
|
||||||
|
- Attempting to write to `constructor[Symbol.metadata]` throws: "Cannot assign to read only property"
|
||||||
|
|
||||||
|
### Implementation Pattern:
|
||||||
|
```typescript
|
||||||
|
// Field decorator - stores metadata
|
||||||
|
export function svDb() {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.saveableProperties) {
|
||||||
|
metadata.saveableProperties = [];
|
||||||
|
}
|
||||||
|
metadata.saveableProperties.push(String(context.name));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Class decorator - reads metadata and initializes prototype
|
||||||
|
export function Collection(dbArg: SmartdataDb) {
|
||||||
|
return function(value: Function, context: ClassDecoratorContext) => {
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (metadata?.saveableProperties) {
|
||||||
|
decoratedClass.prototype.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
}
|
||||||
|
return decoratedClass;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Runtime Compatibility:
|
||||||
|
- ✅ **Node.js v23.8.0**: Full TC39 support
|
||||||
|
- ✅ **Deno v2.5.4**: Full TC39 support
|
||||||
|
- ❌ **Bun v1.3.0**: No TC39 support (uses legacy decorators only)
|
||||||
|
- Removed "+bun" from test filenames to skip Bun tests
|
||||||
|
|
||||||
|
### Key Technical Notes:
|
||||||
|
1. **Metadata Initialization Timing**: Class decorators run AFTER field decorators, allowing them to read accumulated metadata and initialize prototypes before any instances are created
|
||||||
|
2. **Prototype vs Instance Properties**: Properties set on prototype are accessible via `this.propertyName` in instances
|
||||||
|
3. **TypeScript Lib Support**: TypeScript 5.9.3 includes built-in decorator types (no custom lib configuration needed)
|
||||||
|
4. **Interface Naming**: Used `ISmartdataDecoratorMetadata` extending `DecoratorMetadataObject` for type safety
|
||||||
|
|
||||||
|
### Files Modified:
|
||||||
|
- ts/classes.doc.ts (property decorators + metadata interface)
|
||||||
|
- ts/classes.collection.ts (class decorators + prototype initialization)
|
||||||
|
- tsconfig.json (removed experimentalDecorators flag)
|
||||||
|
- test/*.ts (renamed files to remove "+bun" suffix)
|
||||||
|
|
||||||
|
### Test Results:
|
||||||
|
All 157 tests passing across 10 test files:
|
||||||
|
- test.cursor.ts: 7/7
|
||||||
|
- test.deno.ts: 11/11 (queries working correctly!)
|
||||||
|
- test.search.advanced.ts: 41/41
|
||||||
|
- test.typescript.ts: 4/4
|
||||||
|
- test.watch.ts: 5/5
|
||||||
|
- And 5 more test files
|
||||||
|
|
||||||
|
### Migration Learnings for Future Reference:
|
||||||
|
1. `context.metadata` is the ONLY way to share state between decorators
|
||||||
|
2. Class decorators must initialize prototypes from metadata immediately
|
||||||
|
3. `Symbol.metadata` on constructors is read-only (managed by runtime)
|
||||||
|
4. Field decorators run before class decorators (guaranteed order)
|
||||||
|
5. TypeScript 5.2+ has built-in TC39 decorator support
|
||||||
97
test/test.cursor.ts
Normal file
97
test/test.cursor.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
import * as smartdata from '../ts/index.js';
|
||||||
|
|
||||||
|
// Set up database connection
|
||||||
|
let smartmongoInstance: smartmongo.SmartMongo;
|
||||||
|
let testDb: smartdata.SmartdataDb;
|
||||||
|
|
||||||
|
// Define a simple document model for cursor tests
|
||||||
|
@smartdata.Collection(() => testDb)
|
||||||
|
class CursorTest extends smartdata.SmartDataDbDoc<CursorTest, CursorTest> {
|
||||||
|
@smartdata.unI()
|
||||||
|
public id: string = smartunique.shortId();
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public order: number;
|
||||||
|
|
||||||
|
constructor(name: string, order: number) {
|
||||||
|
super();
|
||||||
|
this.name = name;
|
||||||
|
this.order = order;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize the in-memory MongoDB and SmartdataDB
|
||||||
|
tap.test('cursor init: start Mongo and SmartdataDb', async () => {
|
||||||
|
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||||
|
testDb = new smartdata.SmartdataDb(
|
||||||
|
await smartmongoInstance.getMongoDescriptor(),
|
||||||
|
);
|
||||||
|
await testDb.init();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Insert sample documents
|
||||||
|
tap.test('cursor insert: save 5 test documents', async () => {
|
||||||
|
for (let i = 1; i <= 5; i++) {
|
||||||
|
const doc = new CursorTest(`item${i}`, i);
|
||||||
|
await doc.save();
|
||||||
|
}
|
||||||
|
const count = await CursorTest.getCount({});
|
||||||
|
expect(count).toEqual(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test that toArray returns all documents
|
||||||
|
tap.test('cursor toArray: retrieves all documents', async () => {
|
||||||
|
const cursor = await CursorTest.getCursor({});
|
||||||
|
const all = await cursor.toArray();
|
||||||
|
expect(all.length).toEqual(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test iteration via forEach
|
||||||
|
tap.test('cursor forEach: iterates through all documents', async () => {
|
||||||
|
const names: string[] = [];
|
||||||
|
const cursor = await CursorTest.getCursor({});
|
||||||
|
await cursor.forEach(async (item) => {
|
||||||
|
names.push(item.name);
|
||||||
|
});
|
||||||
|
expect(names.length).toEqual(5);
|
||||||
|
expect(names).toContain('item3');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test native cursor modifiers: limit
|
||||||
|
tap.test('cursor modifier limit: only two documents', async () => {
|
||||||
|
const cursor = await CursorTest.getCursor({}, { modifier: (c) => c.limit(2) });
|
||||||
|
const limited = await cursor.toArray();
|
||||||
|
expect(limited.length).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Test native cursor modifiers: sort and skip
|
||||||
|
tap.test('cursor modifier sort & skip: returns correct order', async () => {
|
||||||
|
const cursor = await CursorTest.getCursor({}, {
|
||||||
|
modifier: (c) => c.sort({ order: -1 }).skip(1),
|
||||||
|
});
|
||||||
|
const results = await cursor.toArray();
|
||||||
|
// Skipped the first (order 5), next should be 4,3,2,1
|
||||||
|
expect(results.length).toEqual(4);
|
||||||
|
expect(results[0].order).toEqual(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Cleanup: drop database, close connections, stop Mongo
|
||||||
|
tap.test('cursor cleanup: drop DB and stop', async () => {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
|
if (smartmongoInstance) {
|
||||||
|
await smartmongoInstance.stopAndDumpToDir(
|
||||||
|
`.nogit/dbdump/test.cursor.ts`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
// Ensure process exits after cleanup
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
255
test/test.deno.ts
Normal file
255
test/test.deno.ts
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
// TODO: Decorator support during testing for bun and deno in @git.zone/tstest
|
||||||
|
|
||||||
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
|
import * as mongodb from 'mongodb';
|
||||||
|
|
||||||
|
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||||
|
|
||||||
|
console.log(process.memoryUsage());
|
||||||
|
|
||||||
|
// the tested module
|
||||||
|
import * as smartdata from '../ts/index.js';
|
||||||
|
|
||||||
|
// =======================================
|
||||||
|
// Connecting to the database server
|
||||||
|
// =======================================
|
||||||
|
|
||||||
|
let smartmongoInstance: smartmongo.SmartMongo;
|
||||||
|
let testDb: smartdata.SmartdataDb;
|
||||||
|
|
||||||
|
const totalCars = 2000;
|
||||||
|
|
||||||
|
tap.test('should create a testinstance as database', async () => {
|
||||||
|
const databaseName = `test-smartdata-deno-${smartunique.shortId()}`;
|
||||||
|
testDb = new smartdata.SmartdataDb({
|
||||||
|
mongoDbUrl: await testQenv.getEnvVarOnDemand('MONGODB_URL'),
|
||||||
|
mongoDbName: databaseName,
|
||||||
|
});
|
||||||
|
await testDb.init();
|
||||||
|
});
|
||||||
|
|
||||||
|
// =======================================
|
||||||
|
// The actual tests
|
||||||
|
// =======================================
|
||||||
|
|
||||||
|
// ------
|
||||||
|
// Collections
|
||||||
|
// ------
|
||||||
|
|
||||||
|
@smartdata.Collection(() => {
|
||||||
|
return testDb;
|
||||||
|
})
|
||||||
|
class Car extends smartdata.SmartDataDbDoc<Car, Car> {
|
||||||
|
@smartdata.unI()
|
||||||
|
public index: string = smartunique.shortId();
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public color: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public brand: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public testBuffer = Buffer.from('hello');
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
deepData = {
|
||||||
|
sodeep: 'yes',
|
||||||
|
};
|
||||||
|
|
||||||
|
constructor(colorArg: string, brandArg: string) {
|
||||||
|
super();
|
||||||
|
this.color = colorArg;
|
||||||
|
this.brand = brandArg;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tap.test('should create a new id', async () => {
|
||||||
|
const newid = await Car.getNewId();
|
||||||
|
console.log(newid);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should save the car to the db', async (toolsArg) => {
|
||||||
|
const myCar = new Car('red', 'Volvo');
|
||||||
|
console.log('Car.collection.smartdataDb:', (Car.collection as any).smartdataDb?.mongoDb?.databaseName);
|
||||||
|
console.log('Car.collection.collectionName:', (Car.collection as any).collectionName);
|
||||||
|
console.log('testDb.mongoDb.databaseName:', testDb.mongoDb.databaseName);
|
||||||
|
await myCar.save();
|
||||||
|
|
||||||
|
const myCar2 = new Car('red', 'Volvo');
|
||||||
|
await myCar2.save();
|
||||||
|
|
||||||
|
let counter = 0;
|
||||||
|
|
||||||
|
const gottenCarInstance = await Car.getInstance({});
|
||||||
|
console.log(gottenCarInstance.testBuffer instanceof mongodb.Binary);
|
||||||
|
process.memoryUsage();
|
||||||
|
do {
|
||||||
|
const myCar3 = new Car('red', 'Renault');
|
||||||
|
await myCar3.save();
|
||||||
|
counter++;
|
||||||
|
if (counter % 100 === 0) {
|
||||||
|
console.log(
|
||||||
|
`Filled database with ${counter} of ${totalCars} Cars and memory usage ${
|
||||||
|
process.memoryUsage().rss / 1e6
|
||||||
|
} MB`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} while (counter < totalCars);
|
||||||
|
console.log(process.memoryUsage());
|
||||||
|
|
||||||
|
// DEBUG: Check what's actually in the database
|
||||||
|
const savedCount = await Car.getCount({});
|
||||||
|
console.log('Total cars saved in DB:', savedCount);
|
||||||
|
const renaultCount = await Car.getCount({ brand: 'Renault' });
|
||||||
|
console.log('Renault cars in DB:', renaultCount);
|
||||||
|
|
||||||
|
// Check what's actually in the first saved car
|
||||||
|
const firstCar = await Car.getInstance({});
|
||||||
|
console.log('First car data:', JSON.stringify({
|
||||||
|
color: firstCar?.color,
|
||||||
|
brand: firstCar?.brand,
|
||||||
|
index: firstCar?.index
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('expect to get instance of Car with shallow match', async () => {
|
||||||
|
console.log('Before query - testDb.mongoDb.databaseName:', testDb.mongoDb.databaseName);
|
||||||
|
console.log('Before query - Car.collection.smartdataDb:', (Car.collection as any).smartdataDb?.mongoDb?.databaseName);
|
||||||
|
console.log('Before query - Car.collection.collectionName:', (Car.collection as any).collectionName);
|
||||||
|
|
||||||
|
const totalQueryCycles = totalCars / 2;
|
||||||
|
let counter = 0;
|
||||||
|
do {
|
||||||
|
const timeStart = Date.now();
|
||||||
|
const myCars = await Car.getInstances({
|
||||||
|
brand: 'Renault',
|
||||||
|
});
|
||||||
|
if (counter % 10 === 0) {
|
||||||
|
console.log(
|
||||||
|
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||||
|
Date.now() - timeStart
|
||||||
|
}ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||||
|
);
|
||||||
|
console.log('myCars.length:', myCars.length);
|
||||||
|
console.log('myCars[0]:', myCars[0]);
|
||||||
|
}
|
||||||
|
expect(myCars[0].deepData.sodeep).toEqual('yes');
|
||||||
|
expect(myCars[0].brand).toEqual('Renault');
|
||||||
|
counter++;
|
||||||
|
} while (counter < totalQueryCycles);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('expect to get instance of Car with deep match', async () => {
|
||||||
|
const totalQueryCycles = totalCars / 6;
|
||||||
|
let counter = 0;
|
||||||
|
do {
|
||||||
|
const timeStart = Date.now();
|
||||||
|
const myCars2 = await Car.getInstances({
|
||||||
|
deepData: {
|
||||||
|
sodeep: 'yes',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
if (counter % 10 === 0) {
|
||||||
|
console.log(
|
||||||
|
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||||
|
Date.now() - timeStart
|
||||||
|
}ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
expect(myCars2[0].deepData.sodeep).toEqual('yes');
|
||||||
|
expect(myCars2[0].brand).toEqual('Volvo');
|
||||||
|
counter++;
|
||||||
|
} while (counter < totalQueryCycles);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('expect to get instance of Car and update it', async () => {
|
||||||
|
const myCar = await Car.getInstance<Car>({
|
||||||
|
brand: 'Volvo',
|
||||||
|
});
|
||||||
|
expect(myCar.color).toEqual('red');
|
||||||
|
myCar.color = 'blue';
|
||||||
|
await myCar.save();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should be able to delete an instance of car', async () => {
|
||||||
|
const myCars = await Car.getInstances({
|
||||||
|
brand: 'Volvo',
|
||||||
|
color: 'blue',
|
||||||
|
});
|
||||||
|
console.log(myCars);
|
||||||
|
expect(myCars[0].color).toEqual('blue');
|
||||||
|
for (const myCar of myCars) {
|
||||||
|
await myCar.delete();
|
||||||
|
}
|
||||||
|
|
||||||
|
const myCar2 = await Car.getInstance<Car>({
|
||||||
|
brand: 'Volvo',
|
||||||
|
});
|
||||||
|
expect(myCar2.color).toEqual('red');
|
||||||
|
});
|
||||||
|
|
||||||
|
// tslint:disable-next-line: max-classes-per-file
|
||||||
|
@smartdata.Collection(() => {
|
||||||
|
return testDb;
|
||||||
|
})
|
||||||
|
class Truck extends smartdata.SmartDataDbDoc<Car, Car> {
|
||||||
|
@smartdata.unI()
|
||||||
|
public id: string = smartunique.shortId();
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public color: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public brand: string;
|
||||||
|
|
||||||
|
constructor(colorArg: string, brandArg: string) {
|
||||||
|
super();
|
||||||
|
this.color = colorArg;
|
||||||
|
this.brand = brandArg;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tap.test('should store a new Truck', async () => {
|
||||||
|
const truck = new Truck('blue', 'MAN');
|
||||||
|
await truck.save();
|
||||||
|
const myTruck2 = await Truck.getInstance({ color: 'blue' });
|
||||||
|
expect(myTruck2.color).toEqual('blue');
|
||||||
|
myTruck2.color = 'red';
|
||||||
|
await myTruck2.save();
|
||||||
|
const myTruck3 = await Truck.getInstance({ color: 'blue' });
|
||||||
|
expect(myTruck3).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should return a count', async () => {
|
||||||
|
const truckCount = await Truck.getCount();
|
||||||
|
expect(truckCount).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should use a cursor', async () => {
|
||||||
|
const cursor = await Car.getCursor({});
|
||||||
|
let counter = 0;
|
||||||
|
await cursor.forEach(async (carArg) => {
|
||||||
|
counter++;
|
||||||
|
counter % 50 === 0 ? console.log(`50 more of ${carArg.color}`) : null;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// =======================================
|
||||||
|
// close the database connection
|
||||||
|
// =======================================
|
||||||
|
tap.test('close', async () => {
|
||||||
|
if (smartmongoInstance) {
|
||||||
|
await smartmongoInstance.stopAndDumpToDir('./.nogit/dbdump/test.ts');
|
||||||
|
} else {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
|
}
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.start({ throwOnError: true });
|
||||||
122
test/test.distributedcoordinator.ts
Normal file
122
test/test.distributedcoordinator.ts
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
// Test suite for the distributed coordinator (leader election + task routing).
import { tap, expect } from '@push.rocks/tapbundle';
import * as smartmongo from '@push.rocks/smartmongo';
// type-only import: taskbuffer is used solely for interface annotations below
import type * as taskbuffer from '@push.rocks/taskbuffer';

import * as smartdata from '../ts/index.js';
import {
  SmartdataDistributedCoordinator,
  DistributedClass,
} from '../ts/classes.distributedcoordinator.js'; // path might need adjusting
// Number of coordinators spun up by the multi-instance election test.
const totalInstances = 10;

// =======================================
// Connecting to the database server
// =======================================

// Shared fixtures: one in-memory MongoDB and one SmartdataDb per test run.
let smartmongoInstance: smartmongo.SmartMongo;
let testDb: smartdata.SmartdataDb;
|
||||||
|
|
||||||
|
tap.test('should create a testinstance as database', async () => {
|
||||||
|
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||||
|
testDb = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
|
||||||
|
await testDb.init();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should instantiate DistributedClass', async (tools) => {
|
||||||
|
const instance = new DistributedClass();
|
||||||
|
expect(instance).toBeInstanceOf(DistributedClass);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DistributedClass should update the time', async (tools) => {
|
||||||
|
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||||
|
await distributedCoordinator.start();
|
||||||
|
const initialTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||||
|
await distributedCoordinator.sendHeartbeat();
|
||||||
|
const updatedTime = distributedCoordinator.ownInstance.data.lastUpdated;
|
||||||
|
expect(updatedTime).toBeGreaterThan(initialTime);
|
||||||
|
await distributedCoordinator.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should instantiate SmartdataDistributedCoordinator', async (tools) => {
|
||||||
|
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||||
|
await distributedCoordinator.start();
|
||||||
|
expect(distributedCoordinator).toBeInstanceOf(SmartdataDistributedCoordinator);
|
||||||
|
await distributedCoordinator.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('SmartdataDistributedCoordinator should update leader status', async (tools) => {
|
||||||
|
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||||
|
await distributedCoordinator.start();
|
||||||
|
await distributedCoordinator.checkAndMaybeLead();
|
||||||
|
expect(distributedCoordinator.ownInstance.data.elected).toBeOneOf([true, false]);
|
||||||
|
await distributedCoordinator.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test(
|
||||||
|
'SmartdataDistributedCoordinator should handle distributed task requests',
|
||||||
|
async (tools) => {
|
||||||
|
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||||
|
await distributedCoordinator.start();
|
||||||
|
|
||||||
|
const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
|
||||||
|
submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
|
||||||
|
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||||
|
taskName: 'SampleTask',
|
||||||
|
taskVersion: '1.0.0', // Assuming it's a version string
|
||||||
|
taskExecutionTime: Date.now(),
|
||||||
|
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||||
|
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||||
|
status: 'requesting',
|
||||||
|
};
|
||||||
|
|
||||||
|
const response = await distributedCoordinator.fireDistributedTaskRequest(mockTaskRequest);
|
||||||
|
console.log(response); // based on your expected structure for the response
|
||||||
|
await distributedCoordinator.stop();
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
tap.test(
|
||||||
|
'SmartdataDistributedCoordinator should update distributed task requests',
|
||||||
|
async (tools) => {
|
||||||
|
const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
|
||||||
|
|
||||||
|
await distributedCoordinator.start();
|
||||||
|
|
||||||
|
const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
|
||||||
|
submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
|
||||||
|
requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
|
||||||
|
taskName: 'SampleTask',
|
||||||
|
taskVersion: '1.0.0', // Assuming it's a version string
|
||||||
|
taskExecutionTime: Date.now(),
|
||||||
|
taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
|
||||||
|
taskExecutionParallel: 5, // Let's assume max 5 parallel executions
|
||||||
|
status: 'requesting',
|
||||||
|
};
|
||||||
|
|
||||||
|
await distributedCoordinator.updateDistributedTaskRequest(mockTaskRequest);
|
||||||
|
// Here, we can potentially check if a DB entry got updated or some other side-effect of the update method.
|
||||||
|
await distributedCoordinator.stop();
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
tap.test('should elect only one leader amongst multiple instances', async (tools) => {
|
||||||
|
const coordinators = Array.from({ length: totalInstances }).map(
|
||||||
|
() => new SmartdataDistributedCoordinator(testDb),
|
||||||
|
);
|
||||||
|
await Promise.all(coordinators.map((coordinator) => coordinator.start()));
|
||||||
|
const leaders = coordinators.filter((coordinator) => coordinator.ownInstance.data.elected);
|
||||||
|
for (const leader of leaders) {
|
||||||
|
console.log(leader.ownInstance);
|
||||||
|
}
|
||||||
|
expect(leaders.length).toEqual(1);
|
||||||
|
|
||||||
|
// stopping clears a coordinator from being elected.
|
||||||
|
await Promise.all(coordinators.map((coordinator) => coordinator.stop()));
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should clean up', async () => {
|
||||||
|
await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.distributedcoordinator.ts`);
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.start({ throwOnError: true });
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import { tap, expect } from '@pushrocks/tapbundle';
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
import { Qenv } from '@pushrocks/qenv';
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
import * as smartmongo from '@pushrocks/smartmongo';
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||||
|
|
||||||
@@ -26,7 +26,7 @@ tap.test('should create a testinstance as database', async () => {
|
|||||||
tap.skip.test('should connect to atlas', async (tools) => {
|
tap.skip.test('should connect to atlas', async (tools) => {
|
||||||
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
||||||
testDb = new smartdata.SmartdataDb({
|
testDb = new smartdata.SmartdataDb({
|
||||||
mongoDbUrl: testQenv.getEnvVarOnDemand('MONGO_URL'),
|
mongoDbUrl: await testQenv.getEnvVarOnDemand('MONGO_URL'),
|
||||||
mongoDbName: databaseName,
|
mongoDbName: databaseName,
|
||||||
});
|
});
|
||||||
await testDb.init();
|
await testDb.init();
|
||||||
|
|||||||
819
test/test.filters.ts
Normal file
819
test/test.filters.ts
Normal file
@@ -0,0 +1,819 @@
|
|||||||
|
// Filter/query-operator test suite for the smartdata ODM.
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as smartmongo from '@push.rocks/smartmongo';
import * as smartunique from '@push.rocks/smartunique';
import * as smartdata from '../ts/index.js';

// Convenience aliases for the decorators/classes used throughout this file.
const { SmartdataDb, Collection, svDb, unI, index } = smartdata;

// Shared fixtures: one throwaway in-memory MongoDB per run.
let smartmongoInstance: smartmongo.SmartMongo;
let testDb: smartdata.SmartdataDb;
|
||||||
|
|
||||||
|
// Define test document classes
/**
 * User fixture document exercising scalar, array and nested-object fields so
 * the filter tests below can cover every query shape.
 */
@Collection(() => testDb)
class TestUser extends smartdata.SmartDataDbDoc<TestUser, TestUser> {
  // Unique index — serves as the document id.
  @unI()
  public id: string = smartunique.shortId();

  @svDb()
  public name: string;

  @svDb()
  public age: number;

  @svDb()
  public email: string;

  // String-array fields queried by the $in / $all / $size tests.
  @svDb()
  public roles: string[];

  @svDb()
  public tags: string[];

  @svDb()
  public status: 'active' | 'inactive' | 'pending';

  // Nested object queried via both dot notation and object syntax.
  @svDb()
  public metadata: {
    lastLogin?: Date;
    loginCount?: number;
    preferences?: Record<string, any>;
  };

  @svDb()
  public scores: number[];

  // Accepts a partial shape so fixtures can set only the fields they need.
  constructor(data: Partial<TestUser> = {}) {
    super();
    Object.assign(this, data);
  }
}
|
||||||
|
|
||||||
|
/**
 * Order fixture document with an array-of-objects field (items) so the
 * $elemMatch and array dot-notation tests have something to query.
 */
@Collection(() => testDb)
class TestOrder extends smartdata.SmartDataDbDoc<TestOrder, TestOrder> {
  // Unique index — serves as the document id.
  @unI()
  public id: string = smartunique.shortId();

  // Owning TestUser id (plain string; no referential integrity enforced).
  @svDb()
  public userId: string;

  @svDb()
  public items: Array<{
    product: string;
    quantity: number;
    price: number;
  }>;

  @svDb()
  public totalAmount: number;

  @svDb()
  public status: string;

  @svDb()
  public tags: string[];

  // Accepts a partial shape so fixtures can set only the fields they need.
  constructor(data: Partial<TestOrder> = {}) {
    super();
    Object.assign(this, data);
  }
}
|
||||||
|
|
||||||
|
// Setup and teardown
|
||||||
|
tap.test('should create a test database instance', async () => {
|
||||||
|
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||||
|
testDb = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
|
||||||
|
await testDb.init();
|
||||||
|
expect(testDb).toBeInstanceOf(SmartdataDb);
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
 * Seeds the fixture data every subsequent filter test asserts against:
 * five TestUser docs (John/Jane/Bob/Alice/Charlie) and three TestOrder docs.
 * Expected counts and names further down are coupled byte-for-byte to these
 * literals — changing any value here will break tests below.
 */
tap.test('should create test data', async () => {
  // Create test users
  const users = [
    // John: only user with lastLogin set; loginCount 5.
    new TestUser({
      name: 'John Doe',
      age: 30,
      email: 'john@example.com',
      roles: ['admin', 'user'],
      tags: ['javascript', 'nodejs', 'mongodb'],
      status: 'active',
      metadata: { loginCount: 5, lastLogin: new Date() },
      scores: [85, 90, 78]
    }),
    new TestUser({
      name: 'Jane Smith',
      age: 25,
      email: 'jane@example.com',
      roles: ['user'],
      tags: ['python', 'mongodb'],
      status: 'active',
      metadata: { loginCount: 3 },
      scores: [92, 88, 95]
    }),
    new TestUser({
      name: 'Bob Johnson',
      age: 35,
      email: 'bob@example.com',
      roles: ['moderator', 'user'],
      tags: ['javascript', 'react', 'nodejs'],
      status: 'inactive',
      metadata: { loginCount: 0 },
      scores: [70, 75, 80]
    }),
    new TestUser({
      name: 'Alice Brown',
      age: 28,
      email: 'alice@example.com',
      roles: ['admin'],
      tags: ['typescript', 'angular', 'mongodb'],
      status: 'active',
      metadata: { loginCount: 10 },
      scores: [95, 98, 100]
    }),
    // Charlie: only user with a 2-element scores array (exercises $size: 2).
    new TestUser({
      name: 'Charlie Wilson',
      age: 22,
      email: 'charlie@example.com',
      roles: ['user'],
      tags: ['golang', 'kubernetes'],
      status: 'pending',
      metadata: { loginCount: 1 },
      scores: [60, 65]
    })
  ];

  // Saved sequentially so insertion order is deterministic.
  for (const user of users) {
    await user.save();
  }

  // Create test orders
  const orders = [
    new TestOrder({
      userId: users[0].id,
      items: [
        { product: 'laptop', quantity: 1, price: 1200 },
        { product: 'mouse', quantity: 2, price: 25 }
      ],
      totalAmount: 1250,
      status: 'completed',
      tags: ['electronics', 'priority']
    }),
    new TestOrder({
      userId: users[1].id,
      items: [
        { product: 'book', quantity: 3, price: 15 },
        { product: 'pen', quantity: 5, price: 2 }
      ],
      totalAmount: 55,
      status: 'pending',
      tags: ['stationery']
    }),
    new TestOrder({
      userId: users[0].id,
      items: [
        { product: 'laptop', quantity: 2, price: 1200 },
        { product: 'keyboard', quantity: 2, price: 80 }
      ],
      totalAmount: 2560,
      status: 'processing',
      tags: ['electronics', 'bulk']
    })
  ];

  for (const order of orders) {
    await order.save();
  }

  // Round-trip check: everything seeded above is actually persisted.
  const savedUsers = await TestUser.getInstances({});
  const savedOrders = await TestOrder.getInstances({});
  expect(savedUsers.length).toEqual(5);
  expect(savedOrders.length).toEqual(3);
});
|
||||||
|
|
||||||
|
// ============= BASIC FILTER TESTS =============
|
||||||
|
tap.test('should filter by simple equality', async () => {
|
||||||
|
const users = await TestUser.getInstances({ name: 'John Doe' });
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('John Doe');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter by multiple fields (implicit AND)', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
status: 'active',
|
||||||
|
age: 30
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('John Doe');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter by nested object fields', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
'metadata.loginCount': 5
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('John Doe');
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= COMPREHENSIVE NESTED FILTER TESTS =============
tap.test('should filter by nested object with direct object syntax', async () => {
  // Direct nested object matching (exact match)
  // NOTE(review): assumes the first document of an unfiltered query is John
  // Doe (the only user with lastLogin) — depends on return order; confirm.
  const users = await TestUser.getInstances({
    metadata: {
      loginCount: 5,
      lastLogin: (await TestUser.getInstances({}))[0].metadata.lastLogin // Get the exact date
    }
  });
  expect(users.length).toEqual(1);
  expect(users[0].name).toEqual('John Doe');
});

tap.test('should filter by partial nested object match', async () => {
  // When using object syntax, only specified fields must match
  const users = await TestUser.getInstances({
    metadata: { loginCount: 5 } // Only checks loginCount, ignores other fields
  });
  expect(users.length).toEqual(1);
  expect(users[0].name).toEqual('John Doe');
});

// Both syntaxes can target the same nested field in one query.
tap.test('should combine nested object and dot notation', async () => {
  const users = await TestUser.getInstances({
    metadata: { loginCount: { $gte: 3 } }, // Object syntax with operator
    'metadata.loginCount': { $lte: 10 } // Dot notation with operator
  });
  expect(users.length).toEqual(3); // Jane (3), John (5), and Alice (10) have loginCount between 3-10
});

tap.test('should filter nested fields with operators using dot notation', async () => {
  const users = await TestUser.getInstances({
    'metadata.loginCount': { $gte: 5 }
  });
  expect(users.length).toEqual(2); // John (5) and Alice (10)
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Alice Brown', 'John Doe']);
});

// Several operators inside one object express a range ($gte AND $lt).
tap.test('should filter nested fields with multiple operators', async () => {
  const users = await TestUser.getInstances({
    'metadata.loginCount': { $gte: 3, $lt: 10 }
  });
  expect(users.length).toEqual(2); // Jane (3) and John (5)
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Jane Smith', 'John Doe']);
});
|
||||||
|
|
||||||
|
/**
 * Verifies that arbitrarily deep object paths are queryable, both by literal
 * value and through operators. Creates and deletes its own fixture so the
 * surrounding tests are unaffected.
 */
tap.test('should handle deeply nested object structures', async () => {
  // First, create a user with deep nesting in preferences
  const deepUser = new TestUser({
    name: 'Deep Nester',
    age: 40,
    email: 'deep@example.com',
    roles: ['admin'],
    tags: [],
    status: 'active',
    metadata: {
      loginCount: 1,
      preferences: {
        theme: {
          colors: {
            primary: '#000000',
            secondary: '#ffffff'
          },
          fonts: {
            heading: 'Arial',
            body: 'Helvetica'
          }
        },
        notifications: {
          email: true,
          push: false
        }
      }
    },
    scores: []
  });
  await deepUser.save();

  // Test deep nesting with dot notation
  const deepResults = await TestUser.getInstances({
    'metadata.preferences.theme.colors.primary': '#000000'
  });
  expect(deepResults.length).toEqual(1);
  expect(deepResults[0].name).toEqual('Deep Nester');

  // Test deep nesting with operators
  const boolResults = await TestUser.getInstances({
    'metadata.preferences.notifications.email': { $eq: true }
  });
  expect(boolResults.length).toEqual(1);
  expect(boolResults[0].name).toEqual('Deep Nester');

  // Clean up
  await deepUser.delete();
});
|
||||||
|
|
||||||
|
// $elemMatch requires one single array element to satisfy ALL sub-conditions.
tap.test('should filter arrays of nested objects using $elemMatch', async () => {
  const orders = await TestOrder.getInstances({
    items: {
      $elemMatch: {
        product: 'laptop',
        price: { $gte: 1000 }
      }
    }
  });
  expect(orders.length).toEqual(2); // Both laptop orders have price >= 1000
});

// Dot notation on an array-of-objects matches if ANY element matches.
tap.test('should filter nested arrays with dot notation', async () => {
  // Query for any order that has an item with specific product
  const orders = await TestOrder.getInstances({
    'items.product': 'laptop'
  });
  expect(orders.length).toEqual(2); // Two orders contain laptops
});

// Nested $or / $and combine with dot-notation paths.
tap.test('should combine nested object filters with logical operators', async () => {
  const users = await TestUser.getInstances({
    $or: [
      { 'metadata.loginCount': { $gte: 10 } }, // Alice has 10
      {
        $and: [
          { 'metadata.loginCount': { $lt: 5 } }, // Jane has 3, Bob has 0, Charlie has 1
          { status: 'active' } // Jane is active, Bob is inactive, Charlie is pending
        ]
      }
    ]
  });
  expect(users.length).toEqual(2); // Alice (loginCount >= 10), Jane (loginCount < 5 AND active)
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Alice Brown', 'Jane Smith']);
});

// $exists distinguishes "field absent" from "field set to a falsy value".
tap.test('should handle null and undefined in nested fields', async () => {
  // Users without lastLogin
  const noLastLogin = await TestUser.getInstances({
    'metadata.lastLogin': { $exists: false }
  });
  expect(noLastLogin.length).toEqual(4); // Everyone except John

  // Users with preferences (none have it set)
  const withPreferences = await TestUser.getInstances({
    'metadata.preferences': { $exists: true }
  });
  expect(withPreferences.length).toEqual(0);
});
|
||||||
|
|
||||||
|
// $size matches arrays with an exact element count; uses a throwaway fixture.
tap.test('should filter nested arrays by size', async () => {
  // Create an order with specific number of items
  const multiItemOrder = new TestOrder({
    userId: 'test-user',
    items: [
      { product: 'item1', quantity: 1, price: 10 },
      { product: 'item2', quantity: 2, price: 20 },
      { product: 'item3', quantity: 3, price: 30 },
      { product: 'item4', quantity: 4, price: 40 }
    ],
    totalAmount: 100,
    status: 'pending',
    tags: ['test']
  });
  await multiItemOrder.save();

  const fourItemOrders = await TestOrder.getInstances({
    items: { $size: 4 }
  });
  expect(fourItemOrders.length).toEqual(1);

  // Clean up
  await multiItemOrder.delete();
});

tap.test('should handle nested field comparison between documents', async () => {
  // Find users where loginCount equals their age divided by 6 (John: 30/6=5)
  const users = await TestUser.getInstances({
    $and: [
      { 'metadata.loginCount': 5 },
      { age: 30 }
    ]
  });
  expect(users.length).toEqual(1);
  expect(users[0].name).toEqual('John Doe');
});

tap.test('should filter using $in on nested fields', async () => {
  const users = await TestUser.getInstances({
    'metadata.loginCount': { $in: [0, 1, 5] }
  });
  expect(users.length).toEqual(3); // Bob (0), Charlie (1), John (5)
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Bob Johnson', 'Charlie Wilson', 'John Doe']);
});

// $all: the array must contain every listed value (order-independent).
tap.test('should filter nested arrays with $all', async () => {
  // Create an order with multiple tags
  const taggedOrder = new TestOrder({
    userId: 'test-user',
    items: [{ product: 'test', quantity: 1, price: 10 }],
    totalAmount: 10,
    status: 'completed',
    tags: ['urgent', 'priority', 'electronics']
  });
  await taggedOrder.save();

  const priorityElectronics = await TestOrder.getInstances({
    tags: { $all: ['priority', 'electronics'] }
  });
  expect(priorityElectronics.length).toEqual(2); // Original order and new one

  // Clean up
  await taggedOrder.delete();
});
|
||||||
|
|
||||||
|
// ============= COMPARISON OPERATOR TESTS =============
|
||||||
|
tap.test('should filter using $gt operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
age: { $gt: 30 }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('Bob Johnson');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using $gte operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
age: { $gte: 30 }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Bob Johnson', 'John Doe']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using $lt operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
age: { $lt: 25 }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('Charlie Wilson');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using $lte operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
age: { $lte: 25 }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Charlie Wilson', 'Jane Smith']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using $ne operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
status: { $ne: 'active' }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const statuses = users.map(u => u.status).sort();
|
||||||
|
expect(statuses).toEqual(['inactive', 'pending']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using multiple comparison operators', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
age: { $gte: 25, $lt: 30 }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Alice Brown', 'Jane Smith']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= ARRAY OPERATOR TESTS =============
tap.test('should filter using $in operator', async () => {
  const users = await TestUser.getInstances({
    status: { $in: ['active', 'pending'] }
  });
  expect(users.length).toEqual(4);
  expect(users.every(u => ['active', 'pending'].includes(u.status))).toEqual(true);
});

// On array fields, $in matches documents whose array contains any listed value.
tap.test('should filter arrays using $in operator', async () => {
  const users = await TestUser.getInstances({
    roles: { $in: ['admin'] }
  });
  expect(users.length).toEqual(2);
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Alice Brown', 'John Doe']);
});

tap.test('should filter using $nin operator', async () => {
  const users = await TestUser.getInstances({
    status: { $nin: ['inactive', 'pending'] }
  });
  expect(users.length).toEqual(3);
  expect(users.every(u => u.status === 'active')).toEqual(true);
});

// $all: array must contain every listed value.
tap.test('should filter arrays using $all operator', async () => {
  const users = await TestUser.getInstances({
    tags: { $all: ['javascript', 'nodejs'] }
  });
  expect(users.length).toEqual(2);
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Bob Johnson', 'John Doe']);
});

tap.test('should filter arrays using $size operator', async () => {
  const users = await TestUser.getInstances({
    scores: { $size: 2 }
  });
  expect(users.length).toEqual(1);
  expect(users[0].name).toEqual('Charlie Wilson');
});

// $elemMatch: one single element must satisfy every sub-condition.
tap.test('should filter arrays using $elemMatch operator', async () => {
  const orders = await TestOrder.getInstances({
    items: {
      $elemMatch: {
        product: 'laptop',
        quantity: { $gte: 2 }
      }
    }
  });
  expect(orders.length).toEqual(1);
  expect(orders[0].totalAmount).toEqual(2560);
});

tap.test('should filter using $elemMatch with single condition', async () => {
  const orders = await TestOrder.getInstances({
    items: {
      $elemMatch: {
        price: { $gt: 100 }
      }
    }
  });
  expect(orders.length).toEqual(2);
  expect(orders.every(o => o.items.some(i => i.price > 100))).toEqual(true);
});

// ============= LOGICAL OPERATOR TESTS =============
tap.test('should filter using $or operator', async () => {
  const users = await TestUser.getInstances({
    $or: [
      { age: { $lt: 25 } },
      { status: 'inactive' }
    ]
  });
  expect(users.length).toEqual(2);
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Bob Johnson', 'Charlie Wilson']);
});

tap.test('should filter using $and operator', async () => {
  const users = await TestUser.getInstances({
    $and: [
      { status: 'active' },
      { age: { $gte: 28 } }
    ]
  });
  expect(users.length).toEqual(2);
  const names = users.map(u => u.name).sort();
  expect(names).toEqual(['Alice Brown', 'John Doe']);
});
|
||||||
|
|
||||||
|
tap.test('should filter using $nor operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
$nor: [
|
||||||
|
{ status: 'inactive' },
|
||||||
|
{ age: { $lt: 25 } }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(3);
|
||||||
|
expect(users.every(u => u.status !== 'inactive' && u.age >= 25)).toEqual(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using nested logical operators', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
$or: [
|
||||||
|
{
|
||||||
|
$and: [
|
||||||
|
{ status: 'active' },
|
||||||
|
{ roles: { $in: ['admin'] } }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{ age: { $lt: 23 } }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(3);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Alice Brown', 'Charlie Wilson', 'John Doe']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= ELEMENT OPERATOR TESTS =============
|
||||||
|
tap.test('should filter using $exists operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
'metadata.lastLogin': { $exists: true }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('John Doe');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter using $exists false', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
'metadata.preferences': { $exists: false }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= COMPLEX FILTER TESTS =============
|
||||||
|
tap.test('should handle complex nested filters', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
$and: [
|
||||||
|
{ status: 'active' },
|
||||||
|
{
|
||||||
|
$or: [
|
||||||
|
{ age: { $gte: 30 } },
|
||||||
|
{ roles: { $all: ['admin'] } }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{ tags: { $in: ['mongodb'] } }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Alice Brown', 'John Doe']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should combine multiple operator types', async () => {
|
||||||
|
const orders = await TestOrder.getInstances({
|
||||||
|
$and: [
|
||||||
|
{ totalAmount: { $gte: 100 } },
|
||||||
|
{ status: { $in: ['completed', 'processing'] } },
|
||||||
|
{ tags: { $in: ['electronics'] } }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
expect(orders.length).toEqual(2);
|
||||||
|
expect(orders.every(o => o.totalAmount >= 100)).toEqual(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= ERROR HANDLING TESTS =============
|
||||||
|
tap.test('should throw error for $where operator', async () => {
|
||||||
|
let error: Error | null = null;
|
||||||
|
try {
|
||||||
|
await TestUser.getInstances({
|
||||||
|
$where: 'this.age > 25'
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error?.message).toMatch(/\$where.*not allowed/);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should throw error for invalid $in value', async () => {
|
||||||
|
let error: Error | null = null;
|
||||||
|
try {
|
||||||
|
await TestUser.getInstances({
|
||||||
|
status: { $in: 'active' as any } // Should be an array
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error?.message).toMatch(/\$in.*requires.*array/);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should throw error for invalid $size value', async () => {
|
||||||
|
let error: Error | null = null;
|
||||||
|
try {
|
||||||
|
await TestUser.getInstances({
|
||||||
|
scores: { $size: '3' as any } // Should be a number
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error?.message).toMatch(/\$size.*requires.*numeric/);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should throw error for dots in field names', async () => {
|
||||||
|
let error: Error | null = null;
|
||||||
|
try {
|
||||||
|
await TestUser.getInstances({
|
||||||
|
'some.nested.field': { 'invalid.key': 'value' }
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error?.message).toMatch(/keys cannot contain dots/);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= EDGE CASE TESTS =============
|
||||||
|
tap.test('should handle empty filter (return all)', async () => {
|
||||||
|
const users = await TestUser.getInstances({});
|
||||||
|
expect(users.length).toEqual(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should handle null values in filter', async () => {
|
||||||
|
// First, create a user with null email
|
||||||
|
const nullUser = new TestUser({
|
||||||
|
name: 'Null User',
|
||||||
|
age: 40,
|
||||||
|
email: null as any,
|
||||||
|
roles: ['user'],
|
||||||
|
tags: [],
|
||||||
|
status: 'active',
|
||||||
|
metadata: {},
|
||||||
|
scores: []
|
||||||
|
});
|
||||||
|
await nullUser.save();
|
||||||
|
|
||||||
|
const users = await TestUser.getInstances({ email: null });
|
||||||
|
expect(users.length).toEqual(1);
|
||||||
|
expect(users[0].name).toEqual('Null User');
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
await nullUser.delete();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should handle arrays as direct equality match', async () => {
|
||||||
|
// This tests that arrays without operators are treated as equality matches
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
roles: ['user'] // Exact match for array
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2); // Both Jane and Charlie have exactly ['user']
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Charlie Wilson', 'Jane Smith']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should handle regex operator', async () => {
|
||||||
|
const users = await TestUser.getInstances({
|
||||||
|
name: { $regex: '^J', $options: 'i' }
|
||||||
|
});
|
||||||
|
expect(users.length).toEqual(2);
|
||||||
|
const names = users.map(u => u.name).sort();
|
||||||
|
expect(names).toEqual(['Jane Smith', 'John Doe']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should handle unknown operators by letting MongoDB reject them', async () => {
|
||||||
|
// Unknown operators should be passed through to MongoDB, which will reject them
|
||||||
|
let error: Error | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await TestUser.getInstances({
|
||||||
|
age: { $unknownOp: 30 } as any
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error?.message).toMatch(/unknown operator.*\$unknownOp/);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= PERFORMANCE TESTS =============
|
||||||
|
tap.test('should efficiently filter large result sets', async () => {
|
||||||
|
// Create many test documents
|
||||||
|
const manyUsers = [];
|
||||||
|
for (let i = 0; i < 100; i++) {
|
||||||
|
manyUsers.push(new TestUser({
|
||||||
|
name: `User ${i}`,
|
||||||
|
age: 20 + (i % 40),
|
||||||
|
email: `user${i}@example.com`,
|
||||||
|
roles: i % 3 === 0 ? ['admin'] : ['user'],
|
||||||
|
tags: i % 2 === 0 ? ['even', 'test'] : ['odd', 'test'],
|
||||||
|
status: i % 4 === 0 ? 'inactive' : 'active',
|
||||||
|
metadata: { loginCount: i },
|
||||||
|
scores: [i, i + 10, i + 20]
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save in batches for efficiency
|
||||||
|
for (const user of manyUsers) {
|
||||||
|
await user.save();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Complex filter that should still be fast
|
||||||
|
const startTime = Date.now();
|
||||||
|
const filtered = await TestUser.getInstances({
|
||||||
|
$and: [
|
||||||
|
{ age: { $gte: 30, $lt: 40 } },
|
||||||
|
{ status: 'active' },
|
||||||
|
{ tags: { $in: ['even'] } },
|
||||||
|
{ 'metadata.loginCount': { $gte: 20 } }
|
||||||
|
]
|
||||||
|
});
|
||||||
|
const duration = Date.now() - startTime;
|
||||||
|
|
||||||
|
console.log(`Complex filter on 100+ documents took ${duration}ms`);
|
||||||
|
expect(duration).toBeLessThan(1000); // Should complete in under 1 second
|
||||||
|
expect(filtered.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// Clean up
|
||||||
|
for (const user of manyUsers) {
|
||||||
|
await user.delete();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============= CLEANUP =============
|
||||||
|
tap.test('should clean up test database', async () => {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
|
await smartmongoInstance.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -1,7 +1,9 @@
|
|||||||
import { tap, expect } from '@pushrocks/tapbundle';
|
// TODO: Decorator support during testing for bun and deno in @git.zone/tstest
|
||||||
import { Qenv } from '@pushrocks/qenv';
|
|
||||||
import * as smartmongo from '@pushrocks/smartmongo';
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
import * as mongodb from 'mongodb';
|
import * as mongodb from 'mongodb';
|
||||||
|
|
||||||
@@ -30,7 +32,7 @@ tap.test('should create a testinstance as database', async () => {
|
|||||||
tap.skip.test('should connect to atlas', async (tools) => {
|
tap.skip.test('should connect to atlas', async (tools) => {
|
||||||
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
||||||
testDb = new smartdata.SmartdataDb({
|
testDb = new smartdata.SmartdataDb({
|
||||||
mongoDbUrl: testQenv.getEnvVarOnDemand('MONGO_URL'),
|
mongoDbUrl: await testQenv.getEnvVarOnDemand('MONGO_URL'),
|
||||||
mongoDbName: databaseName,
|
mongoDbName: databaseName,
|
||||||
});
|
});
|
||||||
await testDb.init();
|
await testDb.init();
|
||||||
@@ -72,6 +74,11 @@ class Car extends smartdata.SmartDataDbDoc<Car, Car> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tap.test('should create a new id', async () => {
|
||||||
|
const newid = await Car.getNewId();
|
||||||
|
console.log(newid);
|
||||||
|
});
|
||||||
|
|
||||||
tap.test('should save the car to the db', async (toolsArg) => {
|
tap.test('should save the car to the db', async (toolsArg) => {
|
||||||
const myCar = new Car('red', 'Volvo');
|
const myCar = new Car('red', 'Volvo');
|
||||||
await myCar.save();
|
await myCar.save();
|
||||||
@@ -92,7 +99,7 @@ tap.test('should save the car to the db', async (toolsArg) => {
|
|||||||
console.log(
|
console.log(
|
||||||
`Filled database with ${counter} of ${totalCars} Cars and memory usage ${
|
`Filled database with ${counter} of ${totalCars} Cars and memory usage ${
|
||||||
process.memoryUsage().rss / 1e6
|
process.memoryUsage().rss / 1e6
|
||||||
} MB`
|
} MB`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} while (counter < totalCars);
|
} while (counter < totalCars);
|
||||||
@@ -100,7 +107,7 @@ tap.test('should save the car to the db', async (toolsArg) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
tap.test('expect to get instance of Car with shallow match', async () => {
|
tap.test('expect to get instance of Car with shallow match', async () => {
|
||||||
const totalQueryCycles = totalCars / 6;
|
const totalQueryCycles = totalCars / 2;
|
||||||
let counter = 0;
|
let counter = 0;
|
||||||
do {
|
do {
|
||||||
const timeStart = Date.now();
|
const timeStart = Date.now();
|
||||||
@@ -111,7 +118,7 @@ tap.test('expect to get instance of Car with shallow match', async () => {
|
|||||||
console.log(
|
console.log(
|
||||||
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||||
Date.now() - timeStart
|
Date.now() - timeStart
|
||||||
}ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
|
}ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
expect(myCars[0].deepData.sodeep).toEqual('yes');
|
expect(myCars[0].deepData.sodeep).toEqual('yes');
|
||||||
@@ -134,7 +141,7 @@ tap.test('expect to get instance of Car with deep match', async () => {
|
|||||||
console.log(
|
console.log(
|
||||||
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
`performed ${counter} of ${totalQueryCycles} total query cycles: took ${
|
||||||
Date.now() - timeStart
|
Date.now() - timeStart
|
||||||
}ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
|
}ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
expect(myCars2[0].deepData.sodeep).toEqual('yes');
|
expect(myCars2[0].deepData.sodeep).toEqual('yes');
|
||||||
@@ -194,10 +201,16 @@ tap.test('should store a new Truck', async () => {
|
|||||||
const truck = new Truck('blue', 'MAN');
|
const truck = new Truck('blue', 'MAN');
|
||||||
await truck.save();
|
await truck.save();
|
||||||
const myTruck2 = await Truck.getInstance({ color: 'blue' });
|
const myTruck2 = await Truck.getInstance({ color: 'blue' });
|
||||||
|
expect(myTruck2.color).toEqual('blue');
|
||||||
myTruck2.color = 'red';
|
myTruck2.color = 'red';
|
||||||
await myTruck2.save();
|
await myTruck2.save();
|
||||||
const myTruck3 = await Truck.getInstance({ color: 'blue' });
|
const myTruck3 = await Truck.getInstance({ color: 'blue' });
|
||||||
console.log(myTruck3);
|
expect(myTruck3).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should return a count', async () => {
|
||||||
|
const truckCount = await Truck.getCount();
|
||||||
|
expect(truckCount).toEqual(1);
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('should use a cursor', async () => {
|
tap.test('should use a cursor', async () => {
|
||||||
@@ -213,11 +226,13 @@ tap.test('should use a cursor', async () => {
|
|||||||
// close the database connection
|
// close the database connection
|
||||||
// =======================================
|
// =======================================
|
||||||
tap.test('close', async () => {
|
tap.test('close', async () => {
|
||||||
await testDb.mongoDb.dropDatabase();
|
|
||||||
await testDb.close();
|
|
||||||
if (smartmongoInstance) {
|
if (smartmongoInstance) {
|
||||||
await smartmongoInstance.stop();
|
await smartmongoInstance.stopAndDumpToDir('./.nogit/dbdump/test.ts');
|
||||||
|
} else {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
}
|
}
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.start({ throwOnError: true });
|
tap.start({ throwOnError: true });
|
||||||
202
test/test.search.advanced.node.ts
Normal file
202
test/test.search.advanced.node.ts
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import * as smartdata from '../ts/index.js';
|
||||||
|
import { searchable } from '../ts/classes.doc.js';
|
||||||
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
|
// Set up database connection
|
||||||
|
let smartmongoInstance: smartmongo.SmartMongo;
|
||||||
|
let testDb: smartdata.SmartdataDb;
|
||||||
|
|
||||||
|
// Define a test class for advanced search scenarios
|
||||||
|
@smartdata.Collection(() => testDb)
|
||||||
|
class Product extends smartdata.SmartDataDbDoc<Product, Product> {
|
||||||
|
@smartdata.unI()
|
||||||
|
public id: string = smartunique.shortId();
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public description: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public category: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public price: number;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
nameArg: string,
|
||||||
|
descriptionArg: string,
|
||||||
|
categoryArg: string,
|
||||||
|
priceArg: number,
|
||||||
|
) {
|
||||||
|
super();
|
||||||
|
this.name = nameArg;
|
||||||
|
this.description = descriptionArg;
|
||||||
|
this.category = categoryArg;
|
||||||
|
this.price = priceArg;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize DB and insert sample products
|
||||||
|
tap.test('setup advanced search database', async () => {
|
||||||
|
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||||
|
testDb = new smartdata.SmartdataDb(
|
||||||
|
await smartmongoInstance.getMongoDescriptor(),
|
||||||
|
);
|
||||||
|
await testDb.init();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('insert products for advanced search', async () => {
|
||||||
|
const products = [
|
||||||
|
new Product(
|
||||||
|
'Night Owl Lamp',
|
||||||
|
'Bright lamp for night reading',
|
||||||
|
'Lighting',
|
||||||
|
29,
|
||||||
|
),
|
||||||
|
new Product(
|
||||||
|
'Day Light Lamp',
|
||||||
|
'Daytime lamp with adjustable brightness',
|
||||||
|
'Lighting',
|
||||||
|
39,
|
||||||
|
),
|
||||||
|
new Product(
|
||||||
|
'Office Chair',
|
||||||
|
'Ergonomic chair for office',
|
||||||
|
'Furniture',
|
||||||
|
199,
|
||||||
|
),
|
||||||
|
new Product(
|
||||||
|
'Gaming Chair',
|
||||||
|
'Comfortable for long gaming sessions',
|
||||||
|
'Furniture',
|
||||||
|
299,
|
||||||
|
),
|
||||||
|
new Product(
|
||||||
|
'iPhone 12',
|
||||||
|
'Latest iPhone with A14 Bionic chip',
|
||||||
|
'Electronics',
|
||||||
|
999,
|
||||||
|
),
|
||||||
|
new Product(
|
||||||
|
'AirPods',
|
||||||
|
'Wireless earbuds with noise cancellation',
|
||||||
|
'Electronics',
|
||||||
|
249,
|
||||||
|
),
|
||||||
|
];
|
||||||
|
for (const p of products) {
|
||||||
|
await p.save();
|
||||||
|
}
|
||||||
|
const all = await Product.getInstances({});
|
||||||
|
expect(all.length).toEqual(products.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Simple exact field:value matching
|
||||||
|
tap.test('simpleExact: category:Furniture returns chairs', async () => {
|
||||||
|
const res = await Product.search('category:Furniture');
|
||||||
|
expect(res.length).toEqual(2);
|
||||||
|
const names = res.map((r) => r.name).sort();
|
||||||
|
expect(names).toEqual(['Gaming Chair', 'Office Chair']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// simpleExact invalid field should throw
|
||||||
|
tap.test('simpleExact invalid field errors', async () => {
|
||||||
|
let error: Error;
|
||||||
|
try {
|
||||||
|
await Product.search('price:29');
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error.message).toMatch(/not searchable/);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Quoted phrase search
|
||||||
|
tap.test('quoted phrase "Bright lamp" matches Night Owl Lamp', async () => {
|
||||||
|
const res = await Product.search('"Bright lamp"');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Night Owl Lamp');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test("quoted phrase 'night reading' matches Night Owl Lamp", async () => {
|
||||||
|
const res = await Product.search("'night reading'");
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Night Owl Lamp');
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
tap.test('wildcard description:*gaming* matches Gaming Chair', async () => {
|
||||||
|
const res = await Product.search('description:*gaming*');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Gaming Chair');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Boolean AND and OR
|
||||||
|
tap.test('boolean AND: category:Lighting AND lamp', async () => {
|
||||||
|
const res = await Product.search('category:Lighting AND lamp');
|
||||||
|
expect(res.length).toEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('boolean OR: Furniture OR Electronics', async () => {
|
||||||
|
const res = await Product.search('Furniture OR Electronics');
|
||||||
|
expect(res.length).toEqual(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Multi-term unquoted -> AND across terms
|
||||||
|
tap.test('multi-term unquoted adjustable brightness', async () => {
|
||||||
|
const res = await Product.search('adjustable brightness');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Day Light Lamp');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('multi-term unquoted Night Lamp', async () => {
|
||||||
|
const res = await Product.search('Night Lamp');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Night Owl Lamp');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Grouping with parentheses
|
||||||
|
tap.test('grouping: (Furniture OR Electronics) AND Chair', async () => {
|
||||||
|
const res = await Product.search(
|
||||||
|
'(Furniture OR Electronics) AND Chair',
|
||||||
|
);
|
||||||
|
expect(res.length).toEqual(2);
|
||||||
|
const names = res.map((r) => r.name).sort();
|
||||||
|
expect(names).toEqual(['Gaming Chair', 'Office Chair']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Additional range and combined query tests
|
||||||
|
tap.test('range query price:[30 TO 300] returns expected products', async () => {
|
||||||
|
const res = await Product.search('price:[30 TO 300]');
|
||||||
|
// Expect products with price between 30 and 300 inclusive: Day Light Lamp, Gaming Chair, Office Chair, AirPods
|
||||||
|
expect(res.length).toEqual(4);
|
||||||
|
const names = res.map((r) => r.name).sort();
|
||||||
|
expect(names).toEqual(['AirPods', 'Day Light Lamp', 'Gaming Chair', 'Office Chair']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should filter category and price range', async () => {
|
||||||
|
const res = await Product.search('category:Lighting AND price:[30 TO 40]');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].name).toEqual('Day Light Lamp');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Teardown
|
||||||
|
tap.test('cleanup advanced search database', async () => {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
|
if (smartmongoInstance) {
|
||||||
|
await smartmongoInstance.stopAndDumpToDir(
|
||||||
|
`.nogit/dbdump/test.search.advanced.ts`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.start({ throwOnError: true });
|
||||||
408
test/test.search.ts
Normal file
408
test/test.search.ts
Normal file
@@ -0,0 +1,408 @@
|
|||||||
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
|
// Import the smartdata library
|
||||||
|
import * as smartdata from '../ts/index.js';
|
||||||
|
import { searchable } from '../ts/classes.doc.js';
|
||||||
|
|
||||||
|
// Set up database connection
|
||||||
|
let smartmongoInstance: smartmongo.SmartMongo;
|
||||||
|
let testDb: smartdata.SmartdataDb;
|
||||||
|
// Class for location-based wildcard/phrase tests
|
||||||
|
let LocationDoc: any;
|
||||||
|
|
||||||
|
// Define a test class with searchable fields using the standard SmartDataDbDoc
|
||||||
|
@smartdata.Collection(() => testDb)
|
||||||
|
class Product extends smartdata.SmartDataDbDoc<Product, Product> {
|
||||||
|
@smartdata.unI()
|
||||||
|
public id: string = smartunique.shortId();
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public description: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
@searchable()
|
||||||
|
public category: string;
|
||||||
|
|
||||||
|
@smartdata.svDb()
|
||||||
|
public price: number;
|
||||||
|
|
||||||
|
constructor(nameArg: string, descriptionArg: string, categoryArg: string, priceArg: number) {
|
||||||
|
super();
|
||||||
|
this.name = nameArg;
|
||||||
|
this.description = descriptionArg;
|
||||||
|
this.category = categoryArg;
|
||||||
|
this.price = priceArg;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tap.test('should create a test database instance', async () => {
|
||||||
|
smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
|
||||||
|
testDb = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
|
||||||
|
await testDb.init();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should create test products with searchable fields', async () => {
|
||||||
|
// Create several products with different fields to search
|
||||||
|
const products = [
|
||||||
|
new Product('iPhone 12', 'Latest iPhone with A14 Bionic chip', 'Electronics', 999),
|
||||||
|
new Product('MacBook Pro', 'Powerful laptop for professionals', 'Electronics', 1999),
|
||||||
|
new Product('AirPods', 'Wireless earbuds with noise cancellation', 'Electronics', 249),
|
||||||
|
new Product('Galaxy S21', 'Samsung flagship phone with great camera', 'Electronics', 899),
|
||||||
|
new Product('Kindle Paperwhite', 'E-reader with built-in light', 'Books', 129),
|
||||||
|
new Product('Harry Potter', 'Fantasy book series about wizards', 'Books', 49),
|
||||||
|
new Product('Coffee Maker', 'Automatic drip coffee machine', 'Kitchen', 89),
|
||||||
|
new Product('Blender', 'High-speed blender for smoothies', 'Kitchen', 129),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Save all products to the database
|
||||||
|
for (const product of products) {
|
||||||
|
await product.save();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that we can get all products
|
||||||
|
const allProducts = await Product.getInstances({});
|
||||||
|
expect(allProducts.length).toEqual(products.length);
|
||||||
|
console.log(`Successfully created and saved ${allProducts.length} products`);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should retrieve searchable fields for a class', async () => {
|
||||||
|
// Use the getSearchableFields function to verify our searchable fields
|
||||||
|
const searchableFields = Product.getSearchableFields();
|
||||||
|
console.log('Searchable fields:', searchableFields);
|
||||||
|
|
||||||
|
expect(searchableFields.length).toEqual(3);
|
||||||
|
expect(searchableFields).toContain('name');
|
||||||
|
expect(searchableFields).toContain('description');
|
||||||
|
expect(searchableFields).toContain('category');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products by exact field match', async () => {
|
||||||
|
// Basic field exact match search
|
||||||
|
const electronicsProducts = await Product.getInstances({ category: 'Electronics' });
|
||||||
|
console.log(`Found ${electronicsProducts.length} products in Electronics category`);
|
||||||
|
|
||||||
|
expect(electronicsProducts.length).toEqual(4);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products by basic search method', async () => {
|
||||||
|
// Using the basic search method with simple Lucene query
|
||||||
|
try {
|
||||||
|
const iPhoneResults = await Product.search('iPhone');
|
||||||
|
console.log(`Found ${iPhoneResults.length} products matching 'iPhone' using basic search`);
|
||||||
|
|
||||||
|
expect(iPhoneResults.length).toEqual(1);
|
||||||
|
expect(iPhoneResults[0].name).toEqual('iPhone 12');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Basic search error:', error.message);
|
||||||
|
// If basic search fails, we'll demonstrate the enhanced approach in later tests
|
||||||
|
console.log('Will test with enhanced searchWithLucene method next');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products with search method', async () => {
|
||||||
|
// Using the robust searchWithLucene method
|
||||||
|
const wirelessResults = await Product.search('wireless');
|
||||||
|
console.log(
|
||||||
|
`Found ${wirelessResults.length} products matching 'wireless' using search`,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(wirelessResults.length).toEqual(1);
|
||||||
|
expect(wirelessResults[0].name).toEqual('AirPods');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products by category with search', async () => {
|
||||||
|
// Using field-specific search with searchWithLucene
|
||||||
|
const kitchenResults = await Product.search('category:Kitchen');
|
||||||
|
console.log(`Found ${kitchenResults.length} products in Kitchen category using search`);
|
||||||
|
|
||||||
|
expect(kitchenResults.length).toEqual(2);
|
||||||
|
expect(kitchenResults[0].category).toEqual('Kitchen');
|
||||||
|
expect(kitchenResults[1].category).toEqual('Kitchen');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products with partial word matches', async () => {
|
||||||
|
// Testing partial word matches
|
||||||
|
const proResults = await Product.search('Pro');
|
||||||
|
console.log(`Found ${proResults.length} products matching 'Pro'`);
|
||||||
|
|
||||||
|
// Should match both "MacBook Pro" and "professionals" in description
|
||||||
|
expect(proResults.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search across multiple searchable fields', async () => {
|
||||||
|
// Test searching across all searchable fields
|
||||||
|
const bookResults = await Product.search('book');
|
||||||
|
console.log(`Found ${bookResults.length} products matching 'book' across all fields`);
|
||||||
|
|
||||||
|
// Should match "MacBook" in name and "Books" in category
|
||||||
|
expect(bookResults.length).toBeGreaterThan(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should handle case insensitive searches', async () => {
|
||||||
|
// Test case insensitivity
|
||||||
|
const electronicsResults = await Product.search('electronics');
|
||||||
|
const ElectronicsResults = await Product.search('Electronics');
|
||||||
|
|
||||||
|
console.log(`Found ${electronicsResults.length} products matching lowercase 'electronics'`);
|
||||||
|
console.log(`Found ${ElectronicsResults.length} products matching capitalized 'Electronics'`);
|
||||||
|
|
||||||
|
// Both searches should return the same results
|
||||||
|
expect(electronicsResults.length).toEqual(ElectronicsResults.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should demonstrate search fallback mechanisms', async () => {
|
||||||
|
console.log('\n====== FALLBACK MECHANISM DEMONSTRATION ======');
|
||||||
|
console.log('If MongoDB query fails, searchWithLucene will:');
|
||||||
|
console.log('1. Try using basic MongoDB filters');
|
||||||
|
console.log('2. Fall back to field-specific searches');
|
||||||
|
console.log('3. As last resort, perform in-memory filtering');
|
||||||
|
console.log('This ensures robust search even with complex queries');
|
||||||
|
console.log('==============================================\n');
|
||||||
|
|
||||||
|
// Use a simpler term that should be found in descriptions
|
||||||
|
// Avoid using "OR" operator which requires a text index
|
||||||
|
const results = await Product.search('high');
|
||||||
|
console.log(`Found ${results.length} products matching 'high'`);
|
||||||
|
|
||||||
|
// "High-speed blender" contains "high"
|
||||||
|
expect(results.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// Try another fallback example that won't need $text
|
||||||
|
const powerfulResults = await Product.search('powerful');
|
||||||
|
console.log(`Found ${powerfulResults.length} products matching 'powerful'`);
|
||||||
|
|
||||||
|
// "Powerful laptop for professionals" contains "powerful"
|
||||||
|
expect(powerfulResults.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should explain the advantages of the integrated approach', async () => {
|
||||||
|
console.log('\n====== INTEGRATED SEARCH APPROACH BENEFITS ======');
|
||||||
|
console.log('1. No separate class hierarchy - keeps code simple');
|
||||||
|
console.log('2. Enhanced convertFilterForMongoDb handles MongoDB operators');
|
||||||
|
console.log('3. Robust fallback mechanisms ensure searches always work');
|
||||||
|
console.log('4. searchWithLucene provides powerful search capabilities');
|
||||||
|
console.log('5. Backwards compatible with existing code');
|
||||||
|
console.log('================================================\n');
|
||||||
|
|
||||||
|
expect(true).toEqual(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Additional robustness tests
|
||||||
|
tap.test('should search exact name using field:value', async () => {
|
||||||
|
const nameResults = await Product.search('name:AirPods');
|
||||||
|
expect(nameResults.length).toEqual(1);
|
||||||
|
expect(nameResults[0].name).toEqual('AirPods');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should throw when searching non-searchable field', async () => {
|
||||||
|
let error: Error;
|
||||||
|
try {
|
||||||
|
await Product.search('price:129');
|
||||||
|
} catch (err) {
|
||||||
|
error = err as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error.message).toMatch(/not searchable/);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('empty query should return all products', async () => {
|
||||||
|
const allResults = await Product.search('');
|
||||||
|
expect(allResults.length).toEqual(8);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search multi-word term across fields', async () => {
|
||||||
|
const termResults = await Product.search('iPhone 12');
|
||||||
|
expect(termResults.length).toEqual(1);
|
||||||
|
expect(termResults[0].name).toEqual('iPhone 12');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Additional search scenarios
|
||||||
|
tap.test('should return zero results for non-existent terms', async () => {
|
||||||
|
const noResults = await Product.search('NonexistentTerm');
|
||||||
|
expect(noResults.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products by description term "noise"', async () => {
|
||||||
|
const noiseResults = await Product.search('noise');
|
||||||
|
expect(noiseResults.length).toEqual(1);
|
||||||
|
expect(noiseResults[0].name).toEqual('AirPods');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search products by description term "flagship"', async () => {
|
||||||
|
const flagshipResults = await Product.search('flagship');
|
||||||
|
expect(flagshipResults.length).toEqual(1);
|
||||||
|
expect(flagshipResults[0].name).toEqual('Galaxy S21');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search numeric strings "12"', async () => {
|
||||||
|
const twelveResults = await Product.search('12');
|
||||||
|
expect(twelveResults.length).toEqual(1);
|
||||||
|
expect(twelveResults[0].name).toEqual('iPhone 12');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search hyphenated terms "high-speed"', async () => {
|
||||||
|
const hyphenResults = await Product.search('high-speed');
|
||||||
|
expect(hyphenResults.length).toEqual(1);
|
||||||
|
expect(hyphenResults[0].name).toEqual('Blender');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should search hyphenated terms "E-reader"', async () => {
|
||||||
|
const ereaderResults = await Product.search('E-reader');
|
||||||
|
expect(ereaderResults.length).toEqual(1);
|
||||||
|
expect(ereaderResults[0].name).toEqual('Kindle Paperwhite');
|
||||||
|
});
|
||||||
|
|
||||||
|
// Additional robustness tests
|
||||||
|
tap.test('should return all products for empty search', async () => {
|
||||||
|
const searchResults = await Product.search('');
|
||||||
|
const allProducts = await Product.getInstances({});
|
||||||
|
expect(searchResults.length).toEqual(allProducts.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should support wildcard plain term across all fields', async () => {
|
||||||
|
const results = await Product.search('*book*');
|
||||||
|
const names = results.map((r) => r.name).sort();
|
||||||
|
expect(names).toEqual(['Harry Potter', 'Kindle Paperwhite', 'MacBook Pro']);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should support wildcard plain term with question mark pattern', async () => {
|
||||||
|
const results = await Product.search('?one?');
|
||||||
|
const names = results.map((r) => r.name).sort();
|
||||||
|
expect(names).toEqual(['Galaxy S21', 'iPhone 12']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Filter and Validation tests
|
||||||
|
tap.test('should apply filter option to restrict results', async () => {
|
||||||
|
// search term 'book' across all fields but restrict to Books category
|
||||||
|
const bookFiltered = await Product.search('book', { filter: { category: 'Books' } });
|
||||||
|
expect(bookFiltered.length).toEqual(2);
|
||||||
|
bookFiltered.forEach((p) => expect(p.category).toEqual('Books'));
|
||||||
|
});
|
||||||
|
tap.test('should apply validate hook to post-filter results', async () => {
|
||||||
|
// return only products with price > 500
|
||||||
|
const expensive = await Product.search('', { validate: (p) => p.price > 500 });
|
||||||
|
expect(expensive.length).toBeGreaterThan(0);
|
||||||
|
expensive.forEach((p) => expect(p.price).toBeGreaterThan(500));
|
||||||
|
});
|
||||||
|
|
||||||
|
// Tests for quoted and wildcard field-specific phrases
|
||||||
|
tap.test('setup location test products', async () => {
|
||||||
|
@smartdata.Collection(() => testDb)
|
||||||
|
class LD extends smartdata.SmartDataDbDoc<LD, LD> {
|
||||||
|
@smartdata.unI() public id: string = smartunique.shortId();
|
||||||
|
@smartdata.svDb() @searchable() public location: string;
|
||||||
|
constructor(loc: string) { super(); this.location = loc; }
|
||||||
|
}
|
||||||
|
// Assign to outer variable for subsequent tests
|
||||||
|
LocationDoc = LD;
|
||||||
|
const locations = ['Berlin', 'Frankfurt am Main', 'Frankfurt am Oder', 'London'];
|
||||||
|
for (const loc of locations) {
|
||||||
|
await new LocationDoc(loc).save();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
tap.test('should search exact quoted field phrase', async () => {
|
||||||
|
const results = await (LocationDoc as any).search('location:"Frankfurt am Main"');
|
||||||
|
expect(results.length).toEqual(1);
|
||||||
|
expect(results[0].location).toEqual('Frankfurt am Main');
|
||||||
|
});
|
||||||
|
tap.test('should search wildcard quoted field phrase', async () => {
|
||||||
|
const results = await (LocationDoc as any).search('location:"Frankfurt am *"');
|
||||||
|
const names = results.map((d: any) => d.location).sort();
|
||||||
|
expect(names).toEqual(['Frankfurt am Main', 'Frankfurt am Oder']);
|
||||||
|
});
|
||||||
|
tap.test('should search unquoted wildcard field', async () => {
|
||||||
|
const results = await (LocationDoc as any).search('location:Frankfurt*');
|
||||||
|
const names = results.map((d: any) => d.location).sort();
|
||||||
|
expect(names).toEqual(['Frankfurt am Main', 'Frankfurt am Oder']);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Combined free-term + field phrase/wildcard tests
|
||||||
|
let CombinedDoc: any;
|
||||||
|
tap.test('setup combined docs for free-term and location tests', async () => {
|
||||||
|
@smartdata.Collection(() => testDb)
|
||||||
|
class CD extends smartdata.SmartDataDbDoc<CD, CD> {
|
||||||
|
@smartdata.unI() public id: string = smartunique.shortId();
|
||||||
|
@smartdata.svDb() @searchable() public name: string;
|
||||||
|
@smartdata.svDb() @searchable() public location: string;
|
||||||
|
constructor(name: string, location: string) { super(); this.name = name; this.location = location; }
|
||||||
|
}
|
||||||
|
CombinedDoc = CD;
|
||||||
|
const docs = [
|
||||||
|
new CombinedDoc('TypeScript', 'Berlin'),
|
||||||
|
new CombinedDoc('TypeScript', 'Frankfurt am Main'),
|
||||||
|
new CombinedDoc('TypeScript', 'Frankfurt am Oder'),
|
||||||
|
new CombinedDoc('JavaScript', 'Berlin'),
|
||||||
|
];
|
||||||
|
for (const d of docs) await d.save();
|
||||||
|
});
|
||||||
|
tap.test('should search free term and exact quoted field phrase', async () => {
|
||||||
|
const res = await CombinedDoc.search('TypeScript location:"Berlin"');
|
||||||
|
expect(res.length).toEqual(1);
|
||||||
|
expect(res[0].location).toEqual('Berlin');
|
||||||
|
});
|
||||||
|
tap.test('should not match free term with non-matching quoted field phrase', async () => {
|
||||||
|
const res = await CombinedDoc.search('TypeScript location:"Frankfurt d"');
|
||||||
|
expect(res.length).toEqual(0);
|
||||||
|
});
|
||||||
|
tap.test('should search free term with quoted wildcard field phrase', async () => {
|
||||||
|
const res = await CombinedDoc.search('TypeScript location:"Frankfurt am *"');
|
||||||
|
const locs = res.map((r: any) => r.location).sort();
|
||||||
|
expect(locs).toEqual(['Frankfurt am Main', 'Frankfurt am Oder']);
|
||||||
|
});
|
||||||
|
// Quoted exact field phrase without wildcard should return no matches if no exact match
|
||||||
|
tap.test('should not match location:"Frankfurt d"', async () => {
|
||||||
|
const results = await (LocationDoc as any).search('location:"Frankfurt d"');
|
||||||
|
expect(results.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Combined free-term and field wildcard tests
|
||||||
|
tap.test('should combine free term and wildcard field search', async () => {
|
||||||
|
const results = await Product.search('book category:Book*');
|
||||||
|
expect(results.length).toEqual(2);
|
||||||
|
results.forEach((p) => expect(p.category).toEqual('Books'));
|
||||||
|
});
|
||||||
|
tap.test('should not match when free term matches but wildcard field does not', async () => {
|
||||||
|
const results = await Product.search('book category:Kitchen*');
|
||||||
|
expect(results.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Non-searchable field should cause an error for combined queries
|
||||||
|
tap.test('should throw when combining term with non-searchable field', async () => {
|
||||||
|
let error: Error;
|
||||||
|
try {
|
||||||
|
await Product.search('book location:Berlin');
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error.message).toMatch(/not searchable/);
|
||||||
|
});
|
||||||
|
tap.test('should throw when combining term with non-searchable wildcard field', async () => {
|
||||||
|
let error: Error;
|
||||||
|
try {
|
||||||
|
await Product.search('book location:Berlin*');
|
||||||
|
} catch (e) {
|
||||||
|
error = e as Error;
|
||||||
|
}
|
||||||
|
expect(error).toBeTruthy();
|
||||||
|
expect(error.message).toMatch(/not searchable/);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Close database connection
|
||||||
|
tap.test('close database connection', async () => {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
await testDb.close();
|
||||||
|
if (smartmongoInstance) {
|
||||||
|
await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.search.ts`);
|
||||||
|
}
|
||||||
|
setTimeout(() => process.exit(), 2000);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.start({ throwOnError: true });
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import { tap, expect } from '@pushrocks/tapbundle';
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
import { Qenv } from '@pushrocks/qenv';
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
import * as smartmongo from '@pushrocks/smartmongo';
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ tap.test('should create a testinstance as database', async () => {
|
|||||||
tap.skip.test('should connect to atlas', async (tools) => {
|
tap.skip.test('should connect to atlas', async (tools) => {
|
||||||
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
||||||
testDb = new smartdata.SmartdataDb({
|
testDb = new smartdata.SmartdataDb({
|
||||||
mongoDbUrl: testQenv.getEnvVarOnDemand('MONGO_URL'),
|
mongoDbUrl: await testQenv.getEnvVarOnDemand('MONGO_URL'),
|
||||||
mongoDbName: databaseName,
|
mongoDbName: databaseName,
|
||||||
});
|
});
|
||||||
await testDb.init();
|
await testDb.init();
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { tap, expect } from '@pushrocks/tapbundle';
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
import { Qenv } from '@pushrocks/qenv';
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
import * as smartmongo from '@pushrocks/smartmongo';
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
import { smartunique } from '../ts/smartdata.plugins.js';
|
import { smartunique } from '../ts/plugins.js';
|
||||||
|
|
||||||
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ tap.test('should create a testinstance as database', async () => {
|
|||||||
tap.skip.test('should connect to atlas', async (tools) => {
|
tap.skip.test('should connect to atlas', async (tools) => {
|
||||||
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
const databaseName = `test-smartdata-${smartunique.shortId()}`;
|
||||||
testDb = new smartdata.SmartdataDb({
|
testDb = new smartdata.SmartdataDb({
|
||||||
mongoDbUrl: testQenv.getEnvVarOnDemand('MONGO_URL'),
|
mongoDbUrl: await testQenv.getEnvVarOnDemand('MONGO_URL'),
|
||||||
mongoDbName: databaseName,
|
mongoDbName: databaseName,
|
||||||
});
|
});
|
||||||
await testDb.init();
|
await testDb.init();
|
||||||
@@ -42,14 +42,14 @@ class House extends smartdata.SmartDataDbDoc<House, House> {
|
|||||||
@smartdata.svDb()
|
@smartdata.svDb()
|
||||||
public data = {
|
public data = {
|
||||||
id: smartunique.shortId(),
|
id: smartunique.shortId(),
|
||||||
hello: 'hello'
|
hello: 'hello',
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
tap.test('should watch a collection', async (toolsArg) => {
|
tap.test('should watch a collection', async (toolsArg) => {
|
||||||
const done = toolsArg.defer();
|
const done = toolsArg.defer();
|
||||||
const watcher = await House.watch({});
|
const watcher = await House.watch({});
|
||||||
watcher.changeSubject.subscribe(async houseArg => {
|
watcher.changeSubject.subscribe(async (houseArg) => {
|
||||||
console.log('hey there, we observed a house');
|
console.log('hey there, we observed a house');
|
||||||
await watcher.close();
|
await watcher.close();
|
||||||
done.resolve();
|
done.resolve();
|
||||||
@@ -58,17 +58,58 @@ tap.test('should watch a collection', async (toolsArg) => {
|
|||||||
await newHouse.save();
|
await newHouse.save();
|
||||||
console.log('saved a house');
|
console.log('saved a house');
|
||||||
await done.promise;
|
await done.promise;
|
||||||
})
|
});
|
||||||
|
|
||||||
|
// ======= New tests for EventEmitter and buffering support =======
|
||||||
|
tap.test('should emit change via EventEmitter', async (tools) => {
|
||||||
|
const done = tools.defer();
|
||||||
|
const watcher = await House.watch({});
|
||||||
|
watcher.on('change', async (houseArg) => {
|
||||||
|
// Expect a House instance
|
||||||
|
expect(houseArg).toBeDefined();
|
||||||
|
// Clean up
|
||||||
|
await watcher.stop();
|
||||||
|
done.resolve();
|
||||||
|
});
|
||||||
|
// Trigger an insert to generate a change event
|
||||||
|
const h = new House();
|
||||||
|
await h.save();
|
||||||
|
await done.promise;
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should buffer change events when bufferTimeMs is set', async (tools) => {
|
||||||
|
const done = tools.defer();
|
||||||
|
// bufferTimeMs collects events into arrays every 50ms
|
||||||
|
const watcher = await House.watch({}, { bufferTimeMs: 50 });
|
||||||
|
let received: House[];
|
||||||
|
watcher.changeSubject.subscribe(async (batch: House[]) => {
|
||||||
|
if (batch && batch.length > 0) {
|
||||||
|
received = batch;
|
||||||
|
await watcher.stop();
|
||||||
|
done.resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// Rapidly insert multiple docs
|
||||||
|
const docs = [new House(), new House(), new House()];
|
||||||
|
for (const doc of docs) await doc.save();
|
||||||
|
await done.promise;
|
||||||
|
// All inserts should be in one buffered batch
|
||||||
|
expect(received.length).toEqual(docs.length);
|
||||||
|
});
|
||||||
|
|
||||||
// =======================================
|
// =======================================
|
||||||
// close the database connection
|
// close the database connection
|
||||||
// =======================================
|
// =======================================
|
||||||
tap.test('close', async () => {
|
tap.test('close', async () => {
|
||||||
await testDb.mongoDb.dropDatabase();
|
try {
|
||||||
|
await testDb.mongoDb.dropDatabase();
|
||||||
|
} catch (err) {
|
||||||
|
console.warn('dropDatabase error ignored in cleanup:', err.message || err);
|
||||||
|
}
|
||||||
await testDb.close();
|
await testDb.close();
|
||||||
if (smartmongoInstance) {
|
if (smartmongoInstance) {
|
||||||
await smartmongoInstance.stop();
|
await smartmongoInstance.stop();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.start({ throwOnError: true });
|
tap.start({ throwOnError: true });
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
/**
|
/**
|
||||||
* autocreated commitinfo by @pushrocks/commitinfo
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@pushrocks/smartdata',
|
name: '@push.rocks/smartdata',
|
||||||
version: '5.0.7',
|
version: '6.0.0',
|
||||||
description: 'do more with data'
|
description: 'An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.'
|
||||||
}
|
}
|
||||||
|
|||||||
487
ts/classes.collection.ts
Normal file
487
ts/classes.collection.ts
Normal file
@@ -0,0 +1,487 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { SmartdataDb } from './classes.db.js';
|
||||||
|
import { SmartdataDbCursor } from './classes.cursor.js';
|
||||||
|
import { SmartDataDbDoc, type IIndexOptions } from './classes.doc.js';
|
||||||
|
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||||
|
import { CollectionFactory } from './classes.collectionfactory.js';
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
|
||||||
|
export interface IFindOptions {
|
||||||
|
limit?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
export interface IDocValidationFunc<T> {
|
||||||
|
(doc: T): boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type TDelayed<TDelayedArg> = () => TDelayedArg;
|
||||||
|
|
||||||
|
const collectionFactory = new CollectionFactory();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is a decorator that will tell the decorated class what dbTable to use
|
||||||
|
* @param dbArg
|
||||||
|
*/
|
||||||
|
export function Collection(dbArg: SmartdataDb | TDelayed<SmartdataDb>) {
|
||||||
|
return function classDecorator(value: Function, context: ClassDecoratorContext) {
|
||||||
|
if (context.kind !== 'class') {
|
||||||
|
throw new Error('Collection can only decorate classes');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Capture original constructor for _svDbOptions forwarding
|
||||||
|
const originalConstructor = value as any;
|
||||||
|
const constructor = value as { new (...args: any[]): any };
|
||||||
|
|
||||||
|
const getCollection = () => {
|
||||||
|
if (!(dbArg instanceof SmartdataDb)) {
|
||||||
|
dbArg = dbArg();
|
||||||
|
}
|
||||||
|
const coll = collectionFactory.getCollection(constructor.name, dbArg);
|
||||||
|
// Attach document constructor for searchableFields lookup
|
||||||
|
if (!(coll as any).docCtor) {
|
||||||
|
(coll as any).docCtor = decoratedClass;
|
||||||
|
}
|
||||||
|
return coll;
|
||||||
|
};
|
||||||
|
|
||||||
|
const decoratedClass = class extends constructor {
|
||||||
|
public static className = constructor.name;
|
||||||
|
public static get collection() {
|
||||||
|
return getCollection();
|
||||||
|
}
|
||||||
|
public get collection() {
|
||||||
|
return getCollection();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Ensure instance getter works in Deno by defining it on the prototype
|
||||||
|
Object.defineProperty(decoratedClass.prototype, 'collection', {
|
||||||
|
get: getCollection,
|
||||||
|
enumerable: false,
|
||||||
|
configurable: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Deno compatibility note: Property decorators set properties on the prototype.
|
||||||
|
// Since we removed instance property declarations from SmartDataDbDoc,
|
||||||
|
// the decorator-set prototype properties are now accessible without shadowing.
|
||||||
|
// No manual forwarding needed - natural prototype inheritance works!
|
||||||
|
|
||||||
|
// Point to original constructor's _svDbOptions
|
||||||
|
Object.defineProperty(decoratedClass, '_svDbOptions', {
|
||||||
|
get() { return originalConstructor._svDbOptions; },
|
||||||
|
set(value) { originalConstructor._svDbOptions = value; },
|
||||||
|
configurable: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Initialize prototype properties from context.metadata (TC39 decorator metadata)
|
||||||
|
// This ensures prototype properties are available before any instance is created
|
||||||
|
const metadata = context.metadata as any;
|
||||||
|
if (metadata) {
|
||||||
|
const proto = decoratedClass.prototype;
|
||||||
|
|
||||||
|
// Initialize globalSaveableProperties
|
||||||
|
if (metadata.globalSaveableProperties && !proto.globalSaveableProperties) {
|
||||||
|
proto.globalSaveableProperties = [...metadata.globalSaveableProperties];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize saveableProperties
|
||||||
|
if (metadata.saveableProperties && !proto.saveableProperties) {
|
||||||
|
proto.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize uniqueIndexes
|
||||||
|
if (metadata.uniqueIndexes && !proto.uniqueIndexes) {
|
||||||
|
proto.uniqueIndexes = [...metadata.uniqueIndexes];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize regularIndexes
|
||||||
|
if (metadata.regularIndexes && !proto.regularIndexes) {
|
||||||
|
proto.regularIndexes = [...metadata.regularIndexes];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize searchableFields on constructor (not prototype)
|
||||||
|
if (metadata.searchableFields && !Array.isArray((decoratedClass as any).searchableFields)) {
|
||||||
|
(decoratedClass as any).searchableFields = [...metadata.searchableFields];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize _svDbOptions from metadata
|
||||||
|
if (metadata._svDbOptions && !originalConstructor._svDbOptions) {
|
||||||
|
originalConstructor._svDbOptions = { ...metadata._svDbOptions };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return decoratedClass as any;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IManager {
|
||||||
|
db: SmartdataDb;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setDefaultManagerForDoc = <T,>(managerArg: IManager, dbDocArg: T): T => {
|
||||||
|
(dbDocArg as any).prototype.defaultManager = managerArg;
|
||||||
|
return dbDocArg;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is a decorator that will tell the decorated class what dbTable to use
|
||||||
|
* @param dbArg
|
||||||
|
*/
|
||||||
|
export function managed<TManager extends IManager>(managerArg?: TManager | TDelayed<TManager>) {
|
||||||
|
return function classDecorator(value: Function, context: ClassDecoratorContext) {
|
||||||
|
if (context.kind !== 'class') {
|
||||||
|
throw new Error('managed can only decorate classes');
|
||||||
|
}
|
||||||
|
|
||||||
|
const constructor = value as { new (...args: any[]): any };
|
||||||
|
|
||||||
|
const decoratedClass = class extends constructor {
|
||||||
|
public static className = constructor.name;
|
||||||
|
public static get collection() {
|
||||||
|
let dbArg: SmartdataDb;
|
||||||
|
if (!managerArg) {
|
||||||
|
dbArg = this.prototype.defaultManager.db;
|
||||||
|
} else if (managerArg['db']) {
|
||||||
|
dbArg = (managerArg as TManager).db;
|
||||||
|
} else {
|
||||||
|
dbArg = (managerArg as TDelayed<TManager>)().db;
|
||||||
|
}
|
||||||
|
return collectionFactory.getCollection(constructor.name, dbArg);
|
||||||
|
}
|
||||||
|
public get collection() {
|
||||||
|
let dbArg: SmartdataDb;
|
||||||
|
if (!managerArg) {
|
||||||
|
//console.log(this.defaultManager.db);
|
||||||
|
//process.exit(0)
|
||||||
|
dbArg = this.defaultManager.db;
|
||||||
|
} else if (managerArg['db']) {
|
||||||
|
dbArg = (managerArg as TManager).db;
|
||||||
|
} else {
|
||||||
|
dbArg = (managerArg as TDelayed<TManager>)().db;
|
||||||
|
}
|
||||||
|
return collectionFactory.getCollection(constructor.name, dbArg);
|
||||||
|
}
|
||||||
|
public static get manager() {
|
||||||
|
let manager: TManager;
|
||||||
|
if (!managerArg) {
|
||||||
|
manager = this.prototype.defaultManager;
|
||||||
|
} else if (managerArg['db']) {
|
||||||
|
manager = managerArg as TManager;
|
||||||
|
} else {
|
||||||
|
manager = (managerArg as TDelayed<TManager>)();
|
||||||
|
}
|
||||||
|
return manager;
|
||||||
|
}
|
||||||
|
public get manager() {
|
||||||
|
let manager: TManager;
|
||||||
|
if (!managerArg) {
|
||||||
|
manager = this.defaultManager;
|
||||||
|
} else if (managerArg['db']) {
|
||||||
|
manager = managerArg as TManager;
|
||||||
|
} else {
|
||||||
|
manager = (managerArg as TDelayed<TManager>)();
|
||||||
|
}
|
||||||
|
return manager;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initialize prototype properties from context.metadata (TC39 decorator metadata)
|
||||||
|
// This ensures prototype properties are available before any instance is created
|
||||||
|
const originalConstructor = value as any;
|
||||||
|
const metadata = context.metadata as any;
|
||||||
|
if (metadata) {
|
||||||
|
const proto = decoratedClass.prototype;
|
||||||
|
|
||||||
|
// Initialize globalSaveableProperties
|
||||||
|
if (metadata.globalSaveableProperties && !proto.globalSaveableProperties) {
|
||||||
|
proto.globalSaveableProperties = [...metadata.globalSaveableProperties];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize saveableProperties
|
||||||
|
if (metadata.saveableProperties && !proto.saveableProperties) {
|
||||||
|
proto.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize uniqueIndexes
|
||||||
|
if (metadata.uniqueIndexes && !proto.uniqueIndexes) {
|
||||||
|
proto.uniqueIndexes = [...metadata.uniqueIndexes];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize regularIndexes
|
||||||
|
if (metadata.regularIndexes && !proto.regularIndexes) {
|
||||||
|
proto.regularIndexes = [...metadata.regularIndexes];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize searchableFields on constructor (not prototype)
|
||||||
|
if (metadata.searchableFields && !Array.isArray((decoratedClass as any).searchableFields)) {
|
||||||
|
(decoratedClass as any).searchableFields = [...metadata.searchableFields];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize _svDbOptions from metadata
|
||||||
|
if (metadata._svDbOptions && !originalConstructor._svDbOptions) {
|
||||||
|
originalConstructor._svDbOptions = { ...metadata._svDbOptions };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return decoratedClass as any;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @dpecrecated use @managed instead
|
||||||
|
*/
|
||||||
|
export const Manager = managed;
|
||||||
|
|
||||||
|
export class SmartdataCollection<T> {
|
||||||
|
/**
|
||||||
|
* the collection that is used
|
||||||
|
*/
|
||||||
|
public mongoDbCollection: plugins.mongodb.Collection;
|
||||||
|
public objectValidation: IDocValidationFunc<T> = null;
|
||||||
|
public collectionName: string;
|
||||||
|
public smartdataDb: SmartdataDb;
|
||||||
|
public uniqueIndexes: string[] = [];
|
||||||
|
public regularIndexes: Array<{field: string, options: IIndexOptions}> = [];
|
||||||
|
// flag to ensure text index is created only once
|
||||||
|
private textIndexCreated: boolean = false;
|
||||||
|
|
||||||
|
constructor(classNameArg: string, smartDataDbArg: SmartdataDb) {
|
||||||
|
// tell the collection where it belongs
|
||||||
|
this.collectionName = classNameArg;
|
||||||
|
this.smartdataDb = smartDataDbArg;
|
||||||
|
|
||||||
|
// tell the db class about it (important since Db uses different systems under the hood)
|
||||||
|
this.smartdataDb.addCollection(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* makes sure a collection exists within MongoDb that maps to the SmartdataCollection
|
||||||
|
*/
|
||||||
|
public async init() {
|
||||||
|
if (!this.mongoDbCollection) {
|
||||||
|
// connect this instance to a MongoDB collection
|
||||||
|
const availableMongoDbCollections = await this.smartdataDb.mongoDb.collections();
|
||||||
|
const wantedCollection = availableMongoDbCollections.find((collection) => {
|
||||||
|
return collection.collectionName === this.collectionName;
|
||||||
|
});
|
||||||
|
if (!wantedCollection) {
|
||||||
|
await this.smartdataDb.mongoDb.createCollection(this.collectionName);
|
||||||
|
logger.log('info', `Successfully initiated Collection ${this.collectionName}`);
|
||||||
|
}
|
||||||
|
this.mongoDbCollection = this.smartdataDb.mongoDb.collection(this.collectionName);
|
||||||
|
// Auto-create a compound text index on all searchable fields
|
||||||
|
// Use document constructor's searchableFields registered via decorator
|
||||||
|
const docCtor = (this as any).docCtor;
|
||||||
|
const searchableFields: string[] = docCtor?.searchableFields || [];
|
||||||
|
if (searchableFields.length > 0 && !this.textIndexCreated) {
|
||||||
|
// Build a compound text index spec
|
||||||
|
const indexSpec: Record<string, 'text'> = {};
|
||||||
|
searchableFields.forEach(f => { indexSpec[f] = 'text'; });
|
||||||
|
// Cast to any to satisfy TypeScript IndexSpecification typing
|
||||||
|
await this.mongoDbCollection.createIndex(indexSpec as any, { name: 'smartdata_text_index' });
|
||||||
|
this.textIndexCreated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* mark unique index
|
||||||
|
*/
|
||||||
|
public async markUniqueIndexes(keyArrayArg: string[] = []) {
|
||||||
|
for (const key of keyArrayArg) {
|
||||||
|
if (!this.uniqueIndexes.includes(key)) {
|
||||||
|
await this.mongoDbCollection.createIndex({ [key]: 1 }, {
|
||||||
|
unique: true,
|
||||||
|
});
|
||||||
|
// make sure we only call this once and not for every doc we create
|
||||||
|
this.uniqueIndexes.push(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates regular indexes for the collection
|
||||||
|
*/
|
||||||
|
public async createRegularIndexes(indexesArg: Array<{field: string, options: IIndexOptions}> = []) {
|
||||||
|
for (const indexDef of indexesArg) {
|
||||||
|
// Check if we've already created this index
|
||||||
|
const indexKey = indexDef.field;
|
||||||
|
if (!this.regularIndexes.some(i => i.field === indexKey)) {
|
||||||
|
await this.mongoDbCollection.createIndex(
|
||||||
|
{ [indexDef.field]: 1 }, // Simple single-field index
|
||||||
|
indexDef.options
|
||||||
|
);
|
||||||
|
// Track that we've created this index
|
||||||
|
this.regularIndexes.push(indexDef);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* adds a validation function that all newly inserted and updated objects have to pass
|
||||||
|
*/
|
||||||
|
public addDocValidation(funcArg: IDocValidationFunc<T>) {
|
||||||
|
this.objectValidation = funcArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* finds an object in the DbCollection
|
||||||
|
*/
|
||||||
|
public async findOne(
|
||||||
|
filterObject: any,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<any> {
|
||||||
|
await this.init();
|
||||||
|
// Use MongoDB driver's findOne with optional session
|
||||||
|
return this.mongoDbCollection.findOne(filterObject, { session: opts?.session });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getCursor(
|
||||||
|
filterObjectArg: any,
|
||||||
|
dbDocArg: typeof SmartDataDbDoc,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<SmartdataDbCursor<any>> {
|
||||||
|
await this.init();
|
||||||
|
const cursor = this.mongoDbCollection.find(filterObjectArg, { session: opts?.session });
|
||||||
|
return new SmartdataDbCursor(cursor, dbDocArg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* finds an object in the DbCollection
|
||||||
|
*/
|
||||||
|
public async findAll(
|
||||||
|
filterObject: any,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<any[]> {
|
||||||
|
await this.init();
|
||||||
|
const cursor = this.mongoDbCollection.find(filterObject, { session: opts?.session });
|
||||||
|
const result = await cursor.toArray();
|
||||||
|
cursor.close();
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Watches the collection, returning a SmartdataDbWatcher with RxJS and EventEmitter support.
|
||||||
|
* @param filterObject match filter for change stream
|
||||||
|
* @param opts optional MongoDB ChangeStreamOptions & { bufferTimeMs } to buffer events
|
||||||
|
* @param smartdataDbDocArg document class for instance creation
|
||||||
|
*/
|
||||||
|
public async watch(
|
||||||
|
filterObject: any,
|
||||||
|
opts: (plugins.mongodb.ChangeStreamOptions & { bufferTimeMs?: number }) = {},
|
||||||
|
smartdataDbDocArg?: typeof SmartDataDbDoc,
|
||||||
|
): Promise<SmartdataDbWatcher> {
|
||||||
|
await this.init();
|
||||||
|
// Extract bufferTimeMs from options
|
||||||
|
const { bufferTimeMs, fullDocument, ...otherOptions } = opts || {};
|
||||||
|
// Determine fullDocument behavior: default to 'updateLookup'
|
||||||
|
const changeStreamOptions: plugins.mongodb.ChangeStreamOptions = {
|
||||||
|
...otherOptions,
|
||||||
|
fullDocument:
|
||||||
|
fullDocument === undefined
|
||||||
|
? 'updateLookup'
|
||||||
|
: (fullDocument as any) === true
|
||||||
|
? 'updateLookup'
|
||||||
|
: fullDocument,
|
||||||
|
} as any;
|
||||||
|
// Build pipeline with match if provided
|
||||||
|
const pipeline = filterObject ? [{ $match: filterObject }] : [];
|
||||||
|
const changeStream = this.mongoDbCollection.watch(
|
||||||
|
pipeline,
|
||||||
|
changeStreamOptions,
|
||||||
|
);
|
||||||
|
const smartdataWatcher = new SmartdataDbWatcher(
|
||||||
|
changeStream,
|
||||||
|
smartdataDbDocArg,
|
||||||
|
{ bufferTimeMs },
|
||||||
|
);
|
||||||
|
await smartdataWatcher.readyDeferred.promise;
|
||||||
|
return smartdataWatcher;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* create an object in the database
|
||||||
|
*/
|
||||||
|
public async insert(
|
||||||
|
dbDocArg: T & SmartDataDbDoc<T, unknown>,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<any> {
|
||||||
|
await this.init();
|
||||||
|
await this.checkDoc(dbDocArg);
|
||||||
|
this.markUniqueIndexes(dbDocArg.uniqueIndexes);
|
||||||
|
|
||||||
|
// Create regular indexes if available
|
||||||
|
if (dbDocArg.regularIndexes && dbDocArg.regularIndexes.length > 0) {
|
||||||
|
this.createRegularIndexes(dbDocArg.regularIndexes);
|
||||||
|
}
|
||||||
|
|
||||||
|
const saveableObject = await dbDocArg.createSavableObject();
|
||||||
|
const result = await this.mongoDbCollection.insertOne(saveableObject, { session: opts?.session });
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* inserts object into the DbCollection
|
||||||
|
*/
|
||||||
|
public async update(
|
||||||
|
dbDocArg: T & SmartDataDbDoc<T, unknown>,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<any> {
|
||||||
|
await this.init();
|
||||||
|
await this.checkDoc(dbDocArg);
|
||||||
|
const identifiableObject = await dbDocArg.createIdentifiableObject();
|
||||||
|
const saveableObject = await dbDocArg.createSavableObject();
|
||||||
|
const updateableObject: any = {};
|
||||||
|
for (const key of Object.keys(saveableObject)) {
|
||||||
|
if (identifiableObject[key]) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
updateableObject[key] = saveableObject[key];
|
||||||
|
}
|
||||||
|
const result = await this.mongoDbCollection.updateOne(
|
||||||
|
identifiableObject,
|
||||||
|
{ $set: updateableObject },
|
||||||
|
{ upsert: true, session: opts?.session },
|
||||||
|
);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async delete(
|
||||||
|
dbDocArg: T & SmartDataDbDoc<T, unknown>,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<any> {
|
||||||
|
await this.init();
|
||||||
|
await this.checkDoc(dbDocArg);
|
||||||
|
const identifiableObject = await dbDocArg.createIdentifiableObject();
|
||||||
|
await this.mongoDbCollection.deleteOne(identifiableObject, { session: opts?.session });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getCount(
|
||||||
|
filterObject: any,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
) {
|
||||||
|
await this.init();
|
||||||
|
return this.mongoDbCollection.countDocuments(filterObject, { session: opts?.session });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* checks a Doc for constraints
|
||||||
|
* if this.objectValidation is not set it passes.
|
||||||
|
*/
|
||||||
|
private checkDoc(docArg: T): Promise<void> {
|
||||||
|
const done = plugins.smartpromise.defer<void>();
|
||||||
|
let validationResult = true;
|
||||||
|
if (this.objectValidation) {
|
||||||
|
validationResult = this.objectValidation(docArg);
|
||||||
|
}
|
||||||
|
if (validationResult) {
|
||||||
|
done.resolve();
|
||||||
|
} else {
|
||||||
|
done.reject('validation of object did not pass');
|
||||||
|
}
|
||||||
|
return done.promise;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
import { SmartdataCollection } from './smartdata.classes.collection.js';
|
import { SmartdataCollection } from './classes.collection.js';
|
||||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
import { SmartdataDb } from './classes.db.js';
|
||||||
|
|
||||||
export class CollectionFactory {
|
export class CollectionFactory {
|
||||||
public collections: { [key: string]: SmartdataCollection<any> } = {};
|
public collections: { [key: string]: SmartdataCollection<any> } = {};
|
||||||
5
ts/classes.convenience.ts
Normal file
5
ts/classes.convenience.ts
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
export const getNewUniqueId = async (prefixArg?: string) => {
|
||||||
|
return plugins.smartunique.uni(prefixArg);
|
||||||
|
};
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
|
import { SmartDataDbDoc } from './classes.doc.js';
|
||||||
import * as plugins from './smartdata.plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* a wrapper for the native mongodb cursor. Exposes better
|
* a wrapper for the native mongodb cursor. Exposes better
|
||||||
@@ -15,21 +15,24 @@ export class SmartdataDbCursor<T = any> {
|
|||||||
this.smartdataDbDoc = dbDocArg;
|
this.smartdataDbDoc = dbDocArg;
|
||||||
}
|
}
|
||||||
|
|
||||||
public async next(closeAtEnd = true) {
|
public async next(closeAtEnd = true): Promise<T> {
|
||||||
const result = this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(await this.mongodbCursor.next());
|
const result = this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(
|
||||||
|
await this.mongodbCursor.next(),
|
||||||
|
);
|
||||||
if (!result && closeAtEnd) {
|
if (!result && closeAtEnd) {
|
||||||
await this.close();
|
await this.close();
|
||||||
}
|
}
|
||||||
return result;
|
return result as T;
|
||||||
}
|
}
|
||||||
|
|
||||||
public async forEach(forEachFuncArg: (itemArg: any) => Promise<any>, closeCursorAtEnd = true) {
|
public async forEach(forEachFuncArg: (itemArg: T) => Promise<any>, closeCursorAtEnd = true) {
|
||||||
let nextDocument: any;
|
let nextDocument: any;
|
||||||
do {
|
do {
|
||||||
nextDocument = await this.mongodbCursor.next();
|
nextDocument = await this.mongodbCursor.next();
|
||||||
if (nextDocument) {
|
if (nextDocument) {
|
||||||
const nextClassInstance = this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(nextDocument);
|
const nextClassInstance =
|
||||||
await forEachFuncArg(nextClassInstance);
|
this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(nextDocument);
|
||||||
|
await forEachFuncArg(nextClassInstance as any);
|
||||||
}
|
}
|
||||||
} while (nextDocument);
|
} while (nextDocument);
|
||||||
if (closeCursorAtEnd) {
|
if (closeCursorAtEnd) {
|
||||||
@@ -37,6 +40,11 @@ export class SmartdataDbCursor<T = any> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async toArray(): Promise<T[]> {
|
||||||
|
const result = await this.mongodbCursor.toArray();
|
||||||
|
return result.map((itemArg) => this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(itemArg)) as T[];
|
||||||
|
}
|
||||||
|
|
||||||
public async close() {
|
public async close() {
|
||||||
await this.mongodbCursor.close();
|
await this.mongodbCursor.close();
|
||||||
}
|
}
|
||||||
109
ts/classes.db.ts
Normal file
109
ts/classes.db.ts
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
import { SmartdataCollection } from './classes.collection.js';
|
||||||
|
import { EasyStore } from './classes.easystore.js';
|
||||||
|
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* interface - indicates the connection status of the db
|
||||||
|
*/
|
||||||
|
export type TConnectionStatus = 'initial' | 'disconnected' | 'connected' | 'failed';
|
||||||
|
|
||||||
|
export class SmartdataDb {
|
||||||
|
smartdataOptions: plugins.tsclass.database.IMongoDescriptor;
|
||||||
|
mongoDbClient: plugins.mongodb.MongoClient;
|
||||||
|
mongoDb: plugins.mongodb.Db;
|
||||||
|
status: TConnectionStatus;
|
||||||
|
statusConnectedDeferred = plugins.smartpromise.defer();
|
||||||
|
smartdataCollectionMap = new plugins.lik.ObjectMap<SmartdataCollection<any>>();
|
||||||
|
|
||||||
|
constructor(smartdataOptions: plugins.tsclass.database.IMongoDescriptor) {
|
||||||
|
this.smartdataOptions = smartdataOptions;
|
||||||
|
this.status = 'initial';
|
||||||
|
}
|
||||||
|
|
||||||
|
// easystore
|
||||||
|
public async createEasyStore(nameIdArg: string) {
|
||||||
|
const easyStore = new EasyStore(nameIdArg, this);
|
||||||
|
return easyStore;
|
||||||
|
}
|
||||||
|
|
||||||
|
// basic connection stuff ----------------------------------------------
|
||||||
|
|
||||||
|
/**
|
||||||
|
* connects to the database that was specified during instance creation
|
||||||
|
*/
|
||||||
|
public async init(): Promise<any> {
|
||||||
|
try {
|
||||||
|
// Safely encode credentials to handle special characters
|
||||||
|
const encodedUser = this.smartdataOptions.mongoDbUser
|
||||||
|
? encodeURIComponent(this.smartdataOptions.mongoDbUser)
|
||||||
|
: '';
|
||||||
|
const encodedPass = this.smartdataOptions.mongoDbPass
|
||||||
|
? encodeURIComponent(this.smartdataOptions.mongoDbPass)
|
||||||
|
: '';
|
||||||
|
|
||||||
|
const finalConnectionUrl = this.smartdataOptions.mongoDbUrl
|
||||||
|
.replace('<USERNAME>', encodedUser)
|
||||||
|
.replace('<username>', encodedUser)
|
||||||
|
.replace('<USER>', encodedUser)
|
||||||
|
.replace('<user>', encodedUser)
|
||||||
|
.replace('<PASSWORD>', encodedPass)
|
||||||
|
.replace('<password>', encodedPass)
|
||||||
|
.replace('<DBNAME>', this.smartdataOptions.mongoDbName)
|
||||||
|
.replace('<dbname>', this.smartdataOptions.mongoDbName);
|
||||||
|
|
||||||
|
const clientOptions: plugins.mongodb.MongoClientOptions = {
|
||||||
|
maxPoolSize: (this.smartdataOptions as any).maxPoolSize ?? 100,
|
||||||
|
maxIdleTimeMS: (this.smartdataOptions as any).maxIdleTimeMS ?? 300000, // 5 minutes default
|
||||||
|
serverSelectionTimeoutMS: (this.smartdataOptions as any).serverSelectionTimeoutMS ?? 30000,
|
||||||
|
retryWrites: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
this.mongoDbClient = await plugins.mongodb.MongoClient.connect(finalConnectionUrl, clientOptions);
|
||||||
|
this.mongoDb = this.mongoDbClient.db(this.smartdataOptions.mongoDbName);
|
||||||
|
this.status = 'connected';
|
||||||
|
this.statusConnectedDeferred.resolve();
|
||||||
|
logger.log('info', `Connected to database ${this.smartdataOptions.mongoDbName}`);
|
||||||
|
} catch (error) {
|
||||||
|
this.status = 'disconnected';
|
||||||
|
this.statusConnectedDeferred.reject(error);
|
||||||
|
logger.log('error', `Failed to connect to database ${this.smartdataOptions.mongoDbName}: ${error.message}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* closes the connection to the databse
|
||||||
|
*/
|
||||||
|
public async close(): Promise<any> {
|
||||||
|
await this.mongoDbClient.close();
|
||||||
|
this.status = 'disconnected';
|
||||||
|
logger.log('info', `disconnected from database ${this.smartdataOptions.mongoDbName}`);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Start a MongoDB client session for transactions
|
||||||
|
*/
|
||||||
|
public startSession(): plugins.mongodb.ClientSession {
|
||||||
|
return this.mongoDbClient.startSession();
|
||||||
|
}
|
||||||
|
|
||||||
|
// handle table to class distribution
|
||||||
|
|
||||||
|
public addCollection(SmartdataCollectionArg: SmartdataCollection<any>) {
|
||||||
|
this.smartdataCollectionMap.add(SmartdataCollectionArg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets a collection's name and returns a SmartdataCollection instance
|
||||||
|
* @param nameArg
|
||||||
|
* @returns DbTable
|
||||||
|
*/
|
||||||
|
public async getSmartdataCollectionByName<T>(nameArg: string): Promise<SmartdataCollection<T>> {
|
||||||
|
const resultCollection = await this.smartdataCollectionMap.find(async (dbTableArg) => {
|
||||||
|
return dbTableArg.collectionName === nameArg;
|
||||||
|
});
|
||||||
|
return resultCollection;
|
||||||
|
}
|
||||||
|
}
|
||||||
305
ts/classes.distributedcoordinator.ts
Normal file
305
ts/classes.distributedcoordinator.ts
Normal file
@@ -0,0 +1,305 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { SmartdataDb } from './classes.db.js';
|
||||||
|
import { managed, setDefaultManagerForDoc } from './classes.collection.js';
|
||||||
|
import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';
|
||||||
|
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
|
||||||
|
@managed()
|
||||||
|
export class DistributedClass extends SmartDataDbDoc<DistributedClass, DistributedClass> {
|
||||||
|
// INSTANCE
|
||||||
|
@unI()
|
||||||
|
public id: string;
|
||||||
|
|
||||||
|
@svDb()
|
||||||
|
public data: {
|
||||||
|
status: 'initializing' | 'bidding' | 'settled' | 'stopped';
|
||||||
|
biddingShortcode?: string;
|
||||||
|
biddingStartTime?: number;
|
||||||
|
lastUpdated: number;
|
||||||
|
elected: boolean;
|
||||||
|
/**
|
||||||
|
* used to store request
|
||||||
|
*/
|
||||||
|
taskRequests: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest[];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* only used by the leader to convey consultation results
|
||||||
|
*/
|
||||||
|
taskRequestResults: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequestResult[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This file implements a distributed coordinator according to the @pushrocks/taskbuffer standard.
|
||||||
|
* you should not set up this yourself. Instead, there is a factory on the SmartdataDb class
|
||||||
|
* that will take care of setting this up.
|
||||||
|
*/
|
||||||
|
export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distributedCoordination
|
||||||
|
.AbstractDistributedCoordinator {
|
||||||
|
public readyPromise: Promise<any>;
|
||||||
|
public db: SmartdataDb;
|
||||||
|
private asyncExecutionStack = new plugins.lik.AsyncExecutionStack();
|
||||||
|
public ownInstance: DistributedClass;
|
||||||
|
public distributedWatcher: SmartdataDbWatcher<DistributedClass>;
|
||||||
|
|
||||||
|
constructor(dbArg: SmartdataDb) {
|
||||||
|
super();
|
||||||
|
this.db = dbArg;
|
||||||
|
setDefaultManagerForDoc(this, DistributedClass);
|
||||||
|
this.readyPromise = this.db.statusConnectedDeferred.promise;
|
||||||
|
}
|
||||||
|
|
||||||
|
// smartdata specific stuff
|
||||||
|
public async start() {
|
||||||
|
await this.init();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async stop() {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
if (this.distributedWatcher) {
|
||||||
|
await this.distributedWatcher.close();
|
||||||
|
}
|
||||||
|
if (this.ownInstance?.data.elected) {
|
||||||
|
this.ownInstance.data.elected = false;
|
||||||
|
}
|
||||||
|
if (this.ownInstance?.data.status === 'stopped') {
|
||||||
|
logger.log('warn', `stopping a distributed instance that has not been started yet.`);
|
||||||
|
}
|
||||||
|
this.ownInstance.data.status = 'stopped';
|
||||||
|
await this.ownInstance.save();
|
||||||
|
logger.log('info', `stopped ${this.ownInstance.id}`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public id = plugins.smartunique.uni('distributedInstance');
|
||||||
|
|
||||||
|
private startHeartbeat = async () => {
|
||||||
|
while (this.ownInstance.data.status !== 'stopped') {
|
||||||
|
await this.sendHeartbeat();
|
||||||
|
await plugins.smartdelay.delayForRandom(5000, 10000);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
public async sendHeartbeat() {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
if (this.ownInstance.data.status === 'stopped') {
|
||||||
|
logger.log('debug', `aborted sending heartbeat because status is stopped`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await this.ownInstance.updateFromDb();
|
||||||
|
this.ownInstance.data.lastUpdated = Date.now();
|
||||||
|
await this.ownInstance.save();
|
||||||
|
logger.log('debug', `sent heartbeat for ${this.ownInstance.id}`);
|
||||||
|
const allInstances = DistributedClass.getInstances({});
|
||||||
|
});
|
||||||
|
if (this.ownInstance.data.status === 'stopped') {
|
||||||
|
logger.log('info', `aborted sending heartbeat because status is stopped`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const eligibleLeader = await this.getEligibleLeader();
|
||||||
|
// not awaiting here because we don't want to block the heartbeat
|
||||||
|
this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
if (!eligibleLeader && this.ownInstance.data.status === 'settled') {
|
||||||
|
this.checkAndMaybeLead();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
private async init() {
|
||||||
|
await this.readyPromise;
|
||||||
|
if (!this.ownInstance) {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
this.ownInstance = new DistributedClass();
|
||||||
|
this.ownInstance.id = this.id;
|
||||||
|
this.ownInstance.data = {
|
||||||
|
elected: false,
|
||||||
|
lastUpdated: Date.now(),
|
||||||
|
status: 'initializing',
|
||||||
|
taskRequests: [],
|
||||||
|
taskRequestResults: [],
|
||||||
|
};
|
||||||
|
await this.ownInstance.save();
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
logger.log('warn', `distributed instance already initialized`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// lets enable the heartbeat
|
||||||
|
this.startHeartbeat();
|
||||||
|
|
||||||
|
// lets do a leader check
|
||||||
|
await this.checkAndMaybeLead();
|
||||||
|
|
||||||
|
return this.ownInstance;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getEligibleLeader() {
|
||||||
|
return this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
const allInstances = await DistributedClass.getInstances({});
|
||||||
|
let leaders = allInstances.filter((instanceArg) => instanceArg.data.elected === true);
|
||||||
|
const eligibleLeader = leaders.find(
|
||||||
|
(leader) =>
|
||||||
|
leader.data.lastUpdated >=
|
||||||
|
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 20 }),
|
||||||
|
);
|
||||||
|
return eligibleLeader;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// --> leader election
|
||||||
|
public async checkAndMaybeLead() {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
this.ownInstance.data.status = 'initializing';
|
||||||
|
await this.ownInstance.save();
|
||||||
|
});
|
||||||
|
if (await this.getEligibleLeader()) {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
await this.ownInstance.updateFromDb();
|
||||||
|
this.ownInstance.data.status = 'settled';
|
||||||
|
await this.ownInstance.save();
|
||||||
|
logger.log('info', `${this.ownInstance.id} settled as follower`);
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
} else if (
|
||||||
|
(await DistributedClass.getInstances({})).find((instanceArg) => {
|
||||||
|
return instanceArg.data.status === 'bidding' &&
|
||||||
|
instanceArg.data.biddingStartTime <= Date.now() - 4000 &&
|
||||||
|
instanceArg.data.biddingStartTime >= Date.now() - 30000;
|
||||||
|
})
|
||||||
|
) {
|
||||||
|
logger.log('info', 'too late to the bidding party... waiting for next round.');
|
||||||
|
return;
|
||||||
|
} else {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
await this.ownInstance.updateFromDb();
|
||||||
|
this.ownInstance.data.status = 'bidding';
|
||||||
|
this.ownInstance.data.biddingStartTime = Date.now();
|
||||||
|
this.ownInstance.data.biddingShortcode = plugins.smartunique.shortId();
|
||||||
|
await this.ownInstance.save();
|
||||||
|
logger.log('info', 'bidding code stored.');
|
||||||
|
});
|
||||||
|
logger.log('info', `bidding for leadership...`);
|
||||||
|
await plugins.smartdelay.delayFor(plugins.smarttime.getMilliSecondsFromUnits({ seconds: 5 }));
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
let biddingInstances = await DistributedClass.getInstances({});
|
||||||
|
biddingInstances = biddingInstances.filter(
|
||||||
|
(instanceArg) =>
|
||||||
|
instanceArg.data.status === 'bidding' &&
|
||||||
|
instanceArg.data.lastUpdated >=
|
||||||
|
Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 10 }),
|
||||||
|
);
|
||||||
|
logger.log('info', `found ${biddingInstances.length} bidding instances...`);
|
||||||
|
this.ownInstance.data.elected = true;
|
||||||
|
for (const biddingInstance of biddingInstances) {
|
||||||
|
if (biddingInstance.data.biddingShortcode < this.ownInstance.data.biddingShortcode) {
|
||||||
|
this.ownInstance.data.elected = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await plugins.smartdelay.delayFor(5000);
|
||||||
|
logger.log('info', `settling with status elected = ${this.ownInstance.data.elected}`);
|
||||||
|
this.ownInstance.data.status = 'settled';
|
||||||
|
await this.ownInstance.save();
|
||||||
|
});
|
||||||
|
if (this.ownInstance.data.elected) {
|
||||||
|
this.leadFunction();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* when it has been determined
|
||||||
|
* that this instance is leading
|
||||||
|
* the leading is implemented here
|
||||||
|
*/
|
||||||
|
public async leadFunction() {
|
||||||
|
this.distributedWatcher = await DistributedClass.watch({});
|
||||||
|
|
||||||
|
const currentTaskRequests: Array<{
|
||||||
|
taskName: string;
|
||||||
|
taskExecutionTime: number;
|
||||||
|
/**
|
||||||
|
* all instances that requested this task
|
||||||
|
*/
|
||||||
|
requestingDistibutedInstanceIds: string[];
|
||||||
|
responseTimeout: plugins.smartdelay.Timeout<any>;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
this.distributedWatcher.changeSubject.subscribe({
|
||||||
|
next: async (distributedDoc) => {
|
||||||
|
if (!distributedDoc) {
|
||||||
|
logger.log('info', `registered deletion of instance...`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
logger.log('info', distributedDoc);
|
||||||
|
logger.log('info', `registered change for ${distributedDoc.id}`);
|
||||||
|
distributedDoc;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
while (this.ownInstance.data.status !== 'stopped' && this.ownInstance.data.elected) {
|
||||||
|
const allInstances = await DistributedClass.getInstances({});
|
||||||
|
for (const instance of allInstances) {
|
||||||
|
if (instance.data.status === 'stopped') {
|
||||||
|
await instance.delete();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await plugins.smartdelay.delayFor(10000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// abstract implemented methods
|
||||||
|
public async fireDistributedTaskRequest(
|
||||||
|
taskRequestArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
|
||||||
|
): Promise<plugins.taskbuffer.distributedCoordination.IDistributedTaskRequestResult> {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
if (!this.ownInstance) {
|
||||||
|
logger.log('error', 'instance need to be started first...');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await this.ownInstance.updateFromDb();
|
||||||
|
this.ownInstance.data.taskRequests.push(taskRequestArg);
|
||||||
|
await this.ownInstance.save();
|
||||||
|
});
|
||||||
|
await plugins.smartdelay.delayFor(10000);
|
||||||
|
const result = await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
await this.ownInstance.updateFromDb();
|
||||||
|
const taskRequestResult = this.ownInstance.data.taskRequestResults.find((resultItem) => {
|
||||||
|
return resultItem.requestResponseId === taskRequestArg.requestResponseId;
|
||||||
|
});
|
||||||
|
return taskRequestResult;
|
||||||
|
});
|
||||||
|
if (!result) {
|
||||||
|
logger.log('warn', 'no result found for task request...');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async updateDistributedTaskRequest(
|
||||||
|
infoBasisArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
|
||||||
|
): Promise<void> {
|
||||||
|
await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
const existingInfoBasis = this.ownInstance.data.taskRequests.find((infoBasisItem) => {
|
||||||
|
return (
|
||||||
|
infoBasisItem.taskName === infoBasisArg.taskName &&
|
||||||
|
infoBasisItem.taskExecutionTime === infoBasisArg.taskExecutionTime
|
||||||
|
);
|
||||||
|
});
|
||||||
|
if (!existingInfoBasis) {
|
||||||
|
logger.log('warn', 'trying to update a non existing task request... aborting!');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
Object.assign(existingInfoBasis, infoBasisArg);
|
||||||
|
await this.ownInstance.save();
|
||||||
|
plugins.smartdelay.delayFor(60000).then(() => {
|
||||||
|
this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
|
||||||
|
const indexToRemove = this.ownInstance.data.taskRequests.indexOf(existingInfoBasis);
|
||||||
|
if (indexToRemove >= 0) {
|
||||||
|
this.ownInstance.data.taskRequests.splice(indexToRemove, 1);
|
||||||
|
await this.ownInstance.save();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
976
ts/classes.doc.ts
Normal file
976
ts/classes.doc.ts
Normal file
@@ -0,0 +1,976 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
import { SmartdataDb } from './classes.db.js';
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
import { SmartdataDbCursor } from './classes.cursor.js';
|
||||||
|
import { type IManager, SmartdataCollection } from './classes.collection.js';
|
||||||
|
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||||
|
import { SmartdataLuceneAdapter } from './classes.lucene.adapter.js';
|
||||||
|
/**
|
||||||
|
* Search options for `.search()`:
|
||||||
|
* - filter: additional MongoDB query to AND-merge
|
||||||
|
* - validate: post-fetch validator, return true to keep a doc
|
||||||
|
*/
|
||||||
|
export interface SearchOptions<T> {
|
||||||
|
/**
|
||||||
|
* Additional MongoDB filter to AND‐merge into the query
|
||||||
|
*/
|
||||||
|
filter?: Record<string, any>;
|
||||||
|
/**
|
||||||
|
* Post‐fetch validator; return true to keep each doc
|
||||||
|
*/
|
||||||
|
validate?: (doc: T) => Promise<boolean> | boolean;
|
||||||
|
/**
|
||||||
|
* Optional MongoDB session for transactional operations
|
||||||
|
*/
|
||||||
|
session?: plugins.mongodb.ClientSession;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type TDocCreation = 'db' | 'new' | 'mixed';
|
||||||
|
|
||||||
|
// Type for decorator metadata - extends TypeScript's built-in DecoratorMetadataObject
|
||||||
|
interface ISmartdataDecoratorMetadata extends DecoratorMetadataObject {
|
||||||
|
globalSaveableProperties?: string[];
|
||||||
|
saveableProperties?: string[];
|
||||||
|
uniqueIndexes?: string[];
|
||||||
|
regularIndexes?: Array<{field: string, options: IIndexOptions}>;
|
||||||
|
searchableFields?: string[];
|
||||||
|
_svDbOptions?: Record<string, SvDbOptions>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function globalSvDb() {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
if (context.kind !== 'field') {
|
||||||
|
throw new Error('globalSvDb can only decorate fields');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store metadata at class level using Symbol.metadata
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.globalSaveableProperties) {
|
||||||
|
metadata.globalSaveableProperties = [];
|
||||||
|
}
|
||||||
|
metadata.globalSaveableProperties.push(String(context.name));
|
||||||
|
|
||||||
|
logger.log('debug', `called globalSvDb() on metadata for property ${String(context.name)}`);
|
||||||
|
|
||||||
|
// Use addInitializer to ensure prototype arrays are set up once
|
||||||
|
context.addInitializer(function(this: any) {
|
||||||
|
const proto = this.constructor.prototype;
|
||||||
|
const metadata = this.constructor[Symbol.metadata];
|
||||||
|
|
||||||
|
if (metadata && metadata.globalSaveableProperties && !proto.globalSaveableProperties) {
|
||||||
|
// Initialize prototype array from metadata (runs once per class)
|
||||||
|
proto.globalSaveableProperties = [...metadata.globalSaveableProperties];
|
||||||
|
logger.log('debug', `initialized globalSaveableProperties with ${proto.globalSaveableProperties.length} properties`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Options for custom serialization/deserialization of a field.
 * The deserialize hook is applied when hydrating instances from raw
 * MongoDB documents (see createInstanceFromMongoDbNativeDoc).
 */
export interface SvDbOptions {
  /** Function to serialize the field value before saving to DB */
  serialize?: (value: any) => any;
  /** Function to deserialize the field value after reading from DB */
  deserialize?: (value: any) => any;
}
/**
|
||||||
|
* saveable - saveable decorator to be used on class properties
|
||||||
|
*/
|
||||||
|
export function svDb(options?: SvDbOptions) {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
if (context.kind !== 'field') {
|
||||||
|
throw new Error('svDb can only decorate fields');
|
||||||
|
}
|
||||||
|
|
||||||
|
const propName = String(context.name);
|
||||||
|
|
||||||
|
// Store metadata at class level using Symbol.metadata
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.saveableProperties) {
|
||||||
|
metadata.saveableProperties = [];
|
||||||
|
}
|
||||||
|
metadata.saveableProperties.push(propName);
|
||||||
|
|
||||||
|
// Store options in metadata
|
||||||
|
if (options) {
|
||||||
|
if (!metadata._svDbOptions) {
|
||||||
|
metadata._svDbOptions = {};
|
||||||
|
}
|
||||||
|
metadata._svDbOptions[propName] = options;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('debug', `called svDb() on metadata for property ${propName}`);
|
||||||
|
|
||||||
|
// Use addInitializer to ensure prototype arrays are set up once
|
||||||
|
context.addInitializer(function(this: any) {
|
||||||
|
const proto = this.constructor.prototype;
|
||||||
|
const ctor = this.constructor;
|
||||||
|
const metadata = ctor[Symbol.metadata];
|
||||||
|
|
||||||
|
if (metadata && metadata.saveableProperties && !proto.saveableProperties) {
|
||||||
|
// Initialize prototype array from metadata (runs once per class)
|
||||||
|
proto.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
logger.log('debug', `initialized saveableProperties with ${proto.saveableProperties.length} properties`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize svDbOptions from metadata
|
||||||
|
if (metadata && metadata._svDbOptions && !ctor._svDbOptions) {
|
||||||
|
ctor._svDbOptions = { ...metadata._svDbOptions };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* searchable - marks a property as searchable with Lucene query syntax
|
||||||
|
*/
|
||||||
|
export function searchable() {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
if (context.kind !== 'field') {
|
||||||
|
throw new Error('searchable can only decorate fields');
|
||||||
|
}
|
||||||
|
|
||||||
|
const propName = String(context.name);
|
||||||
|
|
||||||
|
// Store metadata at class level
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.searchableFields) {
|
||||||
|
metadata.searchableFields = [];
|
||||||
|
}
|
||||||
|
metadata.searchableFields.push(propName);
|
||||||
|
|
||||||
|
// Use addInitializer to set up constructor property once
|
||||||
|
context.addInitializer(function(this: any) {
|
||||||
|
const ctor = this.constructor as any;
|
||||||
|
const metadata = ctor[Symbol.metadata];
|
||||||
|
|
||||||
|
if (metadata && metadata.searchableFields && !Array.isArray(ctor.searchableFields)) {
|
||||||
|
// Initialize from metadata (runs once per class)
|
||||||
|
ctor.searchableFields = [...metadata.searchableFields];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Escape user input for safe use in MongoDB regular expressions
|
||||||
|
function escapeForRegex(input: string): string {
|
||||||
|
return input.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* unique index - decorator to mark a unique index
|
||||||
|
*/
|
||||||
|
export function unI() {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
if (context.kind !== 'field') {
|
||||||
|
throw new Error('unI can only decorate fields');
|
||||||
|
}
|
||||||
|
|
||||||
|
const propName = String(context.name);
|
||||||
|
|
||||||
|
// Store metadata at class level
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.uniqueIndexes) {
|
||||||
|
metadata.uniqueIndexes = [];
|
||||||
|
}
|
||||||
|
metadata.uniqueIndexes.push(propName);
|
||||||
|
|
||||||
|
// Also mark as saveable
|
||||||
|
if (!metadata.saveableProperties) {
|
||||||
|
metadata.saveableProperties = [];
|
||||||
|
}
|
||||||
|
if (!metadata.saveableProperties.includes(propName)) {
|
||||||
|
metadata.saveableProperties.push(propName);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('debug', `called unI on metadata for property ${propName}`);
|
||||||
|
|
||||||
|
// Use addInitializer to ensure prototype arrays are set up once
|
||||||
|
context.addInitializer(function(this: any) {
|
||||||
|
const proto = this.constructor.prototype;
|
||||||
|
const metadata = this.constructor[Symbol.metadata];
|
||||||
|
|
||||||
|
if (metadata && metadata.uniqueIndexes && !proto.uniqueIndexes) {
|
||||||
|
proto.uniqueIndexes = [...metadata.uniqueIndexes];
|
||||||
|
logger.log('debug', `initialized uniqueIndexes with ${proto.uniqueIndexes.length} properties`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (metadata && metadata.saveableProperties && !proto.saveableProperties) {
|
||||||
|
proto.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Options forwarded to MongoDB index creation for @index-decorated fields.
 */
export interface IIndexOptions {
  /** Build the index without blocking other operations */
  background?: boolean;
  /** Enforce uniqueness of the indexed field */
  unique?: boolean;
  /** Skip documents that lack the indexed field */
  sparse?: boolean;
  /** TTL in seconds after which documents expire */
  expireAfterSeconds?: number;
  /** Any further native index options are passed through untouched */
  [key: string]: any;
}
/**
|
||||||
|
* index - decorator to mark a field for regular indexing
|
||||||
|
*/
|
||||||
|
export function index(options?: IIndexOptions) {
|
||||||
|
return (value: undefined, context: ClassFieldDecoratorContext) => {
|
||||||
|
if (context.kind !== 'field') {
|
||||||
|
throw new Error('index can only decorate fields');
|
||||||
|
}
|
||||||
|
|
||||||
|
const propName = String(context.name);
|
||||||
|
|
||||||
|
// Store metadata at class level
|
||||||
|
const metadata = context.metadata as ISmartdataDecoratorMetadata;
|
||||||
|
if (!metadata.regularIndexes) {
|
||||||
|
metadata.regularIndexes = [];
|
||||||
|
}
|
||||||
|
metadata.regularIndexes.push({
|
||||||
|
field: propName,
|
||||||
|
options: options || {}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Also mark as saveable
|
||||||
|
if (!metadata.saveableProperties) {
|
||||||
|
metadata.saveableProperties = [];
|
||||||
|
}
|
||||||
|
if (!metadata.saveableProperties.includes(propName)) {
|
||||||
|
metadata.saveableProperties.push(propName);
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('debug', `called index() on metadata for property ${propName}`);
|
||||||
|
|
||||||
|
// Use addInitializer to ensure prototype arrays are set up once
|
||||||
|
context.addInitializer(function(this: any) {
|
||||||
|
const proto = this.constructor.prototype;
|
||||||
|
const metadata = this.constructor[Symbol.metadata];
|
||||||
|
|
||||||
|
if (metadata && metadata.regularIndexes && !proto.regularIndexes) {
|
||||||
|
proto.regularIndexes = [...metadata.regularIndexes];
|
||||||
|
logger.log('debug', `initialized regularIndexes with ${proto.regularIndexes.length} indexes`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (metadata && metadata.saveableProperties && !proto.saveableProperties) {
|
||||||
|
proto.saveableProperties = [...metadata.saveableProperties];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Helper type to extract the element type from arrays, or return T itself for non-arrays
type ElementOf<T> = T extends ReadonlyArray<infer U> ? U : T;

// Type for $in/$nin values - arrays of the element type
type InValues<T> = ReadonlyArray<ElementOf<T>>;

/**
 * A filter for a single field: either a direct equality value of type T,
 * or an object of MongoDB comparison/array/element operators on T.
 * Array-only operators ($all/$elemMatch/$size) resolve to `never` for
 * non-array fields so misuse is caught at compile time.
 */
export type MongoFilterCondition<T> = T | {
  $eq?: T;
  $ne?: T;
  $gt?: T;
  $gte?: T;
  $lt?: T;
  $lte?: T;
  $in?: InValues<T>;
  $nin?: InValues<T>;
  $exists?: boolean;
  $type?: string | number;
  $regex?: string | RegExp;
  $options?: string;
  $all?: T extends ReadonlyArray<infer U> ? ReadonlyArray<U> : never;
  $elemMatch?: T extends ReadonlyArray<infer U> ? MongoFilter<U> : never;
  $size?: T extends ReadonlyArray<any> ? number : never;
  $not?: MongoFilterCondition<T>;
};

/**
 * Type-safe MongoDB filter for documents of type T: nested objects may be
 * filtered structurally or via operators, plus top-level logical operators.
 */
export type MongoFilter<T> = {
  [K in keyof T]?: T[K] extends object
    ? T[K] extends any[]
      ? MongoFilterCondition<T[K]> // Arrays can have operators
      : MongoFilter<T[K]> | MongoFilterCondition<T[K]> // Objects can be nested or have operators
    : MongoFilterCondition<T[K]>; // Primitives get operators
} & {
  // Logical operators
  $and?: MongoFilter<T>[];
  $or?: MongoFilter<T>[];
  $nor?: MongoFilter<T>[];
  $not?: MongoFilter<T>;
  // Allow any string key for dot notation (we lose type safety here but maintain flexibility)
  [key: string]: any;
};
/**
 * Normalizes a user-supplied filter into a MongoDB-native query:
 * - rejects `$where` (server-side JS execution) outright
 * - recursively processes logical operators ($and/$or/$nor/$not)
 * - flattens nested plain objects into dot-notation paths
 * - validates and passes through whitelisted query operators
 */
export const convertFilterForMongoDb = (filterArg: { [key: string]: any }) => {
  // SECURITY: Block $where to prevent server-side JS execution
  if (filterArg.$where !== undefined) {
    throw new Error('$where operator is not allowed for security reasons');
  }

  // Handle logical operators recursively
  const logicalOperators = ['$and', '$or', '$nor', '$not'];
  const processedFilter: { [key: string]: any } = {};

  for (const key of Object.keys(filterArg)) {
    if (logicalOperators.includes(key)) {
      if (key === '$not') {
        // $not takes a single sub-filter, not an array
        processedFilter[key] = convertFilterForMongoDb(filterArg[key]);
      } else if (Array.isArray(filterArg[key])) {
        // NOTE(review): a non-array value under $and/$or/$nor is silently dropped here
        processedFilter[key] = filterArg[key].map((subFilter: any) => convertFilterForMongoDb(subFilter));
      }
    }
  }

  // If only logical operators, return them
  const hasOnlyLogicalOperators = Object.keys(filterArg).every(key => logicalOperators.includes(key));
  if (hasOnlyLogicalOperators) {
    return processedFilter;
  }

  // Original conversion logic for non-MongoDB query objects
  const convertedFilter: { [key: string]: any } = {};

  // Helper to merge operator objects that land on the same dot-notation path
  const mergeIntoConverted = (path: string, value: any) => {
    const existing = convertedFilter[path];
    if (!existing) {
      convertedFilter[path] = value;
    } else if (
      typeof existing === 'object' && !Array.isArray(existing) &&
      typeof value === 'object' && !Array.isArray(value) &&
      (Object.keys(existing).some(k => k.startsWith('$')) || Object.keys(value).some(k => k.startsWith('$')))
    ) {
      // Both have operators, merge them
      convertedFilter[path] = { ...existing, ...value };
    } else {
      // Otherwise later wins
      convertedFilter[path] = value;
    }
  };

  const convertFilterArgument = (keyPathArg2: string, filterArg2: any) => {
    if (Array.isArray(filterArg2)) {
      // Arrays are typically used as values for operators like $in or as direct equality matches
      mergeIntoConverted(keyPathArg2, filterArg2);
      return;
    } else if (typeof filterArg2 === 'object' && filterArg2 !== null) {
      // Check if this is an object with MongoDB operators
      const keys = Object.keys(filterArg2);
      const hasOperators = keys.some(key => key.startsWith('$'));

      if (hasOperators) {
        // This object contains MongoDB operators
        // Validate and pass through allowed operators
        const allowedOperators = [
          // Comparison operators
          '$eq', '$ne', '$gt', '$gte', '$lt', '$lte',
          // Array operators
          '$in', '$nin', '$all', '$elemMatch', '$size',
          // Element operators
          '$exists', '$type',
          // Evaluation operators (safe ones only)
          '$regex', '$options', '$text', '$mod',
          // Logical operators (nested)
          '$and', '$or', '$nor', '$not'
        ];

        // Check for dangerous operators
        if (keys.includes('$where')) {
          throw new Error('$where operator is not allowed for security reasons');
        }

        // Validate all operators are in the allowed list (warn-only, deliberately not fatal)
        const invalidOperators = keys.filter(key =>
          key.startsWith('$') && !allowedOperators.includes(key)
        );

        if (invalidOperators.length > 0) {
          console.warn(`Warning: Unknown MongoDB operators detected: ${invalidOperators.join(', ')}`);
        }

        // For array operators, ensure the values are appropriate
        if (filterArg2.$in && !Array.isArray(filterArg2.$in)) {
          throw new Error('$in operator requires an array value');
        }
        if (filterArg2.$nin && !Array.isArray(filterArg2.$nin)) {
          throw new Error('$nin operator requires an array value');
        }
        if (filterArg2.$all && !Array.isArray(filterArg2.$all)) {
          throw new Error('$all operator requires an array value');
        }
        if (filterArg2.$size && typeof filterArg2.$size !== 'number') {
          throw new Error('$size operator requires a numeric value');
        }

        // Use merge helper to handle duplicate paths
        mergeIntoConverted(keyPathArg2, filterArg2);
        return;
      }

      // No operators: dots in keys would corrupt the dot-notation flattening
      for (const key of keys) {
        if (key.includes('.')) {
          throw new Error('keys cannot contain dots');
        }
      }

      // Recursively flatten nested objects into dot-notation paths
      for (const key of keys) {
        convertFilterArgument(`${keyPathArg2}.${key}`, filterArg2[key]);
      }
    } else {
      // Primitive values
      mergeIntoConverted(keyPathArg2, filterArg2);
    }
  };

  for (const key of Object.keys(filterArg)) {
    // Skip logical operators, they were already processed
    if (!logicalOperators.includes(key)) {
      convertFilterArgument(key, filterArg[key]);
    }
  }

  // Add back processed logical operators
  Object.assign(convertedFilter, processedFilter);

  return convertedFilter;
};
export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends IManager = any> {
|
||||||
|
/**
 * The collection a doc belongs to: set statically per class and mirrored
 * on each instance.
 */
public static collection: SmartdataCollection<any>;
public collection: SmartdataCollection<any>;
// NOTE(review): presumably the fallback manager used when none is assigned
// to the class — confirm against the manager wiring elsewhere in the file.
public static defaultManager;
public static manager;
public manager: TManager;
/**
|
||||||
|
* Helper to get collection with fallback to static for Deno compatibility
|
||||||
|
*/
|
||||||
|
private getCollectionSafe(): SmartdataCollection<any> {
|
||||||
|
return this.collection || (this.constructor as any).collection;
|
||||||
|
}
|
||||||
|
|
||||||
|
// STATIC
|
||||||
|
public static createInstanceFromMongoDbNativeDoc<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
mongoDbNativeDocArg: any,
|
||||||
|
): T {
|
||||||
|
const newInstance = new this();
|
||||||
|
(newInstance as any).creationStatus = 'db';
|
||||||
|
for (const key of Object.keys(mongoDbNativeDocArg)) {
|
||||||
|
const rawValue = mongoDbNativeDocArg[key];
|
||||||
|
const optionsMap = (this as any)._svDbOptions || {};
|
||||||
|
const opts = optionsMap[key];
|
||||||
|
newInstance[key] = opts && typeof opts.deserialize === 'function'
|
||||||
|
? opts.deserialize(rawValue)
|
||||||
|
: rawValue;
|
||||||
|
}
|
||||||
|
return newInstance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets all instances as array
|
||||||
|
* @param this
|
||||||
|
* @param filterArg - Type-safe MongoDB filter with nested object support and operators
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public static async getInstances<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T>,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<T[]> {
|
||||||
|
// Pass session through to findAll for transactional queries
|
||||||
|
const foundDocs = await (this as any).collection.findAll(
|
||||||
|
convertFilterForMongoDb(filterArg),
|
||||||
|
{ session: opts?.session },
|
||||||
|
);
|
||||||
|
const returnArray = [];
|
||||||
|
for (const foundDoc of foundDocs) {
|
||||||
|
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
||||||
|
returnArray.push(newInstance);
|
||||||
|
}
|
||||||
|
return returnArray;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the first matching instance
|
||||||
|
* @param this
|
||||||
|
* @param filterArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public static async getInstance<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T>,
|
||||||
|
opts?: { session?: plugins.mongodb.ClientSession }
|
||||||
|
): Promise<T> {
|
||||||
|
// Retrieve one document, with optional session for transactions
|
||||||
|
const foundDoc = await (this as any).collection.findOne(
|
||||||
|
convertFilterForMongoDb(filterArg),
|
||||||
|
{ session: opts?.session },
|
||||||
|
);
|
||||||
|
if (foundDoc) {
|
||||||
|
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
||||||
|
return newInstance;
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get a unique id prefixed with the class name
|
||||||
|
*/
|
||||||
|
public static async getNewId<T = any>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
lengthArg: number = 20,
|
||||||
|
) {
|
||||||
|
return `${(this as any).className}:${plugins.smartunique.shortId(lengthArg)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a cursor for streaming results, with optional session and native cursor modifiers.
|
||||||
|
* @param filterArg Partial filter to apply
|
||||||
|
* @param opts Optional session and modifier for the raw MongoDB cursor
|
||||||
|
*/
|
||||||
|
public static async getCursor<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T>,
|
||||||
|
opts?: {
|
||||||
|
session?: plugins.mongodb.ClientSession;
|
||||||
|
modifier?: (cursorArg: plugins.mongodb.FindCursor<plugins.mongodb.WithId<plugins.mongodb.BSON.Document>>) => plugins.mongodb.FindCursor<plugins.mongodb.WithId<plugins.mongodb.BSON.Document>>;
|
||||||
|
}
|
||||||
|
): Promise<SmartdataDbCursor<T>> {
|
||||||
|
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||||
|
const { session, modifier } = opts || {};
|
||||||
|
await collection.init();
|
||||||
|
let rawCursor: plugins.mongodb.FindCursor<any> =
|
||||||
|
collection.mongoDbCollection.find(convertFilterForMongoDb(filterArg), { session });
|
||||||
|
if (modifier) {
|
||||||
|
rawCursor = modifier(rawCursor);
|
||||||
|
}
|
||||||
|
return new SmartdataDbCursor<T>(rawCursor, this as any as typeof SmartDataDbDoc);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* watch the collection
|
||||||
|
* @param this
|
||||||
|
* @param filterArg
|
||||||
|
* @param forEachFunction
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* Watch the collection for changes, with optional buffering and change stream options.
|
||||||
|
* @param filterArg MongoDB filter to select which changes to observe
|
||||||
|
* @param opts optional ChangeStreamOptions plus bufferTimeMs
|
||||||
|
*/
|
||||||
|
public static async watch<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T>,
|
||||||
|
opts?: plugins.mongodb.ChangeStreamOptions & { bufferTimeMs?: number },
|
||||||
|
): Promise<SmartdataDbWatcher<T>> {
|
||||||
|
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||||
|
const watcher: SmartdataDbWatcher<T> = await collection.watch(
|
||||||
|
convertFilterForMongoDb(filterArg),
|
||||||
|
opts || {},
|
||||||
|
this as any,
|
||||||
|
);
|
||||||
|
return watcher;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* run a function for all instances
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public static async forEach<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T>,
|
||||||
|
forEachFunction: (itemArg: T) => Promise<any>,
|
||||||
|
) {
|
||||||
|
const cursor: SmartdataDbCursor<T> = await (this as any).getCursor(filterArg);
|
||||||
|
await cursor.forEach(forEachFunction);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns a count of the documents in the collection
|
||||||
|
*/
|
||||||
|
public static async getCount<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
filterArg: MongoFilter<T> = {} as any,
|
||||||
|
) {
|
||||||
|
const collection: SmartdataCollection<T> = (this as any).collection;
|
||||||
|
return await collection.getCount(filterArg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a MongoDB filter from a Lucene query string
|
||||||
|
* @param luceneQuery Lucene query string
|
||||||
|
* @returns MongoDB query object
|
||||||
|
*/
|
||||||
|
public static createSearchFilter<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
luceneQuery: string,
|
||||||
|
): any {
|
||||||
|
const searchableFields = (this as any).getSearchableFields();
|
||||||
|
if (searchableFields.length === 0) {
|
||||||
|
throw new Error(`No searchable fields defined for class ${this.name}`);
|
||||||
|
}
|
||||||
|
const adapter = new SmartdataLuceneAdapter(searchableFields);
|
||||||
|
return adapter.convert(luceneQuery);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* List all searchable fields defined on this class
|
||||||
|
*/
|
||||||
|
public static getSearchableFields(): string[] {
|
||||||
|
const ctor = this as any;
|
||||||
|
return Array.isArray(ctor.searchableFields) ? ctor.searchableFields : [];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Execute a query with optional hard filter and post-fetch validation
|
||||||
|
*/
|
||||||
|
private static async execQuery<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
baseFilter: Record<string, any>,
|
||||||
|
opts?: SearchOptions<T>
|
||||||
|
): Promise<T[]> {
|
||||||
|
let mongoFilter = baseFilter || {};
|
||||||
|
if (opts?.filter) {
|
||||||
|
mongoFilter = { $and: [mongoFilter, opts.filter] };
|
||||||
|
}
|
||||||
|
// Fetch with optional session for transactions
|
||||||
|
// Fetch within optional session
|
||||||
|
let docs: T[] = await (this as any).getInstances(mongoFilter, { session: opts?.session });
|
||||||
|
if (opts?.validate) {
|
||||||
|
const out: T[] = [];
|
||||||
|
for (const d of docs) {
|
||||||
|
if (await opts.validate(d)) out.push(d);
|
||||||
|
}
|
||||||
|
docs = out;
|
||||||
|
}
|
||||||
|
return docs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Search documents by text or field:value syntax, with safe regex fallback
|
||||||
|
* Supports additional filtering and post-fetch validation via opts
|
||||||
|
* @param query A search term or field:value expression
|
||||||
|
* @param opts Optional filter and validate hooks
|
||||||
|
* @returns Array of matching documents
|
||||||
|
*/
|
||||||
|
public static async search<T>(
|
||||||
|
this: plugins.tsclass.typeFest.Class<T>,
|
||||||
|
query: string,
|
||||||
|
opts?: SearchOptions<T>,
|
||||||
|
): Promise<T[]> {
|
||||||
|
const searchableFields = (this as any).getSearchableFields();
|
||||||
|
if (searchableFields.length === 0) {
|
||||||
|
throw new Error(`No searchable fields defined for class ${this.name}`);
|
||||||
|
}
|
||||||
|
// empty query -> return all
|
||||||
|
const q = query.trim();
|
||||||
|
if (!q) {
|
||||||
|
// empty query: fetch all, apply opts
|
||||||
|
return await (this as any).execQuery({}, opts);
|
||||||
|
}
|
||||||
|
// simple exact field:value (no spaces, no wildcards, no quotes)
|
||||||
|
// simple exact field:value (no spaces, wildcards, quotes)
|
||||||
|
const simpleExact = q.match(/^(\w+):([^"'\*\?\s]+)$/);
|
||||||
|
if (simpleExact) {
|
||||||
|
const field = simpleExact[1];
|
||||||
|
const value = simpleExact[2];
|
||||||
|
if (!searchableFields.includes(field)) {
|
||||||
|
throw new Error(`Field '${field}' is not searchable for class ${this.name}`);
|
||||||
|
}
|
||||||
|
// simple field:value search
|
||||||
|
return await (this as any).execQuery({ [field]: value }, opts);
|
||||||
|
}
|
||||||
|
// quoted phrase across all searchable fields: exact match of phrase
|
||||||
|
const quoted = q.match(/^"(.+)"$|^'(.+)'$/);
|
||||||
|
if (quoted) {
|
||||||
|
const phrase = quoted[1] || quoted[2] || '';
|
||||||
|
const parts = phrase.split(/\s+/).map((t) => escapeForRegex(t));
|
||||||
|
const pattern = parts.join('\\s+');
|
||||||
|
const orConds = searchableFields.map((f) => ({ [f]: { $regex: pattern, $options: 'i' } }));
|
||||||
|
return await (this as any).execQuery({ $or: orConds }, opts);
|
||||||
|
}
|
||||||
|
// wildcard field:value (supports * and ?) -> direct regex on that field
|
||||||
|
const wildcardField = q.match(/^(\w+):(.+[*?].*)$/);
|
||||||
|
if (wildcardField) {
|
||||||
|
const field = wildcardField[1];
|
||||||
|
// Support quoted wildcard patterns: strip surrounding quotes
|
||||||
|
let pattern = wildcardField[2];
|
||||||
|
if ((pattern.startsWith('"') && pattern.endsWith('"')) ||
|
||||||
|
(pattern.startsWith("'") && pattern.endsWith("'"))) {
|
||||||
|
pattern = pattern.slice(1, -1);
|
||||||
|
}
|
||||||
|
if (!searchableFields.includes(field)) {
|
||||||
|
throw new Error(`Field '${field}' is not searchable for class ${this.name}`);
|
||||||
|
}
|
||||||
|
// escape regex special chars except * and ?, then convert wildcards
|
||||||
|
const escaped = pattern.replace(/([.+^${}()|[\\]\\])/g, '\\$1');
|
||||||
|
const regexPattern = escaped.replace(/\*/g, '.*').replace(/\?/g, '.');
|
||||||
|
return await (this as any).execQuery({ [field]: { $regex: regexPattern, $options: 'i' } }, opts);
|
||||||
|
}
|
||||||
|
// wildcard plain term across all fields (supports * and ?)
|
||||||
|
if (!q.includes(':') && (q.includes('*') || q.includes('?'))) {
|
||||||
|
// build wildcard regex pattern: escape all except * and ? then convert
|
||||||
|
const escaped = q.replace(/([.+^${}()|[\\]\\])/g, '\\$1');
|
||||||
|
const pattern = escaped.replace(/\*/g, '.*').replace(/\?/g, '.');
|
||||||
|
const orConds = searchableFields.map((f) => ({ [f]: { $regex: pattern, $options: 'i' } }));
|
||||||
|
return await (this as any).execQuery({ $or: orConds }, opts);
|
||||||
|
}
|
||||||
|
// implicit AND for multiple tokens: free terms, quoted phrases, and field:values
|
||||||
|
{
|
||||||
|
// Split query into tokens, preserving quoted substrings
|
||||||
|
const rawTokens = q.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g) || [];
|
||||||
|
// Only apply when more than one token and no boolean operators or grouping
|
||||||
|
if (
|
||||||
|
rawTokens.length > 1 &&
|
||||||
|
!/(\bAND\b|\bOR\b|\bNOT\b|\(|\))/i.test(q) &&
|
||||||
|
!/\[|\]/.test(q)
|
||||||
|
) {
|
||||||
|
const andConds: any[] = [];
|
||||||
|
for (let token of rawTokens) {
|
||||||
|
// field:value token
|
||||||
|
const fv = token.match(/^(\w+):(.+)$/);
|
||||||
|
if (fv) {
|
||||||
|
const field = fv[1];
|
||||||
|
let value = fv[2];
|
||||||
|
if (!searchableFields.includes(field)) {
|
||||||
|
throw new Error(`Field '${field}' is not searchable for class ${this.name}`);
|
||||||
|
}
|
||||||
|
// Strip surrounding quotes if present
|
||||||
|
if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith("'") && value.endsWith("'"))) {
|
||||||
|
value = value.slice(1, -1);
|
||||||
|
}
|
||||||
|
// Wildcard search?
|
||||||
|
if (value.includes('*') || value.includes('?')) {
|
||||||
|
const escaped = value.replace(/([.+^${}()|[\\]\\])/g, '\\$1');
|
||||||
|
const pattern = escaped.replace(/\*/g, '.*').replace(/\?/g, '.');
|
||||||
|
andConds.push({ [field]: { $regex: pattern, $options: 'i' } });
|
||||||
|
} else {
|
||||||
|
andConds.push({ [field]: value });
|
||||||
|
}
|
||||||
|
} else if ((token.startsWith('"') && token.endsWith('"')) || (token.startsWith("'") && token.endsWith("'"))) {
|
||||||
|
// Quoted free phrase across all fields
|
||||||
|
const phrase = token.slice(1, -1);
|
||||||
|
const parts = phrase.split(/\s+/).map((t) => escapeForRegex(t));
|
||||||
|
const pattern = parts.join('\\s+');
|
||||||
|
andConds.push({ $or: searchableFields.map((f) => ({ [f]: { $regex: pattern, $options: 'i' } })) });
|
||||||
|
} else {
|
||||||
|
// Free term across all fields
|
||||||
|
const esc = escapeForRegex(token);
|
||||||
|
andConds.push({ $or: searchableFields.map((f) => ({ [f]: { $regex: esc, $options: 'i' } })) });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return await (this as any).execQuery({ $and: andConds }, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// detect advanced Lucene syntax: field:value, wildcards, boolean, grouping
|
||||||
|
const luceneSyntax = /(\w+:[^\s]+)|\*|\?|\bAND\b|\bOR\b|\bNOT\b|\(|\)/;
|
||||||
|
if (luceneSyntax.test(q)) {
|
||||||
|
const filter = (this as any).createSearchFilter(q);
|
||||||
|
return await (this as any).execQuery(filter, opts);
|
||||||
|
}
|
||||||
|
// multi-term unquoted -> AND of regex across fields for each term
|
||||||
|
const terms = q.split(/\s+/);
|
||||||
|
if (terms.length > 1) {
|
||||||
|
const andConds = terms.map((term) => {
|
||||||
|
const esc = escapeForRegex(term);
|
||||||
|
const ors = searchableFields.map((f) => ({ [f]: { $regex: esc, $options: 'i' } }));
|
||||||
|
return { $or: ors };
|
||||||
|
});
|
||||||
|
return await (this as any).execQuery({ $and: andConds }, opts);
|
||||||
|
}
|
||||||
|
// single term -> regex across all searchable fields
|
||||||
|
const esc = escapeForRegex(q);
|
||||||
|
const orConds = searchableFields.map((f) => ({ [f]: { $regex: esc, $options: 'i' } }));
|
||||||
|
return await (this as any).execQuery({ $or: orConds }, opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
|
||||||
|
/**
|
||||||
|
* how the Doc in memory was created, may prove useful later.
|
||||||
|
*/
|
||||||
|
public creationStatus: TDocCreation = 'new';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* updated from db in any case where doc comes from db
|
||||||
|
*/
|
||||||
|
@globalSvDb()
|
||||||
|
_createdAt: string = new Date().toISOString();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* will be updated everytime the doc is saved
|
||||||
|
*/
|
||||||
|
@globalSvDb()
|
||||||
|
_updatedAt: string = new Date().toISOString();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* an array of saveable properties of ALL doc
|
||||||
|
* Note: Set by decorators on prototype - NOT declared as instance property to avoid shadowing in Deno
|
||||||
|
* Declared with definite assignment assertion to satisfy TypeScript without creating instance property
|
||||||
|
*/
|
||||||
|
declare globalSaveableProperties: string[];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* unique indexes
|
||||||
|
* Note: Set by decorators on prototype - NOT declared as instance property to avoid shadowing in Deno
|
||||||
|
*/
|
||||||
|
declare uniqueIndexes: string[];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* regular indexes with their options
|
||||||
|
* Note: Set by decorators on prototype - NOT declared as instance property to avoid shadowing in Deno
|
||||||
|
*/
|
||||||
|
declare regularIndexes: Array<{field: string, options: IIndexOptions}>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* an array of saveable properties of a specific doc
|
||||||
|
* Note: Set by decorators on prototype - NOT declared as instance property to avoid shadowing in Deno
|
||||||
|
*/
|
||||||
|
declare saveableProperties: string[];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* name
|
||||||
|
*/
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* primary id in the database
|
||||||
|
*/
|
||||||
|
public dbDocUniqueId: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* class constructor
|
||||||
|
*/
|
||||||
|
constructor() {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* saves this instance (optionally within a transaction)
|
||||||
|
*/
|
||||||
|
/**
 * Persists this document to its collection, inserting when the doc is
 * new and updating when it already came from the database. Bumps
 * `_updatedAt` on every save and invokes the optional `beforeSave` /
 * `afterSave` hooks when the subclass defines them.
 * @param opts optional MongoDB client session for transactional saves
 * @returns the raw driver result of the insert/update
 */
public async save(opts?: { session?: plugins.mongodb.ClientSession }) {
  // pre-save hook, if the subclass provides one
  if (typeof (this as any).beforeSave === 'function') {
    await (this as any).beforeSave();
  }
  const sessionOpts = { session: opts?.session };
  let dbResult: any;
  // refresh the modification timestamp before persisting
  this._updatedAt = new Date().toISOString();
  if (this.creationStatus === 'db') {
    // already persisted once -> update in place
    dbResult = await this.getCollectionSafe().update(this as any, sessionOpts);
  } else if (this.creationStatus === 'new') {
    // first save -> insert, then mark as persisted
    dbResult = await this.getCollectionSafe().insert(this as any, sessionOpts);
    this.creationStatus = 'db';
  } else {
    logger.log('error', 'neither new nor in db?');
  }
  // post-save hook, if the subclass provides one
  if (typeof (this as any).afterSave === 'function') {
    await (this as any).afterSave();
  }
  return dbResult;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* deletes a document from the database (optionally within a transaction)
|
||||||
|
*/
|
||||||
|
/**
 * Removes this document from its collection, invoking the optional
 * `beforeDelete` / `afterDelete` hooks when the subclass defines them.
 * @param opts optional MongoDB client session for transactional deletes
 * @returns the raw driver result of the delete operation
 */
public async delete(opts?: { session?: plugins.mongodb.ClientSession }) {
  // pre-delete hook, if present
  if (typeof (this as any).beforeDelete === 'function') {
    await (this as any).beforeDelete();
  }
  // remove the document itself
  const result = await this.getCollectionSafe().delete(this, { session: opts?.session });
  // post-delete hook, if present
  if (typeof (this as any).afterDelete === 'function') {
    await (this as any).afterDelete();
  }
  return result;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* also store any referenced objects to DB
|
||||||
|
* better for data consistency
|
||||||
|
*/
|
||||||
|
public saveDeep(savedMapArg: plugins.lik.ObjectMap<SmartDataDbDoc<any, any>> = null) {
|
||||||
|
if (!savedMapArg) {
|
||||||
|
savedMapArg = new plugins.lik.ObjectMap<SmartDataDbDoc<any, any>>();
|
||||||
|
}
|
||||||
|
savedMapArg.add(this);
|
||||||
|
this.save();
|
||||||
|
for (const propertyKey of Object.keys(this)) {
|
||||||
|
const property: any = this[propertyKey];
|
||||||
|
if (property instanceof SmartDataDbDoc && !savedMapArg.checkForObject(property)) {
|
||||||
|
property.saveDeep(savedMapArg);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* updates an object from db
|
||||||
|
*/
|
||||||
|
public async updateFromDb(): Promise<boolean> {
|
||||||
|
const mongoDbNativeDoc = await this.getCollectionSafe().findOne(await this.createIdentifiableObject());
|
||||||
|
if (!mongoDbNativeDoc) {
|
||||||
|
return false; // Document not found in database
|
||||||
|
}
|
||||||
|
for (const key of Object.keys(mongoDbNativeDoc)) {
|
||||||
|
const rawValue = mongoDbNativeDoc[key];
|
||||||
|
const optionsMap = (this.constructor as any)._svDbOptions || {};
|
||||||
|
const opts = optionsMap[key];
|
||||||
|
this[key] = opts && typeof opts.deserialize === 'function'
|
||||||
|
? opts.deserialize(rawValue)
|
||||||
|
: rawValue;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates a saveable object so the instance can be persisted as json in the database
|
||||||
|
*/
|
||||||
|
public async createSavableObject(): Promise<TImplements> {
|
||||||
|
const saveableObject: unknown = {}; // is not exposed to outside, so any is ok here
|
||||||
|
const globalProps = this.globalSaveableProperties || [];
|
||||||
|
const specificProps = this.saveableProperties || [];
|
||||||
|
const saveableProperties = [...globalProps, ...specificProps];
|
||||||
|
// apply custom serialization if configured
|
||||||
|
const optionsMap = (this.constructor as any)._svDbOptions || {};
|
||||||
|
for (const propertyNameString of saveableProperties) {
|
||||||
|
const rawValue = (this as any)[propertyNameString];
|
||||||
|
const opts = optionsMap[propertyNameString];
|
||||||
|
(saveableObject as any)[propertyNameString] = opts && typeof opts.serialize === 'function'
|
||||||
|
? opts.serialize(rawValue)
|
||||||
|
: rawValue;
|
||||||
|
}
|
||||||
|
return saveableObject as TImplements;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates an identifiable object for operations that require filtering
|
||||||
|
*/
|
||||||
|
/**
 * Builds a filter object containing only this doc's unique-index
 * fields, suitable for find/update/delete operations that must target
 * exactly this document.
 */
public async createIdentifiableObject() {
  const identifier: any = {}; // not exposed to the outside, so any is fine
  for (const indexField of this.uniqueIndexes) {
    identifier[indexField] = this[indexField];
  }
  return identifier;
}
|
||||||
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
import { Collection } from './smartdata.classes.collection.js';
|
import { Collection } from './classes.collection.js';
|
||||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
import { SmartdataDb } from './classes.db.js';
|
||||||
import { SmartDataDbDoc, svDb, unI } from './smartdata.classes.doc.js';
|
import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* EasyStore allows the storage of easy objects. It also allows easy sharing of the object between different instances
|
* EasyStore allows the storage of easy objects. It also allows easy sharing of the object between different instances
|
||||||
@@ -17,18 +17,36 @@ export class EasyStore<T> {
|
|||||||
@unI()
|
@unI()
|
||||||
public nameId: string;
|
public nameId: string;
|
||||||
|
|
||||||
|
@svDb()
|
||||||
|
public ephemeral: {
|
||||||
|
activated: boolean;
|
||||||
|
timeout: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
@svDb()
|
||||||
|
lastEdit: number;
|
||||||
|
|
||||||
@svDb()
|
@svDb()
|
||||||
public data: Partial<T>;
|
public data: Partial<T>;
|
||||||
}
|
}
|
||||||
return SmartdataEasyStore;
|
return SmartdataEasyStore;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
constructor(nameIdArg: string, smnartdataDbRefArg: SmartdataDb) {
|
constructor(nameIdArg: string, smartdataDbRefArg: SmartdataDb) {
|
||||||
this.smartdataDbRef = smnartdataDbRefArg;
|
this.smartdataDbRef = smartdataDbRefArg;
|
||||||
this.nameId = nameIdArg;
|
this.nameId = nameIdArg;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async getEasyStore() {
|
private easyStorePromise: Promise<InstanceType<typeof this.easyStoreClass>>;
|
||||||
|
private async getEasyStore(): Promise<InstanceType<typeof this.easyStoreClass>> {
|
||||||
|
if (this.easyStorePromise) {
|
||||||
|
return this.easyStorePromise;
|
||||||
|
}
|
||||||
|
|
||||||
|
// first run from here
|
||||||
|
const deferred = plugins.smartpromise.defer<InstanceType<typeof this.easyStoreClass>>();
|
||||||
|
this.easyStorePromise = deferred.promise;
|
||||||
|
|
||||||
let easyStore = await this.easyStoreClass.getInstance({
|
let easyStore = await this.easyStoreClass.getInstance({
|
||||||
nameId: this.nameId,
|
nameId: this.nameId,
|
||||||
});
|
});
|
||||||
@@ -39,7 +57,8 @@ export class EasyStore<T> {
|
|||||||
easyStore.data = {};
|
easyStore.data = {};
|
||||||
await easyStore.save();
|
await easyStore.save();
|
||||||
}
|
}
|
||||||
return easyStore;
|
deferred.resolve(easyStore);
|
||||||
|
return this.easyStorePromise;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -61,7 +80,7 @@ export class EasyStore<T> {
|
|||||||
/**
|
/**
|
||||||
* writes a specific key to the keyValueStore
|
* writes a specific key to the keyValueStore
|
||||||
*/
|
*/
|
||||||
public async writeKey(keyArg: keyof T, valueArg: any) {
|
public async writeKey<TKey extends keyof T>(keyArg: TKey, valueArg: T[TKey]) {
|
||||||
const easyStore = await this.getEasyStore();
|
const easyStore = await this.getEasyStore();
|
||||||
easyStore.data[keyArg] = valueArg;
|
easyStore.data[keyArg] = valueArg;
|
||||||
await easyStore.save();
|
await easyStore.save();
|
||||||
@@ -90,4 +109,13 @@ export class EasyStore<T> {
|
|||||||
easyStore.data = {};
|
easyStore.data = {};
|
||||||
await easyStore.save();
|
await easyStore.save();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async cleanUpEphemeral() {
|
||||||
|
// Clean up ephemeral data periodically while connected
|
||||||
|
while (this.smartdataDbRef.status === 'connected') {
|
||||||
|
await plugins.smartdelay.delayFor(60000); // Check every minute
|
||||||
|
// TODO: Implement actual cleanup logic for ephemeral data
|
||||||
|
// For now, this prevents the infinite CPU loop
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
780
ts/classes.lucene.adapter.ts
Normal file
780
ts/classes.lucene.adapter.ts
Normal file
@@ -0,0 +1,780 @@
|
|||||||
|
/**
|
||||||
|
* Lucene to MongoDB query adapter for SmartData
|
||||||
|
*/
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
|
||||||
|
// Types
|
||||||
|
type NodeType =
|
||||||
|
| 'TERM'
|
||||||
|
| 'PHRASE'
|
||||||
|
| 'FIELD'
|
||||||
|
| 'AND'
|
||||||
|
| 'OR'
|
||||||
|
| 'NOT'
|
||||||
|
| 'RANGE'
|
||||||
|
| 'WILDCARD'
|
||||||
|
| 'FUZZY'
|
||||||
|
| 'GROUP';
|
||||||
|
|
||||||
|
interface QueryNode {
|
||||||
|
type: NodeType;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TermNode extends QueryNode {
|
||||||
|
type: 'TERM';
|
||||||
|
value: string;
|
||||||
|
boost?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PhraseNode extends QueryNode {
|
||||||
|
type: 'PHRASE';
|
||||||
|
value: string;
|
||||||
|
proximity?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FieldNode extends QueryNode {
|
||||||
|
type: 'FIELD';
|
||||||
|
field: string;
|
||||||
|
value: AnyQueryNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BooleanNode extends QueryNode {
|
||||||
|
type: 'AND' | 'OR' | 'NOT';
|
||||||
|
left: AnyQueryNode;
|
||||||
|
right: AnyQueryNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface RangeNode extends QueryNode {
|
||||||
|
type: 'RANGE';
|
||||||
|
field: string;
|
||||||
|
lower: string;
|
||||||
|
upper: string;
|
||||||
|
includeLower: boolean;
|
||||||
|
includeUpper: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface WildcardNode extends QueryNode {
|
||||||
|
type: 'WILDCARD';
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FuzzyNode extends QueryNode {
|
||||||
|
type: 'FUZZY';
|
||||||
|
value: string;
|
||||||
|
maxEdits: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GroupNode extends QueryNode {
|
||||||
|
type: 'GROUP';
|
||||||
|
value: AnyQueryNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
type AnyQueryNode =
|
||||||
|
| TermNode
|
||||||
|
| PhraseNode
|
||||||
|
| FieldNode
|
||||||
|
| BooleanNode
|
||||||
|
| RangeNode
|
||||||
|
| WildcardNode
|
||||||
|
| FuzzyNode
|
||||||
|
| GroupNode;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lucene query parser
|
||||||
|
*/
|
||||||
|
export class LuceneParser {
|
||||||
|
private pos: number = 0;
|
||||||
|
private input: string = '';
|
||||||
|
private tokens: string[] = [];
|
||||||
|
|
||||||
|
constructor() {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a Lucene query string into an AST
|
||||||
|
*/
|
||||||
|
parse(query: string): AnyQueryNode {
|
||||||
|
this.input = query.trim();
|
||||||
|
this.pos = 0;
|
||||||
|
this.tokens = this.tokenize(this.input);
|
||||||
|
|
||||||
|
return this.parseQuery();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tokenize the input string into tokens
|
||||||
|
*/
|
||||||
|
private tokenize(input: string): string[] {
|
||||||
|
const specialChars = /[()\[\]{}"~^:]/;
|
||||||
|
const operators = /AND|OR|NOT|TO/;
|
||||||
|
|
||||||
|
let tokens: string[] = [];
|
||||||
|
let current = '';
|
||||||
|
let inQuote = false;
|
||||||
|
|
||||||
|
for (let i = 0; i < input.length; i++) {
|
||||||
|
const char = input[i];
|
||||||
|
|
||||||
|
// Handle quoted strings
|
||||||
|
if (char === '"') {
|
||||||
|
if (inQuote) {
|
||||||
|
tokens.push(current + char);
|
||||||
|
current = '';
|
||||||
|
inQuote = false;
|
||||||
|
} else {
|
||||||
|
if (current) tokens.push(current);
|
||||||
|
current = char;
|
||||||
|
inQuote = true;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inQuote) {
|
||||||
|
current += char;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle whitespace
|
||||||
|
if (char === ' ' || char === '\t' || char === '\n') {
|
||||||
|
if (current) {
|
||||||
|
tokens.push(current);
|
||||||
|
current = '';
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle special characters
|
||||||
|
if (specialChars.test(char)) {
|
||||||
|
if (current) {
|
||||||
|
tokens.push(current);
|
||||||
|
current = '';
|
||||||
|
}
|
||||||
|
tokens.push(char);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
current += char;
|
||||||
|
|
||||||
|
// Check if current is an operator
|
||||||
|
if (operators.test(current) && (i + 1 === input.length || /\s/.test(input[i + 1]))) {
|
||||||
|
tokens.push(current);
|
||||||
|
current = '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current) tokens.push(current);
|
||||||
|
|
||||||
|
return tokens;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse the main query expression
|
||||||
|
*/
|
||||||
|
private parseQuery(): AnyQueryNode {
|
||||||
|
const left = this.parseBooleanOperand();
|
||||||
|
|
||||||
|
if (this.pos < this.tokens.length) {
|
||||||
|
const token = this.tokens[this.pos];
|
||||||
|
|
||||||
|
if (token === 'AND' || token === 'OR') {
|
||||||
|
this.pos++;
|
||||||
|
const right = this.parseQuery();
|
||||||
|
return {
|
||||||
|
type: token as 'AND' | 'OR',
|
||||||
|
left,
|
||||||
|
right,
|
||||||
|
};
|
||||||
|
} else if (token === 'NOT' || token === '-') {
|
||||||
|
this.pos++;
|
||||||
|
const right = this.parseQuery();
|
||||||
|
return {
|
||||||
|
type: 'NOT',
|
||||||
|
left,
|
||||||
|
right,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return left;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse boolean operands (terms, phrases, fields, groups)
|
||||||
|
*/
|
||||||
|
private parseBooleanOperand(): AnyQueryNode {
|
||||||
|
if (this.pos >= this.tokens.length) {
|
||||||
|
throw new Error('Unexpected end of input');
|
||||||
|
}
|
||||||
|
|
||||||
|
const token = this.tokens[this.pos];
|
||||||
|
|
||||||
|
// Handle grouping with parentheses
|
||||||
|
if (token === '(') {
|
||||||
|
this.pos++;
|
||||||
|
const group = this.parseQuery();
|
||||||
|
|
||||||
|
if (this.pos < this.tokens.length && this.tokens[this.pos] === ')') {
|
||||||
|
this.pos++;
|
||||||
|
return { type: 'GROUP', value: group } as GroupNode;
|
||||||
|
} else {
|
||||||
|
throw new Error('Unclosed group');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle fields (field:value)
|
||||||
|
if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === ':') {
|
||||||
|
const field = token;
|
||||||
|
this.pos += 2; // Skip field and colon
|
||||||
|
|
||||||
|
if (this.pos < this.tokens.length) {
|
||||||
|
const value = this.parseBooleanOperand();
|
||||||
|
return { type: 'FIELD', field, value } as FieldNode;
|
||||||
|
} else {
|
||||||
|
throw new Error('Expected value after field');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle range queries
|
||||||
|
if (token === '[' || token === '{') {
|
||||||
|
return this.parseRange();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle phrases ("term term")
|
||||||
|
if (token.startsWith('"') && token.endsWith('"')) {
|
||||||
|
const phrase = token.slice(1, -1);
|
||||||
|
this.pos++;
|
||||||
|
|
||||||
|
// Check for proximity operator
|
||||||
|
let proximity: number | undefined;
|
||||||
|
if (this.pos < this.tokens.length && this.tokens[this.pos] === '~') {
|
||||||
|
this.pos++;
|
||||||
|
if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
|
||||||
|
proximity = parseInt(this.tokens[this.pos], 10);
|
||||||
|
this.pos++;
|
||||||
|
} else {
|
||||||
|
throw new Error('Expected number after proximity operator');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { type: 'PHRASE', value: phrase, proximity } as PhraseNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle wildcards
|
||||||
|
if (token.includes('*') || token.includes('?')) {
|
||||||
|
this.pos++;
|
||||||
|
return { type: 'WILDCARD', value: token } as WildcardNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle fuzzy searches
|
||||||
|
if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === '~') {
|
||||||
|
const term = token;
|
||||||
|
this.pos += 2; // Skip term and tilde
|
||||||
|
|
||||||
|
let maxEdits = 2; // Default
|
||||||
|
if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
|
||||||
|
maxEdits = parseInt(this.tokens[this.pos], 10);
|
||||||
|
this.pos++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return { type: 'FUZZY', value: term, maxEdits } as FuzzyNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Simple term
|
||||||
|
this.pos++;
|
||||||
|
return { type: 'TERM', value: token } as TermNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse range queries
|
||||||
|
*/
|
||||||
|
private parseRange(): RangeNode {
|
||||||
|
const includeLower = this.tokens[this.pos] === '[';
|
||||||
|
const includeUpper = this.tokens[this.pos + 4] === ']';
|
||||||
|
|
||||||
|
// Ensure tokens for lower, TO, upper, and closing bracket exist
|
||||||
|
if (this.pos + 4 >= this.tokens.length) {
|
||||||
|
throw new Error('Invalid range query syntax');
|
||||||
|
}
|
||||||
|
this.pos++; // Skip open bracket
|
||||||
|
|
||||||
|
const lower = this.tokens[this.pos];
|
||||||
|
this.pos++;
|
||||||
|
|
||||||
|
if (this.tokens[this.pos] !== 'TO') {
|
||||||
|
throw new Error('Expected TO in range query');
|
||||||
|
}
|
||||||
|
this.pos++;
|
||||||
|
|
||||||
|
const upper = this.tokens[this.pos];
|
||||||
|
this.pos++;
|
||||||
|
|
||||||
|
if (this.tokens[this.pos] !== (includeLower ? ']' : '}')) {
|
||||||
|
throw new Error('Invalid range query closing bracket');
|
||||||
|
}
|
||||||
|
this.pos++;
|
||||||
|
|
||||||
|
// For simplicity, assuming the field is handled separately
|
||||||
|
return {
|
||||||
|
type: 'RANGE',
|
||||||
|
field: '', // This will be filled by the field node
|
||||||
|
lower,
|
||||||
|
upper,
|
||||||
|
includeLower,
|
||||||
|
includeUpper,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transformer for Lucene AST to MongoDB query
|
||||||
|
* FIXED VERSION - proper MongoDB query structure
|
||||||
|
*/
|
||||||
|
export class LuceneToMongoTransformer {
|
||||||
|
private defaultFields: string[];
|
||||||
|
/**
 * @param defaultFields fields to search when a query clause does not
 *   name a field explicitly (bare terms, phrases, wildcards)
 */
constructor(defaultFields: string[] = []) {
this.defaultFields = defaultFields;
}
|
||||||
|
/**
|
||||||
|
* Escape special characters for use in RegExp patterns
|
||||||
|
*/
|
||||||
|
private escapeRegex(input: string): string {
// '$&' re-inserts the matched character, so every regex metacharacter
// ends up backslash-prefixed and matches literally inside a RegExp
return input.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform a Lucene AST node to a MongoDB query
|
||||||
|
*/
|
||||||
|
/**
 * Transform a Lucene AST node into the equivalent MongoDB query
 * object by dispatching on the node's discriminant tag.
 * @param node AST node produced by LuceneParser
 * @param searchFields optional field list applied to unfielded clauses
 * @throws Error for node types this transformer does not know
 */
transform(node: AnyQueryNode, searchFields?: string[]): any {
  if (node.type === 'TERM') {
    return this.transformTerm(node, searchFields);
  }
  if (node.type === 'PHRASE') {
    return this.transformPhrase(node, searchFields);
  }
  if (node.type === 'FIELD') {
    return this.transformField(node);
  }
  if (node.type === 'AND') {
    return this.transformAnd(node);
  }
  if (node.type === 'OR') {
    return this.transformOr(node);
  }
  if (node.type === 'NOT') {
    return this.transformNot(node);
  }
  if (node.type === 'RANGE') {
    return this.transformRange(node);
  }
  if (node.type === 'WILDCARD') {
    return this.transformWildcard(node, searchFields);
  }
  if (node.type === 'FUZZY') {
    return this.transformFuzzy(node, searchFields);
  }
  if (node.type === 'GROUP') {
    // a group is just a transparent wrapper around its inner query
    return this.transform(node.value, searchFields);
  }
  throw new Error(`Unsupported node type: ${(node as any).type}`);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform a term to MongoDB query
|
||||||
|
* FIXED: properly structured $or query for multiple fields
|
||||||
|
*/
|
||||||
|
/**
 * Transform a bare term into a case-insensitive $regex $or across the
 * given search fields (falling back to the transformer's defaults).
 * Terms containing * or ? are converted as wildcard patterns; all
 * other terms are escaped so they match literally.
 */
private transformTerm(node: TermNode, searchFields?: string[]): any {
  const term = node.value;
  // wildcard characters switch us into pattern conversion
  const hasWildcard = term.includes('*') || term.includes('?');
  const pattern = hasWildcard
    ? this.luceneWildcardToRegex(term)
    : this.escapeRegex(term);
  // fall back to the configured default fields when none are supplied
  const targetFields =
    searchFields && searchFields.length > 0 ? searchFields : this.defaultFields;
  return {
    $or: targetFields.map((field) => ({
      [field]: { $regex: pattern, $options: 'i' },
    })),
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform a phrase to MongoDB query
|
||||||
|
* FIXED: properly structured $or query for multiple fields
|
||||||
|
*/
|
||||||
|
/**
 * Transform a quoted phrase into a case-insensitive $regex $or across
 * the given search fields (falling back to the transformer's
 * defaults). Each word is escaped; words are joined with \s+ so any
 * whitespace run between them matches.
 */
private transformPhrase(node: PhraseNode, searchFields?: string[]): any {
  const pattern = node.value
    .split(/\s+/)
    .map((word) => this.escapeRegex(word))
    .join('\\s+');
  // fall back to the configured default fields when none are supplied
  const targetFields =
    searchFields && searchFields.length > 0 ? searchFields : this.defaultFields;
  return {
    $or: targetFields.map((field) => ({
      [field]: { $regex: pattern, $options: 'i' },
    })),
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform a field query to MongoDB query
|
||||||
|
*/
|
||||||
|
/**
 * Transform a field query (field:value) into a MongoDB condition on
 * that field, specializing on the value's node type.
 *
 * FIXED: plain TERM and PHRASE values were previously inserted into
 * $regex without escaping, so field values containing regex
 * metacharacters (e.g. `version:1.5` or `name:a+b`) were interpreted
 * as patterns instead of literals — inconsistent with transformTerm /
 * transformPhrase, which both escape. Both paths now escape via
 * escapeRegex.
 */
private transformField(node: FieldNode): any {
  // Range on a field: inject the field name into the range node
  if (node.value.type === 'RANGE') {
    const rangeNode = node.value as RangeNode;
    rangeNode.field = node.field;
    return this.transformRange(rangeNode);
  }

  // Wildcard on a field
  if (node.value.type === 'WILDCARD') {
    return {
      [node.field]: {
        $regex: this.luceneWildcardToRegex((node.value as WildcardNode).value),
        $options: 'i',
      },
    };
  }

  // Fuzzy search on a field
  if (node.value.type === 'FUZZY') {
    return {
      [node.field]: {
        $regex: this.createFuzzyRegex((node.value as FuzzyNode).value),
        $options: 'i',
      },
    };
  }

  // Exact term on a field (wildcard characters still supported)
  if (node.value.type === 'TERM') {
    const val = (node.value as TermNode).value;
    if (val.includes('*') || val.includes('?')) {
      return { [node.field]: { $regex: this.luceneWildcardToRegex(val), $options: 'i' } };
    }
    // escape so the term matches literally (see FIXED note above)
    return { [node.field]: { $regex: this.escapeRegex(val), $options: 'i' } };
  }

  // Phrase on a field: escape each word, join with \s+ (see FIXED note)
  if (node.value.type === 'PHRASE') {
    const pattern = (node.value as PhraseNode).value
      .split(/\s+/)
      .map((word) => this.escapeRegex(word))
      .join('\\s+');
    return {
      [node.field]: {
        $regex: pattern,
        $options: 'i',
      },
    };
  }

  // For other cases, transform the value and apply it to the field
  const transformedValue = this.transform(node.value);

  // If the transformed value uses $text, adapt it to a field regex
  if (transformedValue.$text) {
    return { [node.field]: { $regex: transformedValue.$text.$search, $options: 'i' } };
  }

  // Boolean sub-queries cannot be scoped to a field directly;
  // simplify by extracting a representative regex term
  if (transformedValue.$or || transformedValue.$and) {
    const term = this.extractTermFromBooleanQuery(transformedValue);
    if (term) {
      return { [node.field]: { $regex: term, $options: 'i' } };
    }
  }

  return { [node.field]: transformedValue };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract a term from a boolean query (simplification)
|
||||||
|
*/
|
||||||
|
/**
 * Best-effort extraction of a regex term from a boolean MongoDB query
 * (a simplification used when a boolean sub-query must be scoped to a
 * single field). Looks at the first clause of $or, then $and, and
 * returns its first $regex value, or null when none is found.
 */
private extractTermFromBooleanQuery(query: any): string | null {
  for (const operator of ['$or', '$and'] as const) {
    const clauses = query[operator];
    if (!Array.isArray(clauses) || clauses.length === 0) {
      continue;
    }
    // only the first clause is inspected — this is a simplification
    const firstClause = clauses[0];
    for (const field of Object.keys(firstClause)) {
      if (firstClause[field].$regex) {
        return firstClause[field].$regex;
      }
    }
  }
  return null;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform AND operator to MongoDB query
|
||||||
|
* FIXED: $and must be an array
|
||||||
|
*/
|
||||||
|
/**
 * Transform an AND node: both children transformed and combined into a
 * MongoDB $and array.
 */
private transformAnd(node: BooleanNode): any {
  const clauses = [node.left, node.right].map((child) => this.transform(child));
  return { $and: clauses };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform OR operator to MongoDB query
|
||||||
|
* FIXED: $or must be an array
|
||||||
|
*/
|
||||||
|
/**
 * Transform an OR node: both children transformed and combined into a
 * MongoDB $or array.
 */
private transformOr(node: BooleanNode): any {
  const clauses = [node.left, node.right].map((child) => this.transform(child));
  return { $or: clauses };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform NOT operator to MongoDB query
|
||||||
|
* FIXED: $and must be an array and $not usage
|
||||||
|
*/
|
||||||
|
/**
 * Transform a NOT node into a query that matches the left side while
 * excluding documents matching the right side.
 *
 * Two strategies are used:
 *  - when the right side compiled to a $text search (no field-scoped
 *    $not is possible there), the search term is re-applied as a
 *    negated regex over the plain fields of the left query;
 *  - otherwise the right query is negated structurally ($nor for
 *    boolean queries, per-field $not for simple conditions) and
 *    combined with the left query under $and.
 */
private transformNot(node: BooleanNode): any {
const leftQuery = this.transform(node.left);
const rightQuery = this.transform(node.right);

// Create a query that includes left but excludes right
if (rightQuery.$text) {
// For text searches, we need a different approach
// We'll use a negated regex instead
// strip the quotes $text adds around phrase searches
const searchTerm = rightQuery.$text.$search.replace(/"/g, '');

// Determine the fields to apply the negation to
// (only plain field conditions of the left query — $or/$and keys
// are skipped because they are operators, not field names)
const notConditions = [];

for (const field in leftQuery) {
if (field !== '$or' && field !== '$and') {
notConditions.push({
[field]: { $not: { $regex: searchTerm, $options: 'i' } },
});
}
}

// If left query has $or or $and, we need to handle it differently
if (leftQuery.$or) {
// exclude docs matching ANY negated condition via $nor
return {
$and: [leftQuery, { $nor: [{ $or: notConditions }] }],
};
} else {
// Simple case - just add $not to each field
return {
$and: [leftQuery, { $and: notConditions }],
};
}
} else {
// For other queries, we can use $not directly
// We need to handle different structures based on the rightQuery
let notQuery = {};

if (rightQuery.$or) {
// NOT (a OR b) === NOR [a, b]
notQuery = { $nor: rightQuery.$or };
} else if (rightQuery.$and) {
// Convert $and to $nor
// NOTE(review): $nor excludes docs matching ANY clause, which is
// stricter than the strict negation of $and — confirm intended
notQuery = { $nor: rightQuery.$and };
} else {
// Simple field condition: negate each field individually
for (const field in rightQuery) {
notQuery[field] = { $not: rightQuery[field] };
}
}

return { $and: [leftQuery, notQuery] };
}
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform range query to MongoDB query
|
||||||
|
*/
|
||||||
|
/**
 * Transform a range node into a MongoDB comparison condition on its
 * field. '*' on either bound means "unbounded" on that side;
 * inclusivity selects $gte/$lte vs $gt/$lt.
 */
private transformRange(node: RangeNode): any {
  const bounds: any = {};

  if (node.lower !== '*') {
    const lowerOp = node.includeLower ? '$gte' : '$gt';
    bounds[lowerOp] = this.parseValue(node.lower);
  }

  if (node.upper !== '*') {
    const upperOp = node.includeUpper ? '$lte' : '$lt';
    bounds[upperOp] = this.parseValue(node.upper);
  }

  return { [node.field]: bounds };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transform wildcard query to MongoDB query
|
||||||
|
* FIXED: properly structured for multiple fields
|
||||||
|
*/
|
||||||
|
/**
 * Transform a wildcard node into a case-insensitive $regex. With
 * explicit search fields the pattern is applied as a $or across them;
 * otherwise a bare regex condition is returned for the caller to
 * attach to a field.
 */
private transformWildcard(node: WildcardNode, searchFields?: string[]): any {
  // Convert Lucene wildcards (*, ?) into a regex pattern
  const regex = this.luceneWildcardToRegex(node.value);

  if (!searchFields || searchFields.length === 0) {
    // no field list: return the bare condition
    return { $regex: regex, $options: 'i' };
  }

  return {
    $or: searchFields.map((field) => ({
      [field]: { $regex: regex, $options: 'i' },
    })),
  };
}
|
||||||
|
|
||||||
|
/**
 * Transform a fuzzy node into a MongoDB regex query.
 * MongoDB has no native fuzzy search, so a loose regex approximation
 * (built by createFuzzyRegex) is used instead.
 */
private transformFuzzy(node: FuzzyNode, searchFields?: string[]): any {
  const pattern = this.createFuzzyRegex(node.value);
  const condition = { $regex: pattern, $options: 'i' };

  // Spread the fuzzy match across all requested fields via $or.
  if (searchFields && searchFields.length > 0) {
    return {
      $or: searchFields.map((fieldName) => ({ [fieldName]: condition })),
    };
  }

  // Default-field case: return the bare condition.
  return condition;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert Lucene wildcards to MongoDB regex patterns
|
||||||
|
*/
|
||||||
|
public luceneWildcardToRegex(wildcardPattern: string): string {
|
||||||
|
// Replace Lucene wildcards with regex equivalents
|
||||||
|
// * => .*
|
||||||
|
// ? => .
|
||||||
|
// Also escape regex special chars
|
||||||
|
return wildcardPattern
|
||||||
|
.replace(/([.+^${}()|\\])/g, '\\$1') // Escape regex special chars
|
||||||
|
.replace(/\*/g, '.*')
|
||||||
|
.replace(/\?/g, '.');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a simplified fuzzy search regex
|
||||||
|
*/
|
||||||
|
private createFuzzyRegex(term: string): string {
|
||||||
|
// For a very simple approach, we allow some characters to be optional
|
||||||
|
let regex = '';
|
||||||
|
for (let i = 0; i < term.length; i++) {
|
||||||
|
// Make every other character optional (simplified fuzzy)
|
||||||
|
if (i % 2 === 1) {
|
||||||
|
regex += term[i] + '?';
|
||||||
|
} else {
|
||||||
|
regex += term[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return regex;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse string values to appropriate types (numbers, dates, etc.)
|
||||||
|
*/
|
||||||
|
private parseValue(value: string): any {
|
||||||
|
// Try to parse as number
|
||||||
|
if (/^-?\d+$/.test(value)) {
|
||||||
|
return parseInt(value, 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (/^-?\d+\.\d+$/.test(value)) {
|
||||||
|
return parseFloat(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to parse as date (simplified)
|
||||||
|
const date = new Date(value);
|
||||||
|
if (!isNaN(date.getTime())) {
|
||||||
|
return date;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to string
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main adapter class
|
||||||
|
*/
|
||||||
|
export class SmartdataLuceneAdapter {
|
||||||
|
private parser: LuceneParser;
|
||||||
|
private transformer: LuceneToMongoTransformer;
|
||||||
|
private defaultSearchFields: string[] = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param defaultSearchFields - Optional array of field names to search across when no field is specified
|
||||||
|
*/
|
||||||
|
constructor(defaultSearchFields?: string[]) {
|
||||||
|
this.parser = new LuceneParser();
|
||||||
|
// Pass default searchable fields into transformer
|
||||||
|
this.transformer = new LuceneToMongoTransformer(defaultSearchFields || []);
|
||||||
|
if (defaultSearchFields) {
|
||||||
|
this.defaultSearchFields = defaultSearchFields;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert a Lucene query string to a MongoDB query object
|
||||||
|
* @param luceneQuery - The Lucene query string to convert
|
||||||
|
* @param searchFields - Optional array of field names to search across (overrides defaultSearchFields)
|
||||||
|
*/
|
||||||
|
convert(luceneQuery: string, searchFields?: string[]): any {
|
||||||
|
try {
|
||||||
|
// For simple single-term queries (no field:, boolean, grouping), use simpler regex
|
||||||
|
if (
|
||||||
|
!luceneQuery.includes(':') &&
|
||||||
|
!luceneQuery.includes(' AND ') &&
|
||||||
|
!luceneQuery.includes(' OR ') &&
|
||||||
|
!luceneQuery.includes(' NOT ') &&
|
||||||
|
!luceneQuery.includes('(') &&
|
||||||
|
!luceneQuery.includes('[')
|
||||||
|
) {
|
||||||
|
const fieldsToSearch = searchFields || this.defaultSearchFields;
|
||||||
|
if (fieldsToSearch && fieldsToSearch.length > 0) {
|
||||||
|
// Handle wildcard characters in query
|
||||||
|
let pattern = luceneQuery;
|
||||||
|
if (luceneQuery.includes('*') || luceneQuery.includes('?')) {
|
||||||
|
// Use transformer to convert wildcard pattern
|
||||||
|
pattern = this.transformer.luceneWildcardToRegex(luceneQuery);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
$or: fieldsToSearch.map((field) => ({
|
||||||
|
[field]: { $regex: pattern, $options: 'i' },
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// For more complex queries, use the full parser
|
||||||
|
// Parse the Lucene query into an AST
|
||||||
|
const ast = this.parser.parse(luceneQuery);
|
||||||
|
|
||||||
|
// Use provided searchFields, fall back to defaultSearchFields
|
||||||
|
const fieldsToSearch = searchFields || this.defaultSearchFields;
|
||||||
|
|
||||||
|
// Transform the AST to a MongoDB query
|
||||||
|
return this.transformWithFields(ast, fieldsToSearch);
|
||||||
|
} catch (error) {
|
||||||
|
logger.log('error', `Failed to convert Lucene query "${luceneQuery}":`, error);
|
||||||
|
throw new Error(`Failed to convert Lucene query: ${error}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper method to transform the AST with field information
|
||||||
|
*/
|
||||||
|
private transformWithFields(node: AnyQueryNode, searchFields: string[]): any {
|
||||||
|
// Special case for term nodes without a specific field
|
||||||
|
if (
|
||||||
|
node.type === 'TERM' ||
|
||||||
|
node.type === 'PHRASE' ||
|
||||||
|
node.type === 'WILDCARD' ||
|
||||||
|
node.type === 'FUZZY'
|
||||||
|
) {
|
||||||
|
return this.transformer.transform(node, searchFields);
|
||||||
|
}
|
||||||
|
|
||||||
|
// For other node types, use the standard transformation
|
||||||
|
return this.transformer.transform(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
73
ts/classes.watcher.ts
Normal file
73
ts/classes.watcher.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import { SmartDataDbDoc } from './classes.doc.js';
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { EventEmitter } from 'node:events';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* a wrapper for the native mongodb cursor. Exposes better
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* Wraps a MongoDB ChangeStream with RxJS and EventEmitter support.
|
||||||
|
*/
|
||||||
|
export class SmartdataDbWatcher<T = any> extends EventEmitter {
|
||||||
|
// STATIC
|
||||||
|
public readyDeferred = plugins.smartpromise.defer();
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
private changeStream: plugins.mongodb.ChangeStream<T>;
|
||||||
|
private rawSubject: plugins.smartrx.rxjs.Subject<T>;
|
||||||
|
/** Emits change documents (or arrays of documents if buffered) */
|
||||||
|
public changeSubject: any;
|
||||||
|
/**
|
||||||
|
* @param changeStreamArg native MongoDB ChangeStream
|
||||||
|
* @param smartdataDbDocArg document class for instance creation
|
||||||
|
* @param opts.bufferTimeMs optional milliseconds to buffer events via RxJS
|
||||||
|
*/
|
||||||
|
constructor(
|
||||||
|
changeStreamArg: plugins.mongodb.ChangeStream<T>,
|
||||||
|
smartdataDbDocArg: typeof SmartDataDbDoc,
|
||||||
|
opts?: { bufferTimeMs?: number },
|
||||||
|
) {
|
||||||
|
super();
|
||||||
|
this.rawSubject = new plugins.smartrx.rxjs.Subject<T>();
|
||||||
|
// Apply buffering if requested
|
||||||
|
if (opts && opts.bufferTimeMs) {
|
||||||
|
this.changeSubject = this.rawSubject.pipe(plugins.smartrx.rxjs.ops.bufferTime(opts.bufferTimeMs));
|
||||||
|
} else {
|
||||||
|
this.changeSubject = this.rawSubject;
|
||||||
|
}
|
||||||
|
this.changeStream = changeStreamArg;
|
||||||
|
this.changeStream.on('change', async (item: any) => {
|
||||||
|
let docInstance: T = null;
|
||||||
|
if (item.fullDocument) {
|
||||||
|
docInstance = smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(
|
||||||
|
item.fullDocument
|
||||||
|
) as any as T;
|
||||||
|
}
|
||||||
|
// Notify subscribers
|
||||||
|
this.rawSubject.next(docInstance);
|
||||||
|
this.emit('change', docInstance);
|
||||||
|
});
|
||||||
|
// Signal readiness after one tick
|
||||||
|
plugins.smartdelay.delayFor(0).then(() => {
|
||||||
|
this.readyDeferred.resolve();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close the change stream, complete the RxJS subject, and remove listeners.
|
||||||
|
*/
|
||||||
|
public async close(): Promise<void> {
|
||||||
|
// Close MongoDB ChangeStream
|
||||||
|
await this.changeStream.close();
|
||||||
|
// Complete the subject to teardown any buffering operators
|
||||||
|
this.rawSubject.complete();
|
||||||
|
// Remove all EventEmitter listeners
|
||||||
|
this.removeAllListeners();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Alias for close(), matching README usage
|
||||||
|
*/
|
||||||
|
public async stop(): Promise<void> {
|
||||||
|
return this.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
18
ts/index.ts
18
ts/index.ts
@@ -1,10 +1,14 @@
|
|||||||
export * from './smartdata.classes.db.js';
|
export * from './classes.db.js';
|
||||||
export * from './smartdata.classes.collection.js';
|
export * from './classes.collection.js';
|
||||||
export * from './smartdata.classes.doc.js';
|
export * from './classes.doc.js';
|
||||||
export * from './smartdata.classes.easystore.js';
|
export * from './classes.easystore.js';
|
||||||
export * from './smartdata.classes.cursor.js';
|
export * from './classes.cursor.js';
|
||||||
|
|
||||||
|
import * as convenience from './classes.convenience.js';
|
||||||
|
|
||||||
|
export { convenience };
|
||||||
|
|
||||||
// to be removed with the next breaking update
|
// to be removed with the next breaking update
|
||||||
import * as plugins from './smartdata.plugins.js';
|
import type * as plugins from './plugins.js';
|
||||||
type IMongoDescriptor = plugins.tsclass.database.IMongoDescriptor;
|
type IMongoDescriptor = plugins.tsclass.database.IMongoDescriptor;
|
||||||
export type { IMongoDescriptor }
|
export type { IMongoDescriptor };
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
export const logger = new plugins.smartlog.ConsoleLog();
|
export const logger = new plugins.smartlog.ConsoleLog();
|
||||||
29
ts/plugins.ts
Normal file
29
ts/plugins.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
// tsclass scope
|
||||||
|
import * as tsclass from '@tsclass/tsclass';
|
||||||
|
|
||||||
|
export { tsclass };
|
||||||
|
|
||||||
|
// @pushrocks scope
|
||||||
|
import * as lik from '@push.rocks/lik';
|
||||||
|
import * as smartdelay from '@push.rocks/smartdelay';
|
||||||
|
import * as smartlog from '@push.rocks/smartlog';
|
||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as smartrx from '@push.rocks/smartrx';
|
||||||
|
import * as smartstring from '@push.rocks/smartstring';
|
||||||
|
import * as smarttime from '@push.rocks/smarttime';
|
||||||
|
import * as smartunique from '@push.rocks/smartunique';
|
||||||
|
import * as taskbuffer from '@push.rocks/taskbuffer';
|
||||||
|
import * as mongodb from 'mongodb';
|
||||||
|
|
||||||
|
export {
|
||||||
|
lik,
|
||||||
|
smartdelay,
|
||||||
|
smartpromise,
|
||||||
|
smartlog,
|
||||||
|
smartrx,
|
||||||
|
mongodb,
|
||||||
|
smartstring,
|
||||||
|
smarttime,
|
||||||
|
smartunique,
|
||||||
|
taskbuffer,
|
||||||
|
};
|
||||||
@@ -1,276 +0,0 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
|
||||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
|
||||||
import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
|
|
||||||
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
|
|
||||||
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
|
|
||||||
import { CollectionFactory } from './smartdata.classes.collectionfactory.js';
|
|
||||||
|
|
||||||
// Options accepted by find operations.
export interface IFindOptions {
  // maximum number of documents to return
  limit?: number;
}

/**
 * Signature of a document validation function: returns true when the
 * given doc passes validation.
 */
export interface IDocValidationFunc<T> {
  (doc: T): boolean;
}

// A thunk that defers resolution of a value (used for lazily supplying
// a db or manager to the decorators below).
export type TDelayed<TDelayedArg> = () => TDelayedArg;

// Shared factory so that each class name maps to exactly one collection.
const collectionFactory = new CollectionFactory();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is a decorator that will tell the decorated class what dbTable to use
|
|
||||||
* @param dbArg
|
|
||||||
*/
|
|
||||||
export function Collection(dbArg: SmartdataDb | TDelayed<SmartdataDb>) {
|
|
||||||
return function classDecorator<T extends { new (...args: any[]): {} }>(constructor: T) {
|
|
||||||
return class extends constructor {
|
|
||||||
public static get collection() {
|
|
||||||
if (!(dbArg instanceof SmartdataDb)) {
|
|
||||||
dbArg = dbArg();
|
|
||||||
}
|
|
||||||
return collectionFactory.getCollection(constructor.name, dbArg);
|
|
||||||
}
|
|
||||||
public get collection() {
|
|
||||||
if (!(dbArg instanceof SmartdataDb)) {
|
|
||||||
dbArg = dbArg();
|
|
||||||
}
|
|
||||||
return collectionFactory.getCollection(constructor.name, dbArg);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Anything that owns a SmartdataDb can act as a manager for doc classes.
export interface IManager {
  db: SmartdataDb;
}
|
|
||||||
|
|
||||||
export const setDefaultManagerForDoc = <T>(managerArg: IManager, dbDocArg: T): T => {
|
|
||||||
(dbDocArg as any).prototype.defaultManager = managerArg;
|
|
||||||
return dbDocArg;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
 * Class decorator wiring a doc class to a manager (and through it, a db).
 * Accepts a manager instance, a thunk resolving to one, or nothing — in
 * which case the prototype-level defaultManager (set via
 * setDefaultManagerForDoc) is used.
 * @param managerArg
 */
export function Manager<TManager extends IManager>(managerArg?: TManager | TDelayed<TManager>) {
  return function classDecorator<T extends { new (...args: any[]): any }>(constructor: T) {
    return class extends constructor {
      // Static collection access: resolve the db, then look up the collection.
      public static get collection() {
        let dbArg: SmartdataDb;
        if (!managerArg) {
          // no manager given: fall back to the prototype-level default
          dbArg = this.prototype.defaultManager.db;
        } else if (managerArg['db']) {
          // a concrete manager instance (duck-typed by its 'db' property)
          dbArg = (managerArg as TManager).db;
        } else {
          // a thunk: call it to obtain the manager lazily
          dbArg = (managerArg as TDelayed<TManager>)().db;
        }
        return collectionFactory.getCollection(constructor.name, dbArg);
      }
      // Instance collection access: same resolution, but via this.defaultManager.
      public get collection() {
        let dbArg: SmartdataDb;
        if (!managerArg) {
          //console.log(this.defaultManager.db);
          //process.exit(0)
          dbArg = this.defaultManager.db;
        } else if (managerArg['db']) {
          dbArg = (managerArg as TManager).db;
        } else {
          dbArg = (managerArg as TDelayed<TManager>)().db;
        }
        return collectionFactory.getCollection(constructor.name, dbArg);
      }
      // Expose the resolved manager itself (static variant).
      public static get manager() {
        let manager: TManager;
        if (!managerArg) {
          manager = this.prototype.defaultManager;
        } else if (managerArg['db']) {
          manager = managerArg as TManager;
        } else {
          manager = (managerArg as TDelayed<TManager>)();
        }
        return manager;
      }
      // Expose the resolved manager itself (instance variant).
      public get manager() {
        let manager: TManager;
        if (!managerArg) {
          manager = this.defaultManager;
        } else if (managerArg['db']) {
          manager = managerArg as TManager;
        } else {
          manager = (managerArg as TDelayed<TManager>)();
        }
        return manager;
      }
    };
  };
}
|
|
||||||
|
|
||||||
// tslint:disable-next-line: max-classes-per-file
export class SmartdataCollection<T> {
  /**
   * the native MongoDB collection backing this instance (lazily connected in init())
   */
  public mongoDbCollection: plugins.mongodb.Collection;
  // optional validation hook applied on insert/update/delete; null means "accept all"
  public objectValidation: IDocValidationFunc<T> = null;
  // name of the MongoDB collection (derived from the doc class name)
  public collectionName: string;
  // the owning db wrapper
  public smartdataDb: SmartdataDb;
  // keys for which a unique index has already been requested
  public uniqueIndexes: string[] = [];

  constructor(classNameArg: string, smartDataDbArg: SmartdataDb) {
    // tell the collection where it belongs
    this.collectionName = classNameArg;
    this.smartdataDb = smartDataDbArg;

    // tell the db class about it (important since Db uses different systems under the hood)
    this.smartdataDb.addCollection(this);
  }

  /**
   * makes sure a collection exists within MongoDb that maps to the SmartdataCollection
   */
  public async init() {
    if (!this.mongoDbCollection) {
      // connect this instance to a MongoDB collection
      const availableMongoDbCollections = await this.smartdataDb.mongoDb.collections();
      const wantedCollection = availableMongoDbCollections.find((collection) => {
        return collection.collectionName === this.collectionName;
      });
      if (!wantedCollection) {
        await this.smartdataDb.mongoDb.createCollection(this.collectionName);
        console.log(`Successfully initiated Collection ${this.collectionName}`);
      }
      this.mongoDbCollection = this.smartdataDb.mongoDb.collection(this.collectionName);
    }
  }

  /**
   * mark unique index
   */
  public markUniqueIndexes(keyArrayArg: string[] = []) {
    for (const key of keyArrayArg) {
      if (!this.uniqueIndexes.includes(key)) {
        // NOTE(review): createIndex is fire-and-forget here (not awaited) —
        // confirm that inserts racing index creation is acceptable.
        this.mongoDbCollection.createIndex(key, {
          unique: true,
        });
        // make sure we only call this once and not for every doc we create
        this.uniqueIndexes.push(key);
      }
    }
  }

  /**
   * adds a validation function that all newly inserted and updated objects have to pass
   */
  public addDocValidation(funcArg: IDocValidationFunc<T>) {
    this.objectValidation = funcArg;
  }

  /**
   * finds the first object in the DbCollection matching the filter
   */
  public async findOne(filterObject: any): Promise<any> {
    await this.init();
    const cursor = this.mongoDbCollection.find(filterObject);
    const result = await cursor.next();
    cursor.close();
    return result;
  }

  // returns a SmartdataDbCursor wrapping the native find() cursor
  public async getCursor(filterObjectArg: any, dbDocArg: typeof SmartDataDbDoc): Promise<SmartdataDbCursor<any>> {
    await this.init();
    const cursor = this.mongoDbCollection.find(filterObjectArg);
    return new SmartdataDbCursor(cursor, dbDocArg);
  }

  /**
   * finds all objects in the DbCollection matching the filter
   */
  public async findAll(filterObject: any): Promise<any[]> {
    await this.init();
    const cursor = this.mongoDbCollection.find(filterObject);
    const result = await cursor.toArray();
    cursor.close();
    return result;
  }

  /**
   * watches the collection while applying a filter
   */
  public async watch(filterObject: any, smartdataDbDocArg: typeof SmartDataDbDoc): Promise<SmartdataDbWatcher> {
    await this.init();
    const changeStream = this.mongoDbCollection.watch([
      {
        $match: filterObject
      }
    ], {
      // deliver the full document on updates, not just the delta
      fullDocument: 'updateLookup'
    });
    const smartdataWatcher = new SmartdataDbWatcher(changeStream, smartdataDbDocArg);
    await smartdataWatcher.readyDeferred.promise;
    return smartdataWatcher;
  }

  /**
   * create an object in the database
   */
  public async insert(dbDocArg: T & SmartDataDbDoc<T, unknown>): Promise<any> {
    await this.init();
    await this.checkDoc(dbDocArg);
    this.markUniqueIndexes(dbDocArg.uniqueIndexes);
    const saveableObject = await dbDocArg.createSavableObject();
    const result = await this.mongoDbCollection.insertOne(saveableObject);
    return result;
  }

  /**
   * updates (upserts) an object in the DbCollection; only non-identifying
   * properties are written via $set
   */
  public async update(dbDocArg: T & SmartDataDbDoc<T, unknown>): Promise<any> {
    await this.init();
    await this.checkDoc(dbDocArg);
    const identifiableObject = await dbDocArg.createIdentifiableObject();
    const saveableObject = await dbDocArg.createSavableObject();
    // split off identifying keys so they are used only for matching
    const updateableObject: any = {};
    for (const key of Object.keys(saveableObject)) {
      if (identifiableObject[key]) {
        continue;
      }
      updateableObject[key] = saveableObject[key];
    }
    const result = await this.mongoDbCollection.updateOne(
      identifiableObject,
      { $set: updateableObject },
      { upsert: true }
    );
    return result;
  }

  // removes the doc identified by its identifiable object
  public async delete(dbDocArg: T & SmartDataDbDoc<T, unknown>): Promise<any> {
    await this.init();
    await this.checkDoc(dbDocArg);
    const identifiableObject = await dbDocArg.createIdentifiableObject();
    await this.mongoDbCollection.deleteOne(identifiableObject);
  }

  /**
   * checks a Doc for constraints
   * if this.objectValidation is not set it passes.
   */
  private checkDoc(docArg: T): Promise<void> {
    const done = plugins.smartq.defer<void>();
    let validationResult = true;
    if (this.objectValidation) {
      validationResult = this.objectValidation(docArg);
    }
    if (validationResult) {
      done.resolve();
    } else {
      done.reject('validation of object did not pass');
    }
    return done.promise;
  }
}
|
|
||||||
@@ -1,83 +0,0 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
|
||||||
import { ObjectMap } from '@pushrocks/lik';
|
|
||||||
|
|
||||||
import { SmartdataCollection } from './smartdata.classes.collection.js';
|
|
||||||
import { EasyStore } from './smartdata.classes.easystore.js';
|
|
||||||
|
|
||||||
import { logger } from './smartdata.logging.js';
|
|
||||||
|
|
||||||
/**
 * interface - indicates the connection status of the db
 */
export type TConnectionStatus = 'initial' | 'disconnected' | 'connected' | 'failed';

/**
 * Wraps a MongoDB client/connection plus the collections registered against it.
 */
export class SmartdataDb {
  // connection descriptor (url, user, pass, db name)
  smartdataOptions: plugins.tsclass.database.IMongoDescriptor;
  mongoDbClient: plugins.mongodb.MongoClient;
  mongoDb: plugins.mongodb.Db;
  status: TConnectionStatus;
  // all SmartdataCollection instances registered with this db
  smartdataCollectionMap = new ObjectMap<SmartdataCollection<any>>();

  constructor(smartdataOptions: plugins.tsclass.database.IMongoDescriptor) {
    this.smartdataOptions = smartdataOptions;
    this.status = 'initial';
  }

  // easystore
  // creates a key/value EasyStore bound to this db
  public async createEasyStore(nameIdArg: string) {
    const easyStore = new EasyStore(nameIdArg, this);
    return easyStore;
  }

  // basic connection stuff ----------------------------------------------

  /**
   * connects to the database that was specified during instance creation
   */
  public async init(): Promise<any> {
    // substitute all supported placeholder spellings in the connection url
    const finalConnectionUrl = this.smartdataOptions.mongoDbUrl
      .replace('<USERNAME>', this.smartdataOptions.mongoDbUser)
      .replace('<username>', this.smartdataOptions.mongoDbUser)
      .replace('<USER>', this.smartdataOptions.mongoDbUser)
      .replace('<user>', this.smartdataOptions.mongoDbUser)
      .replace('<PASSWORD>', this.smartdataOptions.mongoDbPass)
      .replace('<password>', this.smartdataOptions.mongoDbPass)
      .replace('<DBNAME>', this.smartdataOptions.mongoDbName)
      .replace('<dbname>', this.smartdataOptions.mongoDbName);

    this.mongoDbClient = await plugins.mongodb.MongoClient.connect(finalConnectionUrl, {
      maxPoolSize: 100,
      // NOTE(review): 10ms idle timeout is extremely aggressive — confirm intended.
      maxIdleTimeMS: 10,
    });
    this.mongoDb = this.mongoDbClient.db(this.smartdataOptions.mongoDbName);
    this.status = 'connected';
    console.log(`Connected to database ${this.smartdataOptions.mongoDbName}`);
  }

  /**
   * closes the connection to the database
   */
  public async close(): Promise<any> {
    await this.mongoDbClient.close();
    this.status = 'disconnected';
    logger.log('info', `disconnected from database ${this.smartdataOptions.mongoDbName}`);
  }

  // handle table to class distribution

  // registers a collection with this db instance
  public addCollection(SmartdataCollectionArg: SmartdataCollection<any>) {
    this.smartdataCollectionMap.add(SmartdataCollectionArg);
  }

  /**
   * Gets a collection's name and returns a SmartdataCollection instance
   * @param nameArg
   * @returns DbTable
   */
  public async getSmartdataCollectionByName<T>(nameArg: string): Promise<SmartdataCollection<T>> {
    const resultCollection = await this.smartdataCollectionMap.find(async (dbTableArg) => {
      return dbTableArg.collectionName === nameArg;
    });
    return resultCollection;
  }
}
|
|
||||||
@@ -1,279 +0,0 @@
|
|||||||
import * as plugins from './smartdata.plugins.js';
|
|
||||||
|
|
||||||
import { ObjectMap } from '@pushrocks/lik';
|
|
||||||
|
|
||||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
|
||||||
import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
|
|
||||||
import { IManager, SmartdataCollection } from './smartdata.classes.collection.js';
|
|
||||||
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
|
|
||||||
|
|
||||||
// Provenance of an in-memory doc: loaded from db, newly created, or merged.
export type TDocCreation = 'db' | 'new' | 'mixed';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* saveable - saveable decorator to be used on class properties
|
|
||||||
*/
|
|
||||||
export function svDb() {
|
|
||||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
|
||||||
console.log(`called svDb() on >${target.constructor.name}.${key}<`);
|
|
||||||
if (!target.saveableProperties) {
|
|
||||||
target.saveableProperties = [];
|
|
||||||
}
|
|
||||||
target.saveableProperties.push(key);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* unique index - decorator to mark a unique index
|
|
||||||
*/
|
|
||||||
export function unI() {
|
|
||||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
|
||||||
console.log(`called unI on >>${target.constructor.name}.${key}<<`);
|
|
||||||
|
|
||||||
// mark the index as unique
|
|
||||||
if (!target.uniqueIndexes) {
|
|
||||||
target.uniqueIndexes = [];
|
|
||||||
}
|
|
||||||
target.uniqueIndexes.push(key);
|
|
||||||
|
|
||||||
// and also save it
|
|
||||||
if (!target.saveableProperties) {
|
|
||||||
target.saveableProperties = [];
|
|
||||||
}
|
|
||||||
target.saveableProperties.push(key);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export const convertFilterForMongoDb = (filterArg: { [key: string]: any }) => {
|
|
||||||
const convertedFilter: { [key: string]: any } = {};
|
|
||||||
const convertFilterArgument = (keyPathArg2: string, filterArg2: any) => {
|
|
||||||
if (typeof filterArg2 === 'object') {
|
|
||||||
for (const key of Object.keys(filterArg2)) {
|
|
||||||
if (key.startsWith('$')) {
|
|
||||||
convertedFilter[keyPathArg2] = filterArg2;
|
|
||||||
return;
|
|
||||||
} else if (key.includes('.')) {
|
|
||||||
throw new Error('keys cannot contain dots');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (const key of Object.keys(filterArg2)) {
|
|
||||||
convertFilterArgument(`${keyPathArg2}.${key}`, filterArg2[key]);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
convertedFilter[keyPathArg2] = filterArg2;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
for (const key of Object.keys(filterArg)) {
|
|
||||||
convertFilterArgument(key, filterArg[key]);
|
|
||||||
}
|
|
||||||
return convertedFilter;
|
|
||||||
};
|
|
||||||
|
|
||||||
export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends IManager = any> {
|
|
||||||
/**
|
|
||||||
* the collection object an Doc belongs to
|
|
||||||
*/
|
|
||||||
public static collection: SmartdataCollection<any>;
|
|
||||||
public collection: SmartdataCollection<any>;
|
|
||||||
public static defaultManager;
|
|
||||||
public static manager;
|
|
||||||
public manager: TManager;
|
|
||||||
|
|
||||||
// STATIC
|
|
||||||
public static createInstanceFromMongoDbNativeDoc<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
mongoDbNativeDocArg: any
|
|
||||||
): T {
|
|
||||||
const newInstance = new this();
|
|
||||||
(newInstance as any).creationStatus = 'db';
|
|
||||||
for (const key of Object.keys(mongoDbNativeDocArg)) {
|
|
||||||
newInstance[key] = mongoDbNativeDocArg[key];
|
|
||||||
}
|
|
||||||
return newInstance;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets all instances as array
|
|
||||||
* @param this
|
|
||||||
* @param filterArg
|
|
||||||
* @returns
|
|
||||||
*/
|
|
||||||
public static async getInstances<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>
|
|
||||||
): Promise<T[]> {
|
|
||||||
const foundDocs = await (this as any).collection.findAll(convertFilterForMongoDb(filterArg));
|
|
||||||
const returnArray = [];
|
|
||||||
for (const foundDoc of foundDocs) {
|
|
||||||
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
|
||||||
returnArray.push(newInstance);
|
|
||||||
}
|
|
||||||
return returnArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets the first matching instance
|
|
||||||
* @param this
|
|
||||||
* @param filterArg
|
|
||||||
* @returns
|
|
||||||
*/
|
|
||||||
public static async getInstance<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>
|
|
||||||
): Promise<T> {
|
|
||||||
const foundDoc = await (this as any).collection.findOne(convertFilterForMongoDb(filterArg));
|
|
||||||
if (foundDoc) {
|
|
||||||
const newInstance: T = (this as any).createInstanceFromMongoDbNativeDoc(foundDoc);
|
|
||||||
return newInstance;
|
|
||||||
} else {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* get cursor
|
|
||||||
* @returns
|
|
||||||
*/
|
|
||||||
public static async getCursor<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>
|
|
||||||
) {
|
|
||||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
|
||||||
const cursor: SmartdataDbCursor<T> = await collection.getCursor(
|
|
||||||
convertFilterForMongoDb(filterArg),
|
|
||||||
this as any as typeof SmartDataDbDoc
|
|
||||||
);
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* watch the collection
|
|
||||||
* @param this
|
|
||||||
* @param filterArg
|
|
||||||
* @param forEachFunction
|
|
||||||
*/
|
|
||||||
public static async watch<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>
|
|
||||||
) {
|
|
||||||
const collection: SmartdataCollection<T> = (this as any).collection;
|
|
||||||
const watcher: SmartdataDbWatcher<T> = await collection.watch(
|
|
||||||
convertFilterForMongoDb(filterArg),
|
|
||||||
(this as any)
|
|
||||||
);
|
|
||||||
return watcher;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* run a function for all instances
|
|
||||||
* @returns
|
|
||||||
*/
|
|
||||||
public static async forEach<T>(
|
|
||||||
this: plugins.tsclass.typeFest.Class<T>,
|
|
||||||
filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
|
|
||||||
forEachFunction: (itemArg: T) => Promise<any>
|
|
||||||
) {
|
|
||||||
const cursor: SmartdataDbCursor<T> = await (this as any).getCursor(filterArg);
|
|
||||||
await cursor.forEach(forEachFunction);
|
|
||||||
}
|
|
||||||
|
|
||||||
// INSTANCE

/**
 * how the Doc in memory was created:
 *   'new' -> constructed locally, not yet persisted
 *   'db'  -> hydrated from an existing database document
 * save() uses this flag to decide between insert and update.
 */
public creationStatus: TDocCreation = 'new';

/**
 * property names that carry a unique index; used by
 * createIdentifiableObject() to build an identifying filter object.
 * NOTE(review): populated elsewhere (presumably by decorators) — confirm.
 */
public uniqueIndexes: string[];

/**
 * property names persisted by createSavableObject(); anything not
 * listed here stays in memory only.
 * NOTE(review): populated elsewhere (presumably by decorators) — confirm.
 */
public saveableProperties: string[];

/**
 * name of the doc
 */
public name: string;

/**
 * primary id in the database
 */
public dbDocUniqueId: string;

/**
 * class constructor — intentionally empty; state is assigned after
 * construction (e.g. by createInstanceFromMongoDbNativeDoc).
 */
constructor() {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* saves this instance but not any connected items
|
|
||||||
* may lead to data inconsistencies, but is faster
|
|
||||||
*/
|
|
||||||
public async save() {
|
|
||||||
// tslint:disable-next-line: no-this-assignment
|
|
||||||
const self: any = this;
|
|
||||||
let dbResult: any;
|
|
||||||
switch (this.creationStatus) {
|
|
||||||
case 'db':
|
|
||||||
dbResult = await this.collection.update(self);
|
|
||||||
break;
|
|
||||||
case 'new':
|
|
||||||
dbResult = await this.collection.insert(self);
|
|
||||||
this.creationStatus = 'db';
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
console.error('neither new nor in db?');
|
|
||||||
}
|
|
||||||
return dbResult;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* deletes a document from the database
|
|
||||||
*/
|
|
||||||
public async delete() {
|
|
||||||
await this.collection.delete(this);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* also store any referenced objects to DB
|
|
||||||
* better for data consistency
|
|
||||||
*/
|
|
||||||
public saveDeep(savedMapArg: ObjectMap<SmartDataDbDoc<any, any>> = null) {
|
|
||||||
if (!savedMapArg) {
|
|
||||||
savedMapArg = new ObjectMap<SmartDataDbDoc<any, any>>();
|
|
||||||
}
|
|
||||||
savedMapArg.add(this);
|
|
||||||
this.save();
|
|
||||||
for (const propertyKey of Object.keys(this)) {
|
|
||||||
const property: any = this[propertyKey];
|
|
||||||
if (property instanceof SmartDataDbDoc && !savedMapArg.checkForObject(property)) {
|
|
||||||
property.saveDeep(savedMapArg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* creates a saveable object so the instance can be persisted as json in the database
|
|
||||||
*/
|
|
||||||
public async createSavableObject(): Promise<TImplements> {
|
|
||||||
const saveableObject: unknown = {}; // is not exposed to outside, so any is ok here
|
|
||||||
for (const propertyNameString of this.saveableProperties) {
|
|
||||||
saveableObject[propertyNameString] = this[propertyNameString];
|
|
||||||
}
|
|
||||||
return saveableObject as TImplements;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* creates an identifiable object for operations that require filtering
|
|
||||||
*/
|
|
||||||
public async createIdentifiableObject() {
|
|
||||||
const identifiableObject: any = {}; // is not exposed to outside, so any is ok here
|
|
||||||
for (const propertyNameString of this.uniqueIndexes) {
|
|
||||||
identifiableObject[propertyNameString] = this[propertyNameString];
|
|
||||||
}
|
|
||||||
return identifiableObject;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
|
|
||||||
import * as plugins from './smartdata.plugins.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* a wrapper for the native mongodb cursor. Exposes better
|
|
||||||
*/
|
|
||||||
export class SmartdataDbWatcher<T = any> {
|
|
||||||
// STATIC
|
|
||||||
public readyDeferred = plugins.smartpromise.defer();
|
|
||||||
|
|
||||||
// INSTANCE
|
|
||||||
private changeStream: plugins.mongodb.ChangeStream<T>;
|
|
||||||
|
|
||||||
public changeSubject = new plugins.smartrx.rxjs.Subject<T>();
|
|
||||||
constructor(changeStreamArg: plugins.mongodb.ChangeStream<T>, smartdataDbDocArg: typeof SmartDataDbDoc) {
|
|
||||||
this.changeStream = changeStreamArg;
|
|
||||||
this.changeStream.on('change', async (item: T) => {
|
|
||||||
this.changeSubject.next(smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(item) as any as T);
|
|
||||||
})
|
|
||||||
plugins.smartdelay.delayFor(0).then(() => {
|
|
||||||
this.readyDeferred.resolve();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
public async close() {
|
|
||||||
await this.changeStream.close();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
// tsclass scope
|
|
||||||
import * as tsclass from '@tsclass/tsclass';
|
|
||||||
|
|
||||||
export { tsclass };
|
|
||||||
|
|
||||||
// @pushrocks scope
|
|
||||||
import * as smartlog from '@pushrocks/smartlog';
|
|
||||||
import * as lodash from 'lodash';
|
|
||||||
import * as mongodb from 'mongodb';
|
|
||||||
import * as smartdelay from '@pushrocks/smartdelay'
|
|
||||||
import * as smartpromise from '@pushrocks/smartpromise';
|
|
||||||
import * as smartq from '@pushrocks/smartpromise';
|
|
||||||
import * as smartrx from '@pushrocks/smartrx';
|
|
||||||
import * as smartstring from '@pushrocks/smartstring';
|
|
||||||
import * as smartunique from '@pushrocks/smartunique';
|
|
||||||
|
|
||||||
export { smartdelay, smartpromise, smartlog, lodash, smartq, smartrx, mongodb, smartstring, smartunique };
|
|
||||||
@@ -1,9 +1,14 @@
|
|||||||
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true,
    "baseUrl": ".",
    "paths": {}
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
|
||||||
Reference in New Issue
Block a user