Compare commits

16 Commits:

- 9755522bba
- de8736e99e
- c430627a21
- 0bfebaf5b9
- 4733982d03
- 368dc27607
- 938b25c925
- ab251858ba
- 24371ccf78
- ed1eecbab8
- 0d2dcec3e2
- 9426a21a2a
- 4fac974fc9
- cad2decf59
- 0f61bdc455
- 408b2cce4a
@@ -6,8 +6,8 @@ on:
      - '**'

env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}

@@ -26,7 +26,7 @@ jobs:
      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci

      - name: Run npm prepare
        run: npmci npm prepare
@@ -6,8 +6,8 @@ on:
      - '*'

env:
-  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
-  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
+  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
+  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}

@@ -26,7 +26,7 @@ jobs:
      - name: Prepare
        run: |
          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Audit production dependencies

@@ -54,7 +54,7 @@ jobs:
      - name: Prepare
        run: |
          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Test stable

@@ -82,7 +82,7 @@ jobs:
      - name: Prepare
        run: |
          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Release

@@ -104,7 +104,7 @@ jobs:
      - name: Prepare
        run: |
          pnpm install -g pnpm
-          pnpm install -g @shipzone/npmci
+          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Code quality

@@ -119,6 +119,6 @@ jobs:
        run: |
          npmci node install stable
          npmci npm install
-          pnpm install -g @gitzone/tsdoc
+          pnpm install -g @git.zone/tsdoc
          npmci command tsdoc
        continue-on-error: true
.gitignore (vendored, 3 lines changed)

@@ -3,7 +3,6 @@
# artifacts
coverage/
public/
pages/

# installs
node_modules/

@@ -17,4 +16,4 @@ node_modules/
dist/
dist_*/

-# custom
+#------# custom
changelog.md (58 lines changed)

@@ -1,5 +1,63 @@
# Changelog

## 2025-04-14 - 5.8.2 - fix(classes.doc.ts)
Ensure collection initialization before creating a cursor in getCursorExtended

- Added 'await collection.init()' to guarantee that the MongoDB collection is initialized before using the cursor
- Prevents potential runtime errors when accessing collection.mongoDbCollection

## 2025-04-14 - 5.8.1 - fix(cursor, doc)
Add explicit return types and casts to SmartdataDbCursor methods and update getCursorExtended signature in SmartDataDbDoc.

- Specify Promise<T> as return type for next() in SmartdataDbCursor and cast return value to T.
- Specify Promise<T[]> as return type for toArray() in SmartdataDbCursor and cast return value to T[].
- Update getCursorExtended to return Promise<SmartdataDbCursor<T>> for clearer type safety.

## 2025-04-14 - 5.8.0 - feat(cursor)
Add toArray method to SmartdataDbCursor to convert raw MongoDB documents into initialized class instances

- Introduced asynchronous toArray method in SmartdataDbCursor which retrieves all documents from the MongoDB cursor
- Maps each native document to a SmartDataDbDoc instance using createInstanceFromMongoDbNativeDoc for consistent API usage

## 2025-04-14 - 5.7.0 - feat(SmartDataDbDoc)
Add extended cursor method getCursorExtended for flexible cursor modifications

- Introduces getCursorExtended in classes.doc.ts to allow modifier functions for MongoDB cursors
- Wraps the modified cursor with SmartdataDbCursor for improved API consistency
- Enhances querying capabilities by enabling customized cursor transformations

## 2025-04-07 - 5.6.0 - feat(indexing)
Add support for regular index creation in documents and collections

- Implement new index decorator in classes.doc.ts to mark properties with regular indexing options
- Update SmartdataCollection to create regular indexes if defined on a document during insert
- Enhance document structure to store and utilize regular index configurations

## 2025-04-06 - 5.5.1 - fix(ci & formatting)
Minor fixes: update CI workflow image and npmci package references, adjust package.json and readme URLs, and apply consistent code formatting.

- Update image and repo URL in Gitea workflows from GitLab to code.foss.global
- Replace '@shipzone/npmci' with '@ship.zone/npmci' throughout CI scripts
- Adjust homepage and bugs URL in package.json and readme
- Apply trailing commas and consistent formatting in TypeScript source files
- Minor update to .gitignore custom section label

## 2025-04-06 - 5.5.0 - feat(search)
Enhance search functionality with robust Lucene query transformation and reliable fallback mechanisms

- Improve Lucene adapter to properly structure $or queries for term, phrase, wildcard, and fuzzy search
- Implement and document a robust searchWithLucene method with fallback to in-memory filtering
- Update readme and tests with extensive examples for @searchable fields and Lucene-based queries

## 2025-04-06 - 5.4.0 - feat(core)
Refactor file structure and update dependency versions

- Renamed files and modules from 'smartdata.classes.*' to 'classes.*' and adjusted corresponding import paths.
- Updated dependency versions: '@push.rocks/smartmongo' to ^2.0.11, '@tsclass/tsclass' to ^8.2.0, and 'mongodb' to ^6.15.0.
- Renamed dev dependency packages from '@gitzone/...' to '@git.zone/...' and updated '@push.rocks/tapbundle' and '@types/node'.
- Fixed YAML workflow command: replaced 'pnpm install -g @gitzone/tsdoc' with 'pnpm install -g @git.zone/tsdoc'.
- Added package manager configuration and pnpm-workspace.yaml for built dependencies.

## 2025-03-10 - 5.3.0 - feat(docs)
Enhance documentation with updated installation instructions and comprehensive usage examples covering advanced features such as deep queries, automatic indexing, and distributed coordination.
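The 5.7.0 through 5.8.2 entries above all describe the same extended cursor API. The following is a minimal usage sketch of how those pieces fit together, based only on the signatures listed in this changelog; the `Car` model, the connection via `@push.rocks/smartmongo`, and the sort/limit modifier are illustrative assumptions, not part of the diff:

```typescript
import * as smartmongo from '@push.rocks/smartmongo';
import * as smartdata from '@push.rocks/smartdata';

// assumption: an in-memory MongoDB, set up the same way the tests in this diff do it
const smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
const db = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
await db.init();

@smartdata.Collection(() => db)
class Car extends smartdata.SmartDataDbDoc<Car, Car> {
  @smartdata.unI() public id: string = 'car-' + Math.random().toString(36).slice(2);
  @smartdata.svDb() public brand: string = 'Volvo';
  @smartdata.svDb() public createdAt: number = Date.now();
}

// getCursorExtended (5.7.0) hands the raw MongoDB FindCursor to a modifier function
// before wrapping it in a SmartdataDbCursor; since 5.8.1/5.8.2 the result is typed
// and the collection is initialized before the cursor is created.
const cursor = await Car.getCursorExtended(
  { brand: 'Volvo' },
  (rawCursor) => rawCursor.sort({ createdAt: -1 }).limit(10),
);

// toArray() (5.8.0) drains the cursor and maps every raw document back into a Car instance.
const newestCars = await cursor.toArray();
await cursor.close();
console.log(newestCars.length);
```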
package.json (28 lines changed)

@@ -1,6 +1,6 @@
{
  "name": "@push.rocks/smartdata",
-  "version": "5.3.0",
+  "version": "5.8.2",
  "private": false,
  "description": "An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.",
  "main": "dist_ts/index.js",

@@ -18,30 +18,30 @@
  "author": "Lossless GmbH",
  "license": "MIT",
  "bugs": {
-    "url": "https://gitlab.com/pushrocks/smartdata/issues"
+    "url": "https://code.foss.global/push.rocks/smartdata/issues"
  },
-  "homepage": "https://code.foss.global/push.rocks/smartdata",
+  "homepage": "https://code.foss.global/push.rocks/smartdata#readme",
  "dependencies": {
    "@push.rocks/lik": "^6.0.14",
    "@push.rocks/smartdelay": "^3.0.1",
    "@push.rocks/smartlog": "^3.0.2",
-    "@push.rocks/smartmongo": "^2.0.10",
+    "@push.rocks/smartmongo": "^2.0.11",
    "@push.rocks/smartpromise": "^4.0.2",
    "@push.rocks/smartrx": "^3.0.7",
    "@push.rocks/smartstring": "^4.0.15",
    "@push.rocks/smarttime": "^4.0.6",
    "@push.rocks/smartunique": "^3.0.8",
    "@push.rocks/taskbuffer": "^3.1.7",
-    "@tsclass/tsclass": "^4.0.52",
-    "mongodb": "^6.5.0"
+    "@tsclass/tsclass": "^8.2.0",
+    "mongodb": "^6.15.0"
  },
  "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.66",
-    "@gitzone/tsrun": "^1.2.44",
-    "@gitzone/tstest": "^1.0.77",
+    "@git.zone/tsbuild": "^2.3.2",
+    "@git.zone/tsrun": "^1.2.44",
+    "@git.zone/tstest": "^1.0.77",
    "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/tapbundle": "^5.0.22",
-    "@types/node": "^20.11.30"
+    "@push.rocks/tapbundle": "^5.6.2",
+    "@types/node": "^22.14.0"
  },
  "files": [
    "ts/**/*",

@@ -67,5 +67,9 @@
    "collections",
    "custom data types",
    "ODM"
-  ]
+  ],
+  "packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6",
+  "pnpm": {
+    "overrides": {}
+  }
}
pnpm-lock.yaml (generated, 13509 lines changed): file diff suppressed because it is too large.
pnpm-workspace.yaml (new file, 4 lines)

@@ -0,0 +1,4 @@
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
readme.md (147 lines changed)

@@ -18,6 +18,7 @@ A powerful TypeScript-first MongoDB wrapper that provides advanced features for
- **Enhanced Cursors**: Chainable, type-safe cursor API with memory efficient data processing
- **Type Conversion**: Automatic handling of MongoDB types like ObjectId and Binary data
- **Serialization Hooks**: Custom serialization and deserialization of document properties
- **Powerful Search Capabilities**: Lucene-like query syntax with field-specific search, advanced operators, and fallback mechanisms

## Requirements

@@ -26,6 +27,7 @@ A powerful TypeScript-first MongoDB wrapper that provides advanced features for
- TypeScript >= 4.x (for development)

## Install

To install `@push.rocks/smartdata`, use npm:

```bash
@@ -39,9 +41,11 @@ pnpm add @push.rocks/smartdata
```

## Usage

`@push.rocks/smartdata` enables efficient data handling and operation management with a focus on using MongoDB. It leverages TypeScript for strong typing and ESM syntax for modern JavaScript usage.

### Setting Up and Connecting to the Database

Before interacting with the database, you need to set up and establish a connection. The `SmartdataDb` class handles connection pooling and automatic reconnection.

```typescript
@@ -61,10 +65,20 @@ await db.init();
```

### Defining Data Models

Data models in `@push.rocks/smartdata` are classes that represent collections and documents in your MongoDB database. Use decorators such as `@Collection`, `@unI`, and `@svDb` to define your data models.

```typescript
-import { SmartDataDbDoc, Collection, unI, svDb, oid, bin, index } from '@push.rocks/smartdata';
+import {
+  SmartDataDbDoc,
+  Collection,
+  unI,
+  svDb,
+  oid,
+  bin,
+  index,
+  searchable,
+} from '@push.rocks/smartdata';
import { ObjectId } from 'mongodb';

@Collection(() => db) // Associate this model with the database instance
@@ -73,9 +87,11 @@ class User extends SmartDataDbDoc<User, User> {
  public id: string = 'unique-user-id'; // Mark 'id' as a unique index

  @svDb()
  @searchable() // Mark 'username' as searchable
  public username: string; // Mark 'username' to be saved in DB

  @svDb()
  @searchable() // Mark 'email' as searchable
  @index() // Create a regular index for this field
  public email: string; // Mark 'email' to be saved in DB

@@ -89,7 +105,7 @@ class User extends SmartDataDbDoc<User, User> {

  @svDb({
    serialize: (data) => JSON.stringify(data), // Custom serialization
-    deserialize: (data) => JSON.parse(data) // Custom deserialization
+    deserialize: (data) => JSON.parse(data), // Custom deserialization
  })
  public preferences: Record<string, any>;

@@ -102,15 +118,18 @@ class User extends SmartDataDbDoc<User, User> {
```

### CRUD Operations

`@push.rocks/smartdata` simplifies CRUD operations with intuitive methods on model instances.

#### Create

```typescript
const newUser = new User('myUsername', 'myEmail@example.com');
await newUser.save(); // Save the new user to the database
```

#### Read

```typescript
// Fetch a single user by a unique attribute
const user = await User.getInstance({ username: 'myUsername' });
@@ -146,6 +165,7 @@ await cursor.close();
```

#### Update

```typescript
// Assuming 'user' is an instance of User
user.email = 'newEmail@example.com';
@@ -154,14 +174,16 @@ await user.save(); // Update the user in the database
// Upsert operations (insert if not exists, update if exists)
const upsertedUser = await User.upsert(
  { id: 'user-123' }, // Query to find the user
-  { // Fields to update or insert
+  {
+    // Fields to update or insert
    username: 'newUsername',
-    email: 'newEmail@example.com'
-  }
+    email: 'newEmail@example.com',
+  },
);
```

#### Delete

```typescript
// Assuming 'user' is an instance of User
await user.delete(); // Delete the user from the database
@@ -169,7 +191,77 @@ await user.delete(); // Delete the user from the database

## Advanced Features

### Search Functionality

SmartData provides powerful search capabilities with a Lucene-like query syntax and robust fallback mechanisms:

```typescript
// Define a model with searchable fields
@Collection(() => db)
class Product extends SmartDataDbDoc<Product, Product> {
  @unI()
  public id: string = 'product-id';

  @svDb()
  @searchable() // Mark this field as searchable
  public name: string;

  @svDb()
  @searchable() // Mark this field as searchable
  public description: string;

  @svDb()
  @searchable() // Mark this field as searchable
  public category: string;

  @svDb()
  public price: number;
}

// Get all fields marked as searchable for a class
const searchableFields = getSearchableFields('Product'); // ['name', 'description', 'category']

// Basic search across all searchable fields
const iPhoneProducts = await Product.searchWithLucene('iPhone');

// Field-specific search
const electronicsProducts = await Product.searchWithLucene('category:Electronics');

// Search with wildcards
const macProducts = await Product.searchWithLucene('Mac*');

// Search in specific fields with partial words
const laptopResults = await Product.searchWithLucene('description:laptop');

// Search is case-insensitive
const results1 = await Product.searchWithLucene('electronics');
const results2 = await Product.searchWithLucene('Electronics');
// results1 and results2 will contain the same documents

// Using boolean operators (requires text index in MongoDB)
const wirelessOrLaptop = await Product.searchWithLucene('wireless OR laptop');

// Negative searches
const electronicsNotSamsung = await Product.searchWithLucene('Electronics NOT Samsung');

// Phrase searches
const exactPhrase = await Product.searchWithLucene('"high-speed blender"');

// Grouping with parentheses
const complexQuery = await Product.searchWithLucene('(wireless OR bluetooth) AND Electronics');
```

The search functionality includes:

- `@searchable()` decorator for marking fields as searchable
- `getSearchableFields()` to retrieve all searchable fields for a class
- `search()` method for basic search (requires MongoDB text index)
- `searchWithLucene()` method with robust fallback mechanisms
- Support for field-specific searches, wildcards, and boolean operators
- Automatic fallback to regex-based search if MongoDB text search fails

### EasyStore

EasyStore provides a simple key-value storage system with automatic persistence:

```typescript
@@ -200,6 +292,7 @@ await store.deleteKey('apiKey');
```

### Distributed Coordination

Built-in support for distributed systems with leader election:

```typescript
@@ -237,16 +330,20 @@ await coordinator.stop();
```

### Real-time Data Watching

Watch for changes in your collections with RxJS integration using MongoDB Change Streams:

```typescript
// Create a watcher for a specific collection with a query filter
-const watcher = await User.watch({
-  active: true // Only watch for changes to active users
-}, {
+const watcher = await User.watch(
+  {
+    active: true, // Only watch for changes to active users
+  },
+  {
    fullDocument: true, // Include the full document in change notifications
-  bufferTimeMs: 100 // Buffer changes for 100ms to reduce notification frequency
-});
+    bufferTimeMs: 100, // Buffer changes for 100ms to reduce notification frequency
+  },
+);

// Subscribe to changes using RxJS
watcher.changeSubject.subscribe((change) => {
@@ -273,6 +370,7 @@ await watcher.stop();
```

### Managed Collections

For more complex data models that require additional context:

```typescript
@@ -295,6 +393,7 @@ class ManagedDoc extends SmartDataDbDoc<ManagedDoc, ManagedDoc> {
```

### Automatic Indexing

Define indexes directly in your model class:

```typescript
@@ -324,6 +423,7 @@ class Product extends SmartDataDbDoc<Product, Product> {
```

### Transaction Support

Use MongoDB transactions for atomic operations:

```typescript
@@ -344,6 +444,7 @@ try {
```

### Deep Object Queries

SmartData provides fully type-safe deep property queries with the `DeepQuery` type:

```typescript
@@ -360,14 +461,14 @@ class UserProfile extends SmartDataDbDoc<UserProfile, UserProfile> {
    address: {
      city: string;
      country: string;
-    }
-  }
+    };
+  };
  };
}

// Type-safe string literals for dot notation
const usersInUSA = await UserProfile.getInstances({
-  'user.details.address.country': 'USA'
+  'user.details.address.country': 'USA',
});

// Fully typed deep queries with the DeepQuery type
@@ -376,7 +477,7 @@ import { DeepQuery } from '@push.rocks/smartdata';
const typedQuery: DeepQuery<UserProfile> = {
  id: 'profile-id',
  'user.details.firstName': 'John',
-  'user.details.address.country': 'USA'
+  'user.details.address.country': 'USA',
};

// TypeScript will error if paths are incorrect
@@ -385,13 +486,14 @@ const results = await UserProfile.getInstances(typedQuery);
// MongoDB query operators are supported
const operatorQuery: DeepQuery<UserProfile> = {
  'user.details.address.country': 'USA',
-  'user.details.address.city': { $in: ['New York', 'Los Angeles'] }
+  'user.details.address.city': { $in: ['New York', 'Los Angeles'] },
};

const filteredResults = await UserProfile.getInstances(operatorQuery);
```

### Document Lifecycle Hooks

Implement custom logic at different stages of a document's lifecycle:

```typescript
@@ -437,26 +539,39 @@ class Order extends SmartDataDbDoc<Order, Order> {
## Best Practices

### Connection Management

- Always call `db.init()` before using any database features
- Use `db.disconnect()` when shutting down your application
- Set appropriate connection pool sizes based on your application's needs

### Document Design

-- Use appropriate decorators (`@svDb`, `@unI`, `@index`) to optimize database operations
+- Use appropriate decorators (`@svDb`, `@unI`, `@index`, `@searchable`) to optimize database operations
- Implement type-safe models by properly extending `SmartDataDbDoc`
- Consider using interfaces to define document structures separately from implementation
- Mark fields that need to be searched with the `@searchable()` decorator

### Search Optimization

- Create MongoDB text indexes for collections that need advanced search operations
- Use `searchWithLucene()` for robust searches with fallback mechanisms
- Prefer field-specific searches when possible for better performance
- Use simple term queries instead of boolean operators if you don't have text indexes

### Performance Optimization

- Use cursors for large datasets instead of loading all documents into memory
- Create appropriate indexes for frequent query patterns
- Use projections to limit the fields returned when you don't need the entire document

### Distributed Systems

- Implement proper error handling for leader election events
- Ensure all instances have synchronized clocks when using time-based coordination
- Use the distributed coordinator's task management features for coordinated operations

### Type Safety

- Take advantage of the `DeepQuery<T>` type for fully type-safe queries
- Define proper types for your document models to enhance IDE auto-completion
- Use generic type parameters to specify exact document types when working with collections
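The "Search Optimization" bullets above assume that a MongoDB text index already exists for `search()` and for boolean operators, but the diff never shows one being created. A minimal sketch using the plain `mongodb` driver; the connection string, database name, and collection name are assumptions for illustration:

```typescript
import { MongoClient } from 'mongodb';

const client = new MongoClient('mongodb://localhost:27017'); // assumed connection string
await client.connect();

// One compound text index over the fields marked @searchable() in the Product example,
// so that $text-based queries such as 'wireless OR laptop' have something to use.
await client
  .db('demo')
  .collection('products')
  .createIndex(
    { name: 'text', description: 'text', category: 'text' },
    { name: 'products_text_search' },
  );

await client.close();
```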
@@ -3,7 +3,10 @@ import * as smartmongo from '@push.rocks/smartmongo';
import type * as taskbuffer from '@push.rocks/taskbuffer';

import * as smartdata from '../ts/index.js';
-import { SmartdataDistributedCoordinator, DistributedClass } from '../ts/smartdata.classes.distributedcoordinator.js'; // path might need adjusting
+import {
+  SmartdataDistributedCoordinator,
+  DistributedClass,
+} from '../ts/classes.distributedcoordinator.js'; // path might need adjusting
const totalInstances = 10;

// =======================================

@@ -49,64 +52,71 @@ tap.test('SmartdataDistributedCoordinator should update leader status', async (t
  await distributedCoordinator.stop();
});

-tap.test('SmartdataDistributedCoordinator should handle distributed task requests', async (tools) => {
+tap.test(
+  'SmartdataDistributedCoordinator should handle distributed task requests',
+  async (tools) => {
    const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);
    await distributedCoordinator.start();

    const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
-      submitterId: "mockSubmitter12345", // Some unique mock submitter ID
+      submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
      requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
-      taskName: "SampleTask",
-      taskVersion: "1.0.0", // Assuming it's a version string
+      taskName: 'SampleTask',
+      taskVersion: '1.0.0', // Assuming it's a version string
      taskExecutionTime: Date.now(),
      taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
      taskExecutionParallel: 5, // Let's assume max 5 parallel executions
-      status: 'requesting'
+      status: 'requesting',
    };

    const response = await distributedCoordinator.fireDistributedTaskRequest(mockTaskRequest);
-    console.log(response) // based on your expected structure for the response
+    console.log(response); // based on your expected structure for the response
    await distributedCoordinator.stop();
-});
+  },
+);

-tap.test('SmartdataDistributedCoordinator should update distributed task requests', async (tools) => {
+tap.test(
+  'SmartdataDistributedCoordinator should update distributed task requests',
+  async (tools) => {
    const distributedCoordinator = new SmartdataDistributedCoordinator(testDb);

    await distributedCoordinator.start();

    const mockTaskRequest: taskbuffer.distributedCoordination.IDistributedTaskRequest = {
-      submitterId: "mockSubmitter12345", // Some unique mock submitter ID
+      submitterId: 'mockSubmitter12345', // Some unique mock submitter ID
      requestResponseId: 'uni879873462hjhfkjhsdf', // Some unique ID for the request-response
-      taskName: "SampleTask",
-      taskVersion: "1.0.0", // Assuming it's a version string
+      taskName: 'SampleTask',
+      taskVersion: '1.0.0', // Assuming it's a version string
      taskExecutionTime: Date.now(),
      taskExecutionTimeout: 60000, // Let's say the timeout is 1 minute (60000 ms)
      taskExecutionParallel: 5, // Let's assume max 5 parallel executions
-      status: 'requesting'
+      status: 'requesting',
    };

    await distributedCoordinator.updateDistributedTaskRequest(mockTaskRequest);
    // Here, we can potentially check if a DB entry got updated or some other side-effect of the update method.
    await distributedCoordinator.stop();
-});
+  },
+);

tap.test('should elect only one leader amongst multiple instances', async (tools) => {
-  const coordinators = Array.from({ length: totalInstances }).map(() => new SmartdataDistributedCoordinator(testDb));
-  await Promise.all(coordinators.map(coordinator => coordinator.start()));
-  const leaders = coordinators.filter(coordinator => coordinator.ownInstance.data.elected);
+  const coordinators = Array.from({ length: totalInstances }).map(
+    () => new SmartdataDistributedCoordinator(testDb),
+  );
+  await Promise.all(coordinators.map((coordinator) => coordinator.start()));
+  const leaders = coordinators.filter((coordinator) => coordinator.ownInstance.data.elected);
  for (const leader of leaders) {
    console.log(leader.ownInstance);
  }
  expect(leaders.length).toEqual(1);

  // stopping clears a coordinator from being elected.
-  await Promise.all(coordinators.map(coordinator => coordinator.stop()));
+  await Promise.all(coordinators.map((coordinator) => coordinator.stop()));
});

tap.test('should clean up', async () => {
  await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.distributedcoordinator.ts`);
  setTimeout(() => process.exit(), 2000);
-})
+});

tap.start({ throwOnError: true });
@@ -1,7 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartmongo from '@push.rocks/smartmongo';
-import { smartunique } from '../ts/smartdata.plugins.js';
+import { smartunique } from '../ts/plugins.js';

const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
test/test.search.ts (new file, 204 lines)

@@ -0,0 +1,204 @@
import { tap, expect } from '@push.rocks/tapbundle';
import * as smartmongo from '@push.rocks/smartmongo';
import { smartunique } from '../ts/plugins.js';

// Import the smartdata library
import * as smartdata from '../ts/index.js';
import { searchable, getSearchableFields } from '../ts/classes.doc.js';

// Set up database connection
let smartmongoInstance: smartmongo.SmartMongo;
let testDb: smartdata.SmartdataDb;

// Define a test class with searchable fields using the standard SmartDataDbDoc
@smartdata.Collection(() => testDb)
class Product extends smartdata.SmartDataDbDoc<Product, Product> {
  @smartdata.unI()
  public id: string = smartunique.shortId();

  @smartdata.svDb()
  @searchable()
  public name: string;

  @smartdata.svDb()
  @searchable()
  public description: string;

  @smartdata.svDb()
  @searchable()
  public category: string;

  @smartdata.svDb()
  public price: number;

  constructor(nameArg: string, descriptionArg: string, categoryArg: string, priceArg: number) {
    super();
    this.name = nameArg;
    this.description = descriptionArg;
    this.category = categoryArg;
    this.price = priceArg;
  }
}

tap.test('should create a test database instance', async () => {
  smartmongoInstance = await smartmongo.SmartMongo.createAndStart();
  testDb = new smartdata.SmartdataDb(await smartmongoInstance.getMongoDescriptor());
  await testDb.init();
});

tap.test('should create test products with searchable fields', async () => {
  // Create several products with different fields to search
  const products = [
    new Product('iPhone 12', 'Latest iPhone with A14 Bionic chip', 'Electronics', 999),
    new Product('MacBook Pro', 'Powerful laptop for professionals', 'Electronics', 1999),
    new Product('AirPods', 'Wireless earbuds with noise cancellation', 'Electronics', 249),
    new Product('Galaxy S21', 'Samsung flagship phone with great camera', 'Electronics', 899),
    new Product('Kindle Paperwhite', 'E-reader with built-in light', 'Books', 129),
    new Product('Harry Potter', 'Fantasy book series about wizards', 'Books', 49),
    new Product('Coffee Maker', 'Automatic drip coffee machine', 'Kitchen', 89),
    new Product('Blender', 'High-speed blender for smoothies', 'Kitchen', 129),
  ];

  // Save all products to the database
  for (const product of products) {
    await product.save();
  }

  // Verify that we can get all products
  const allProducts = await Product.getInstances({});
  expect(allProducts.length).toEqual(products.length);
  console.log(`Successfully created and saved ${allProducts.length} products`);
});

tap.test('should retrieve searchable fields for a class', async () => {
  // Use the getSearchableFields function to verify our searchable fields
  const searchableFields = getSearchableFields('Product');
  console.log('Searchable fields:', searchableFields);

  expect(searchableFields.length).toEqual(3);
  expect(searchableFields).toContain('name');
  expect(searchableFields).toContain('description');
  expect(searchableFields).toContain('category');
});

tap.test('should search products by exact field match', async () => {
  // Basic field exact match search
  const electronicsProducts = await Product.getInstances({ category: 'Electronics' });
  console.log(`Found ${electronicsProducts.length} products in Electronics category`);

  expect(electronicsProducts.length).toEqual(4);
});

tap.test('should search products by basic search method', async () => {
  // Using the basic search method with simple Lucene query
  try {
    const iPhoneResults = await Product.search('iPhone');
    console.log(`Found ${iPhoneResults.length} products matching 'iPhone' using basic search`);

    expect(iPhoneResults.length).toEqual(1);
    expect(iPhoneResults[0].name).toEqual('iPhone 12');
  } catch (error) {
    console.error('Basic search error:', error.message);
    // If basic search fails, we'll demonstrate the enhanced approach in later tests
    console.log('Will test with enhanced searchWithLucene method next');
  }
});

tap.test('should search products with searchWithLucene method', async () => {
  // Using the robust searchWithLucene method
  const wirelessResults = await Product.searchWithLucene('wireless');
  console.log(
    `Found ${wirelessResults.length} products matching 'wireless' using searchWithLucene`,
  );

  expect(wirelessResults.length).toEqual(1);
  expect(wirelessResults[0].name).toEqual('AirPods');
});

tap.test('should search products by category with searchWithLucene', async () => {
  // Using field-specific search with searchWithLucene
  const kitchenResults = await Product.searchWithLucene('category:Kitchen');
  console.log(`Found ${kitchenResults.length} products in Kitchen category using searchWithLucene`);

  expect(kitchenResults.length).toEqual(2);
  expect(kitchenResults[0].category).toEqual('Kitchen');
  expect(kitchenResults[1].category).toEqual('Kitchen');
});

tap.test('should search products with partial word matches', async () => {
  // Testing partial word matches
  const proResults = await Product.searchWithLucene('Pro');
  console.log(`Found ${proResults.length} products matching 'Pro'`);

  // Should match both "MacBook Pro" and "professionals" in description
  expect(proResults.length).toBeGreaterThan(0);
});

tap.test('should search across multiple searchable fields', async () => {
  // Test searching across all searchable fields
  const bookResults = await Product.searchWithLucene('book');
  console.log(`Found ${bookResults.length} products matching 'book' across all fields`);

  // Should match "MacBook" in name and "Books" in category
  expect(bookResults.length).toBeGreaterThan(1);
});

tap.test('should handle case insensitive searches', async () => {
  // Test case insensitivity
  const electronicsResults = await Product.searchWithLucene('electronics');
  const ElectronicsResults = await Product.searchWithLucene('Electronics');

  console.log(`Found ${electronicsResults.length} products matching lowercase 'electronics'`);
  console.log(`Found ${ElectronicsResults.length} products matching capitalized 'Electronics'`);

  // Both searches should return the same results
  expect(electronicsResults.length).toEqual(ElectronicsResults.length);
});

tap.test('should demonstrate search fallback mechanisms', async () => {
  console.log('\n====== FALLBACK MECHANISM DEMONSTRATION ======');
  console.log('If MongoDB query fails, searchWithLucene will:');
  console.log('1. Try using basic MongoDB filters');
  console.log('2. Fall back to field-specific searches');
  console.log('3. As last resort, perform in-memory filtering');
  console.log('This ensures robust search even with complex queries');
  console.log('==============================================\n');

  // Use a simpler term that should be found in descriptions
  // Avoid using "OR" operator which requires a text index
  const results = await Product.searchWithLucene('high');
  console.log(`Found ${results.length} products matching 'high'`);

  // "High-speed blender" contains "high"
  expect(results.length).toBeGreaterThan(0);

  // Try another fallback example that won't need $text
  const powerfulResults = await Product.searchWithLucene('powerful');
  console.log(`Found ${powerfulResults.length} products matching 'powerful'`);

  // "Powerful laptop for professionals" contains "powerful"
  expect(powerfulResults.length).toBeGreaterThan(0);
});

tap.test('should explain the advantages of the integrated approach', async () => {
  console.log('\n====== INTEGRATED SEARCH APPROACH BENEFITS ======');
  console.log('1. No separate class hierarchy - keeps code simple');
  console.log('2. Enhanced convertFilterForMongoDb handles MongoDB operators');
  console.log('3. Robust fallback mechanisms ensure searches always work');
  console.log('4. searchWithLucene provides powerful search capabilities');
  console.log('5. Backwards compatible with existing code');
  console.log('================================================\n');

  expect(true).toEqual(true);
});

tap.test('close database connection', async () => {
  await testDb.mongoDb.dropDatabase();
  await testDb.close();
  if (smartmongoInstance) {
    await smartmongoInstance.stopAndDumpToDir(`.nogit/dbdump/test.search.ts`);
  }
  setTimeout(() => process.exit(), 2000);
});

tap.start({ throwOnError: true });
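The "fallback mechanism" test above only logs what `searchWithLucene()` is supposed to do when a `$text` query is not possible. As a rough illustration of the last-resort step it describes (in-memory, case-insensitive filtering over the `@searchable()` fields), here is a standalone sketch; it is not the library's actual implementation, and the helper name and sample data are made up for the example:

```typescript
// Hypothetical helper illustrating the in-memory fallback idea from the test log:
// keep documents whose searchable fields match a case-insensitive regex of the term.
function inMemoryFallback<T extends Record<string, any>>(
  docs: T[],
  searchableFields: string[],
  term: string,
): T[] {
  const pattern = new RegExp(term.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'i');
  return docs.filter((doc) =>
    searchableFields.some((field) => typeof doc[field] === 'string' && pattern.test(doc[field])),
  );
}

// Example: matches the "High-speed blender for smoothies" product from the test data.
const hits = inMemoryFallback(
  [{ name: 'Blender', description: 'High-speed blender for smoothies', category: 'Kitchen' }],
  ['name', 'description', 'category'],
  'high',
);
console.log(hits.length); // 1
```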
test/test.ts (10 lines changed)

@@ -1,7 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartmongo from '@push.rocks/smartmongo';
-import { smartunique } from '../ts/smartdata.plugins.js';
+import { smartunique } from '../ts/plugins.js';

import * as mongodb from 'mongodb';

@@ -97,7 +97,7 @@ tap.test('should save the car to the db', async (toolsArg) => {
    console.log(
      `Filled database with ${counter} of ${totalCars} Cars and memory usage ${
        process.memoryUsage().rss / 1e6
-      } MB`
+      } MB`,
    );
  }
} while (counter < totalCars);

@@ -116,7 +116,7 @@ tap.test('expect to get instance of Car with shallow match', async () => {
    console.log(
      `performed ${counter} of ${totalQueryCycles} total query cycles: took ${
        Date.now() - timeStart
-      }ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
+      }ms to query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
    );
  }
  expect(myCars[0].deepData.sodeep).toEqual('yes');

@@ -139,7 +139,7 @@ tap.test('expect to get instance of Car with deep match', async () => {
    console.log(
      `performed ${counter} of ${totalQueryCycles} total query cycles: took ${
        Date.now() - timeStart
-      }ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`
+      }ms to deep query a set of 2000 with memory footprint ${process.memoryUsage().rss / 1e6} MB`,
    );
  }
  expect(myCars2[0].deepData.sodeep).toEqual('yes');

@@ -209,7 +209,7 @@ tap.test('should store a new Truck', async () => {
tap.test('should return a count', async () => {
  const truckCount = await Truck.getCount();
  expect(truckCount).toEqual(1);
-})
+});

tap.test('should use a cursor', async () => {
  const cursor = await Car.getCursor({});
@@ -1,7 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartmongo from '@push.rocks/smartmongo';
-import { smartunique } from '../ts/smartdata.plugins.js';
+import { smartunique } from '../ts/plugins.js';

const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
@@ -1,7 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartmongo from '@push.rocks/smartmongo';
-import { smartunique } from '../ts/smartdata.plugins.js';
+import { smartunique } from '../ts/plugins.js';

const testQenv = new Qenv(process.cwd(), process.cwd() + '/.nogit/');
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smartdata',
-  version: '5.3.0',
+  version: '5.8.2',
  description: 'An advanced library for NoSQL data organization and manipulation using TypeScript with support for MongoDB, data validation, collections, and custom data types.'
}
@@ -1,9 +1,9 @@
-import * as plugins from './smartdata.plugins.js';
-import { SmartdataDb } from './smartdata.classes.db.js';
-import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
-import { SmartDataDbDoc } from './smartdata.classes.doc.js';
-import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
-import { CollectionFactory } from './smartdata.classes.collectionfactory.js';
+import * as plugins from './plugins.js';
+import { SmartdataDb } from './classes.db.js';
+import { SmartdataDbCursor } from './classes.cursor.js';
+import { SmartDataDbDoc, type IIndexOptions } from './classes.doc.js';
+import { SmartdataDbWatcher } from './classes.watcher.js';
+import { CollectionFactory } from './classes.collectionfactory.js';

export interface IFindOptions {
  limit?: number;

@@ -49,7 +49,7 @@ export interface IManager {
  db: SmartdataDb;
}

-export const setDefaultManagerForDoc = <T>(managerArg: IManager, dbDocArg: T): T => {
+export const setDefaultManagerForDoc = <T,>(managerArg: IManager, dbDocArg: T): T => {
  (dbDocArg as any).prototype.defaultManager = managerArg;
  return dbDocArg;
};

@@ -127,6 +127,7 @@ export class SmartdataCollection<T> {
  public collectionName: string;
  public smartdataDb: SmartdataDb;
  public uniqueIndexes: string[] = [];
  public regularIndexes: Array<{field: string, options: IIndexOptions}> = [];

  constructor(classNameArg: string, smartDataDbArg: SmartdataDb) {
    // tell the collection where it belongs

@@ -170,6 +171,24 @@ export class SmartdataCollection<T> {
    }
  }

  /**
   * creates regular indexes for the collection
   */
  public createRegularIndexes(indexesArg: Array<{field: string, options: IIndexOptions}> = []) {
    for (const indexDef of indexesArg) {
      // Check if we've already created this index
      const indexKey = indexDef.field;
      if (!this.regularIndexes.some(i => i.field === indexKey)) {
        this.mongoDbCollection.createIndex(
          { [indexDef.field]: 1 }, // Simple single-field index
          indexDef.options
        );
        // Track that we've created this index
        this.regularIndexes.push(indexDef);
      }
    }
  }

  /**
   * adds a validation function that all newly inserted and updated objects have to pass
   */

@@ -190,7 +209,7 @@ export class SmartdataCollection<T> {

  public async getCursor(
    filterObjectArg: any,
-    dbDocArg: typeof SmartDataDbDoc
+    dbDocArg: typeof SmartDataDbDoc,
  ): Promise<SmartdataDbCursor<any>> {
    await this.init();
    const cursor = this.mongoDbCollection.find(filterObjectArg);

@@ -213,7 +232,7 @@ export class SmartdataCollection<T> {
   */
  public async watch(
    filterObject: any,
-    smartdataDbDocArg: typeof SmartDataDbDoc
+    smartdataDbDocArg: typeof SmartDataDbDoc,
  ): Promise<SmartdataDbWatcher> {
    await this.init();
    const changeStream = this.mongoDbCollection.watch(

@@ -224,7 +243,7 @@ export class SmartdataCollection<T> {
      ],
      {
        fullDocument: 'updateLookup',
-      }
+      },
    );
    const smartdataWatcher = new SmartdataDbWatcher(changeStream, smartdataDbDocArg);
    await smartdataWatcher.readyDeferred.promise;

@@ -238,6 +257,12 @@ export class SmartdataCollection<T> {
    await this.init();
    await this.checkDoc(dbDocArg);
    this.markUniqueIndexes(dbDocArg.uniqueIndexes);

    // Create regular indexes if available
    if (dbDocArg.regularIndexes && dbDocArg.regularIndexes.length > 0) {
      this.createRegularIndexes(dbDocArg.regularIndexes);
    }

    const saveableObject = await dbDocArg.createSavableObject();
    const result = await this.mongoDbCollection.insertOne(saveableObject);
    return result;

@@ -261,7 +286,7 @@ export class SmartdataCollection<T> {
    const result = await this.mongoDbCollection.updateOne(
      identifiableObject,
      { $set: updateableObject },
-      { upsert: true }
+      { upsert: true },
    );
    return result;
  }
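To connect the two halves of this change: the `@index()` decorator (shown in the classes.doc.ts hunks further down) fills `regularIndexes` on the document, and `createRegularIndexes()` above turns each entry into a single-field ascending index on first insert. A hedged usage sketch; the `Session` model, the TTL value, and the lazily assigned `db` are illustrative assumptions:

```typescript
import * as smartdata from '@push.rocks/smartdata';

// assumption: `db` is created and initialized elsewhere (new SmartdataDb(...) + await db.init())
let db: smartdata.SmartdataDb;

@smartdata.Collection(() => db) // resolved lazily, so `db` only needs to exist at save time
class Session extends smartdata.SmartDataDbDoc<Session, Session> {
  @smartdata.unI()
  public id: string = 'session-' + Math.random().toString(36).slice(2);

  @smartdata.svDb()
  @smartdata.index() // plain ascending index: createIndex({ userId: 1 }, {})
  public userId: string;

  @smartdata.svDb()
  @smartdata.index({ expireAfterSeconds: 3600 }) // IIndexOptions passed straight to createIndex
  public createdAt: Date = new Date();
}

// On the first insert, SmartdataCollection.createRegularIndexes() walks `regularIndexes`,
// creates each single-field index once, and records it so later inserts skip the call.
async function openSession(userId: string) {
  const session = new Session();
  session.userId = userId;
  await session.save();
  return session;
}
```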
@@ -1,6 +1,6 @@
-import * as plugins from './smartdata.plugins.js';
-import { SmartdataCollection } from './smartdata.classes.collection.js';
-import { SmartdataDb } from './smartdata.classes.db.js';
+import * as plugins from './plugins.js';
+import { SmartdataCollection } from './classes.collection.js';
+import { SmartdataDb } from './classes.db.js';

export class CollectionFactory {
  public collections: { [key: string]: SmartdataCollection<any> } = {};
@@ -1,4 +1,4 @@
-import * as plugins from './smartdata.plugins.js';
+import * as plugins from './plugins.js';

export const getNewUniqueId = async (prefixArg?: string) => {
  return plugins.smartunique.uni(prefixArg);
@@ -1,5 +1,5 @@
-import { SmartDataDbDoc } from './smartdata.classes.doc.js';
-import * as plugins from './smartdata.plugins.js';
+import { SmartDataDbDoc } from './classes.doc.js';
+import * as plugins from './plugins.js';

/**
 * a wrapper for the native mongodb cursor. Exposes better

@@ -15,14 +15,14 @@ export class SmartdataDbCursor<T = any> {
    this.smartdataDbDoc = dbDocArg;
  }

-  public async next(closeAtEnd = true) {
+  public async next(closeAtEnd = true): Promise<T> {
    const result = this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(
-      await this.mongodbCursor.next()
+      await this.mongodbCursor.next(),
    );
    if (!result && closeAtEnd) {
      await this.close();
    }
-    return result;
+    return result as T;
  }

  public async forEach(forEachFuncArg: (itemArg: T) => Promise<any>, closeCursorAtEnd = true) {

@@ -40,6 +40,11 @@ export class SmartdataDbCursor<T = any> {
    }
  }

  public async toArray(): Promise<T[]> {
    const result = await this.mongodbCursor.toArray();
    return result.map((itemArg) => this.smartdataDbDoc.createInstanceFromMongoDbNativeDoc(itemArg)) as T[];
  }

  public async close() {
    await this.mongodbCursor.close();
  }
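With `next()` now typed and `toArray()` added, the wrapped cursor can be consumed either one document at a time or all at once. A small sketch, assuming a `Car` document class like the one used in test/test.ts and a `db` initialized elsewhere:

```typescript
import * as smartdata from '@push.rocks/smartdata';

let db: smartdata.SmartdataDb; // assumption: initialized elsewhere

@smartdata.Collection(() => db)
class Car extends smartdata.SmartDataDbDoc<Car, Car> {
  @smartdata.unI() public id: string = 'car-' + Math.random().toString(36).slice(2);
  @smartdata.svDb() public color: string = 'red';
}

async function listRedCars() {
  // Incremental: next() resolves to a typed Car; once the cursor is exhausted it
  // returns a falsy value and closes itself (closeAtEnd defaults to true).
  const cursor = await Car.getCursor({ color: 'red' });
  let car = await cursor.next();
  while (car) {
    console.log(car.id);
    car = await cursor.next();
  }

  // Bulk: toArray() drains the native cursor and maps every raw document through
  // createInstanceFromMongoDbNativeDoc, so the result is Car instances, not raw BSON.
  return (await Car.getCursor({ color: 'red' })).toArray();
}
```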
@@ -1,9 +1,9 @@
-import * as plugins from './smartdata.plugins.js';
+import * as plugins from './plugins.js';

-import { SmartdataCollection } from './smartdata.classes.collection.js';
-import { EasyStore } from './smartdata.classes.easystore.js';
+import { SmartdataCollection } from './classes.collection.js';
+import { EasyStore } from './classes.easystore.js';

-import { logger } from './smartdata.logging.js';
+import { logger } from './logging.js';

/**
 * interface - indicates the connection status of the db
@@ -1,8 +1,8 @@
-import * as plugins from './smartdata.plugins.js';
-import { SmartdataDb } from './smartdata.classes.db.js';
-import { managed, setDefaultManagerForDoc } from './smartdata.classes.collection.js';
-import { SmartDataDbDoc, svDb, unI } from './smartdata.classes.doc.js';
-import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
+import * as plugins from './plugins.js';
+import { SmartdataDb } from './classes.db.js';
+import { managed, setDefaultManagerForDoc } from './classes.collection.js';
+import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';
+import { SmartdataDbWatcher } from './classes.watcher.js';

@managed()
export class DistributedClass extends SmartDataDbDoc<DistributedClass, DistributedClass> {

@@ -139,7 +139,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
      const eligibleLeader = leaders.find(
        (leader) =>
          leader.data.lastUpdated >=
-          Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 20 })
+          Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 20 }),
      );
      return eligibleLeader;
    });

@@ -178,16 +178,14 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
      console.log('bidding code stored.');
    });
    console.log(`bidding for leadership...`);
-    await plugins.smartdelay.delayFor(
-      plugins.smarttime.getMilliSecondsFromUnits({ seconds: 5 })
-    );
+    await plugins.smartdelay.delayFor(plugins.smarttime.getMilliSecondsFromUnits({ seconds: 5 }));
    await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
      let biddingInstances = await DistributedClass.getInstances({});
      biddingInstances = biddingInstances.filter(
        (instanceArg) =>
          instanceArg.data.status === 'bidding' &&
          instanceArg.data.lastUpdated >=
-            Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 10 })
+            Date.now() - plugins.smarttime.getMilliSecondsFromUnits({ seconds: 10 }),
      );
      console.log(`found ${biddingInstances.length} bidding instances...`);
      this.ownInstance.data.elected = true;

@@ -242,7 +240,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
        for (const instance of allInstances) {
          if (instance.data.status === 'stopped') {
            await instance.delete();
-          };
+          }
        }
        await plugins.smartdelay.delayFor(10000);
      }

@@ -250,7 +248,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu

  // abstract implemented methods
  public async fireDistributedTaskRequest(
-    taskRequestArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest
+    taskRequestArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
  ): Promise<plugins.taskbuffer.distributedCoordination.IDistributedTaskRequestResult> {
    await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
      if (!this.ownInstance) {

@@ -277,7 +275,7 @@ export class SmartdataDistributedCoordinator extends plugins.taskbuffer.distribu
  }

  public async updateDistributedTaskRequest(
-    infoBasisArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest
+    infoBasisArg: plugins.taskbuffer.distributedCoordination.IDistributedTaskRequest,
  ): Promise<void> {
    await this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
      const existingInfoBasis = this.ownInstance.data.taskRequests.find((infoBasisItem) => {
@ -1,12 +1,16 @@
|
||||
import * as plugins from './smartdata.plugins.js';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
import { SmartdataDb } from './smartdata.classes.db.js';
|
||||
import { SmartdataDbCursor } from './smartdata.classes.cursor.js';
|
||||
import { type IManager, SmartdataCollection } from './smartdata.classes.collection.js';
|
||||
import { SmartdataDbWatcher } from './smartdata.classes.watcher.js';
|
||||
import { SmartdataDb } from './classes.db.js';
|
||||
import { SmartdataDbCursor } from './classes.cursor.js';
|
||||
import { type IManager, SmartdataCollection } from './classes.collection.js';
|
||||
import { SmartdataDbWatcher } from './classes.watcher.js';
|
||||
import { SmartdataLuceneAdapter } from './classes.lucene.adapter.js';
|
||||
|
||||
export type TDocCreation = 'db' | 'new' | 'mixed';
|
||||
|
||||
// Set of searchable fields for each class
|
||||
const searchableFieldsMap = new Map<string, Set<string>>();
|
||||
|
||||
export function globalSvDb() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called svDb() on >${target.constructor.name}.${key}<`);
|
||||
@ -30,6 +34,34 @@ export function svDb() {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* searchable - marks a property as searchable with Lucene query syntax
|
||||
*/
|
||||
export function searchable() {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called searchable() on >${target.constructor.name}.${key}<`);
|
||||
|
||||
// Initialize the set for this class if it doesn't exist
|
||||
const className = target.constructor.name;
|
||||
if (!searchableFieldsMap.has(className)) {
|
||||
searchableFieldsMap.set(className, new Set<string>());
|
||||
}
|
||||
|
||||
// Add the property to the searchable fields set
|
||||
searchableFieldsMap.get(className).add(key);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get searchable fields for a class
|
||||
*/
|
||||
export function getSearchableFields(className: string): string[] {
|
||||
if (!searchableFieldsMap.has(className)) {
|
||||
return [];
|
||||
}
|
||||
return Array.from(searchableFieldsMap.get(className));
|
||||
}
|
||||
|
||||
/**
|
||||
* unique index - decorator to mark a unique index
|
||||
*/
|
||||
@ -51,7 +83,56 @@ export function unI() {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for MongoDB indexes
|
||||
*/
|
||||
export interface IIndexOptions {
|
||||
background?: boolean;
|
||||
unique?: boolean;
|
||||
sparse?: boolean;
|
||||
expireAfterSeconds?: number;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* index - decorator to mark a field for regular indexing
|
||||
*/
|
||||
export function index(options?: IIndexOptions) {
|
||||
return (target: SmartDataDbDoc<unknown, unknown>, key: string) => {
|
||||
console.log(`called index() on >${target.constructor.name}.${key}<`);
|
||||
|
||||
// Initialize regular indexes array if it doesn't exist
|
||||
if (!target.regularIndexes) {
|
||||
target.regularIndexes = [];
|
||||
}
|
||||
|
||||
// Add this field to regularIndexes with its options
|
||||
target.regularIndexes.push({
|
||||
field: key,
|
||||
options: options || {}
|
||||
});
|
||||
|
||||
// Also ensure it's marked as saveable
|
||||
if (!target.saveableProperties) {
|
||||
target.saveableProperties = [];
|
||||
}
|
||||
|
||||
if (!target.saveableProperties.includes(key)) {
|
||||
target.saveableProperties.push(key);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const convertFilterForMongoDb = (filterArg: { [key: string]: any }) => {
|
||||
// Special case: detect MongoDB operators and pass them through directly
|
||||
const topLevelOperators = ['$and', '$or', '$nor', '$not', '$text', '$where', '$regex'];
|
||||
for (const key of Object.keys(filterArg)) {
|
||||
if (topLevelOperators.includes(key)) {
|
||||
return filterArg; // Return the filter as-is for MongoDB operators
|
||||
}
|
||||
}
|
||||
|
||||
// Original conversion logic for non-MongoDB query objects
|
||||
const convertedFilter: { [key: string]: any } = {};
|
||||
|
||||
const convertFilterArgument = (keyPathArg2: string, filterArg2: any) => {
|
||||
@ -94,7 +175,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
  // STATIC
  public static createInstanceFromMongoDbNativeDoc<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    mongoDbNativeDocArg: any
    mongoDbNativeDocArg: any,
  ): T {
    const newInstance = new this();
    (newInstance as any).creationStatus = 'db';
@ -112,7 +193,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public static async getInstances<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
  ): Promise<T[]> {
    const foundDocs = await (this as any).collection.findAll(convertFilterForMongoDb(filterArg));
    const returnArray = [];
@ -131,7 +212,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public static async getInstance<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
  ): Promise<T> {
    const foundDoc = await (this as any).collection.findOne(convertFilterForMongoDb(filterArg));
    if (foundDoc) {
@ -145,7 +226,10 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
  /**
   * get a unique id prefixed with the class name
   */
  public static async getNewId<T = any>(this: plugins.tsclass.typeFest.Class<T>, lengthArg: number = 20) {
  public static async getNewId<T = any>(
    this: plugins.tsclass.typeFest.Class<T>,
    lengthArg: number = 20,
  ) {
    return `${(this as any).className}:${plugins.smartunique.shortId(lengthArg)}`;
  }

@ -155,16 +239,30 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public static async getCursor<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
  ) {
    const collection: SmartdataCollection<T> = (this as any).collection;
    const cursor: SmartdataDbCursor<T> = await collection.getCursor(
      convertFilterForMongoDb(filterArg),
      this as any as typeof SmartDataDbDoc
      this as any as typeof SmartDataDbDoc,
    );
    return cursor;
  }

  public static async getCursorExtended<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
    modifierFunction = (cursorArg: plugins.mongodb.FindCursor<plugins.mongodb.WithId<plugins.mongodb.BSON.Document>>) => cursorArg,
  ): Promise<SmartdataDbCursor<T>> {
    const collection: SmartdataCollection<T> = (this as any).collection;
    await collection.init();
    let cursor: plugins.mongodb.FindCursor<any> = collection.mongoDbCollection.find(
      convertFilterForMongoDb(filterArg),
    );
    cursor = modifierFunction(cursor);
    return new SmartdataDbCursor<T>(cursor, this as any as typeof SmartDataDbDoc);
  }

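A usage sketch for the new getCursorExtended(): the modifier function receives the raw MongoDB FindCursor, so driver-level operators such as sort and limit can be applied before the cursor is wrapped. Car is a hypothetical document class:

const cursor = await Car.getCursorExtended(
  { brand: 'Volvo' },
  (rawCursor) => rawCursor.sort({ price: -1 }).limit(10),
);
await cursor.forEach(async (car) => {
  console.log(car.id);
});
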
  /**
   * watch the collection
   * @param this
@ -173,12 +271,12 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public static async watch<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
  ) {
    const collection: SmartdataCollection<T> = (this as any).collection;
    const watcher: SmartdataDbWatcher<T> = await collection.watch(
      convertFilterForMongoDb(filterArg),
      this as any
      this as any,
    );
    return watcher;
  }
@ -190,7 +288,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
  public static async forEach<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T>,
    forEachFunction: (itemArg: T) => Promise<any>
    forEachFunction: (itemArg: T) => Promise<any>,
  ) {
    const cursor: SmartdataDbCursor<T> = await (this as any).getCursor(filterArg);
    await cursor.forEach(forEachFunction);
@ -201,12 +299,133 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public static async getCount<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    filterArg: plugins.tsclass.typeFest.PartialDeep<T> = ({} as any)
    filterArg: plugins.tsclass.typeFest.PartialDeep<T> = {} as any,
  ) {
    const collection: SmartdataCollection<T> = (this as any).collection;
    return await collection.getCount(filterArg);
  }

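Illustrative calls for the two bulk helpers above, again using a hypothetical Car document class:

await Car.forEach({ brand: 'Volvo' }, async (car) => {
  console.log(car.id);
});

const volvoCount = await Car.getCount({ brand: 'Volvo' });
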
  /**
   * Create a MongoDB filter from a Lucene query string
   * @param luceneQuery Lucene query string
   * @returns MongoDB query object
   */
  public static createSearchFilter<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    luceneQuery: string,
  ): any {
    const className = (this as any).className || this.name;
    const searchableFields = getSearchableFields(className);

    if (searchableFields.length === 0) {
      throw new Error(`No searchable fields defined for class ${className}`);
    }

    const adapter = new SmartdataLuceneAdapter(searchableFields);
    return adapter.convert(luceneQuery);
  }

  /**
   * Search documents using Lucene query syntax
   * @param luceneQuery Lucene query string
   * @returns Array of matching documents
   */
  public static async search<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    luceneQuery: string,
  ): Promise<T[]> {
    const filter = (this as any).createSearchFilter(luceneQuery);
    return await (this as any).getInstances(filter);
  }

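Assuming the hypothetical Product class from earlier, with name and description marked @searchable(), these methods would behave roughly as follows:

const filter = Product.createSearchFilter('name:iPhone');
// roughly: { name: { $regex: 'iPhone', $options: 'i' } }

const hits = await Product.search('iPhone');
// a bare term expands to an $or regex across all searchable fields before
// being passed to getInstances()
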
  /**
   * Search documents using Lucene query syntax with robust error handling
   * @param luceneQuery The Lucene query string to search with
   * @returns Array of matching documents
   */
  public static async searchWithLucene<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    luceneQuery: string,
  ): Promise<T[]> {
    try {
      const className = (this as any).className || this.name;
      const searchableFields = getSearchableFields(className);

      if (searchableFields.length === 0) {
        console.warn(
          `No searchable fields defined for class ${className}, falling back to simple search`,
        );
        return (this as any).searchByTextAcrossFields(luceneQuery);
      }

      // Simple term search optimization
      if (
        !luceneQuery.includes(':') &&
        !luceneQuery.includes(' AND ') &&
        !luceneQuery.includes(' OR ') &&
        !luceneQuery.includes(' NOT ')
      ) {
        return (this as any).searchByTextAcrossFields(luceneQuery);
      }

      // Try to use the Lucene-to-MongoDB conversion
      const filter = (this as any).createSearchFilter(luceneQuery);
      return await (this as any).getInstances(filter);
    } catch (error) {
      console.error(`Error in searchWithLucene: ${error.message}`);
      return (this as any).searchByTextAcrossFields(luceneQuery);
    }
  }

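The error-handling strategy above can be summarized with two hedged examples (Product is still the hypothetical class from earlier):

// A bare term skips the Lucene parser entirely and uses the regex fallback:
await Product.searchWithLucene('galaxy');

// A fielded boolean query is converted via createSearchFilter(); if parsing or
// conversion throws, the call falls back to searchByTextAcrossFields():
await Product.searchWithLucene('name:galaxy AND description:5G');
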
  /**
   * Search by text across all searchable fields (fallback method)
   * @param searchText The text to search for in all searchable fields
   * @returns Array of matching documents
   */
  private static async searchByTextAcrossFields<T>(
    this: plugins.tsclass.typeFest.Class<T>,
    searchText: string,
  ): Promise<T[]> {
    try {
      const className = (this as any).className || this.name;
      const searchableFields = getSearchableFields(className);

      // Fallback to direct filter if we have searchable fields
      if (searchableFields.length > 0) {
        // Create a simple $or query with regex for each field
        const orConditions = searchableFields.map((field) => ({
          [field]: { $regex: searchText, $options: 'i' },
        }));

        const filter = { $or: orConditions };

        try {
          // Try with MongoDB filter first
          return await (this as any).getInstances(filter);
        } catch (error) {
          console.warn('MongoDB filter failed, falling back to in-memory search');
        }
      }

      // Last resort: get all and filter in memory
      const allDocs = await (this as any).getInstances({});
      const lowerSearchText = searchText.toLowerCase();

      return allDocs.filter((doc: any) => {
        for (const field of searchableFields) {
          const value = doc[field];
          if (value && typeof value === 'string' && value.toLowerCase().includes(lowerSearchText)) {
            return true;
          }
        }
        return false;
      });
    } catch (error) {
      console.error(`Error in searchByTextAcrossFields: ${error.message}`);
      return [];
    }
  }

  // INSTANCE

  /**
@ -218,13 +437,13 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   * updated from db in any case where doc comes from db
   */
  @globalSvDb()
  _createdAt: string = (new Date()).toISOString();
  _createdAt: string = new Date().toISOString();

  /**
   * will be updated everytime the doc is saved
   */
  @globalSvDb()
  _updatedAt: string = (new Date()).toISOString();
  _updatedAt: string = new Date().toISOString();

  /**
   * an array of saveable properties of ALL doc
@ -236,6 +455,11 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public uniqueIndexes: string[];

  /**
   * regular indexes with their options
   */
  public regularIndexes: Array<{field: string, options: IIndexOptions}> = [];

  /**
   * an array of saveable properties of a specific doc
   */
@ -265,7 +489,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
    const self: any = this;
    let dbResult: any;

    this._updatedAt = (new Date()).toISOString();
    this._updatedAt = new Date().toISOString();

    switch (this.creationStatus) {
      case 'db':
@ -321,10 +545,7 @@ export class SmartDataDbDoc<T extends TImplements, TImplements, TManager extends
   */
  public async createSavableObject(): Promise<TImplements> {
    const saveableObject: unknown = {}; // is not exposed to outside, so any is ok here
    const saveableProperties = [
      ...this.globalSaveableProperties,
      ...this.saveableProperties
    ]
    const saveableProperties = [...this.globalSaveableProperties, ...this.saveableProperties];
    for (const propertyNameString of saveableProperties) {
      saveableObject[propertyNameString] = this[propertyNameString];
    }
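To tie the instance-side pieces together: _createdAt is set when an instance is constructed, _updatedAt is refreshed before the document is written, and createSavableObject() merges the global properties with the class-level @svDb() properties. A hedged sketch using the hypothetical Product class from earlier:

const product = new Product();
product.name = 'Phone';

const savable = await product.createSavableObject();
// roughly: { _createdAt: '...', _updatedAt: '...', id: '...', name: 'Phone', description: undefined }
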
@ -1,7 +1,7 @@
import * as plugins from './smartdata.plugins.js';
import { Collection } from './smartdata.classes.collection.js';
import { SmartdataDb } from './smartdata.classes.db.js';
import { SmartDataDbDoc, svDb, unI } from './smartdata.classes.doc.js';
import * as plugins from './plugins.js';
import { Collection } from './classes.collection.js';
import { SmartdataDb } from './classes.db.js';
import { SmartDataDbDoc, svDb, unI } from './classes.doc.js';

/**
 * EasyStore allows the storage of easy objects. It also allows easy sharing of the object between different instances
@ -41,7 +41,7 @@ export class EasyStore<T> {
  private async getEasyStore(): Promise<InstanceType<typeof this.easyStoreClass>> {
    if (this.easyStorePromise) {
      return this.easyStorePromise;
    };
    }

    // first run from here
    const deferred = plugins.smartpromise.defer<InstanceType<typeof this.easyStoreClass>>();
760
ts/classes.lucene.adapter.ts
Normal file
@ -0,0 +1,760 @@
/**
 * Lucene to MongoDB query adapter for SmartData
 */
import * as plugins from './plugins.js';

// Types
type NodeType =
  | 'TERM'
  | 'PHRASE'
  | 'FIELD'
  | 'AND'
  | 'OR'
  | 'NOT'
  | 'RANGE'
  | 'WILDCARD'
  | 'FUZZY'
  | 'GROUP';

interface QueryNode {
  type: NodeType;
}

interface TermNode extends QueryNode {
  type: 'TERM';
  value: string;
  boost?: number;
}

interface PhraseNode extends QueryNode {
  type: 'PHRASE';
  value: string;
  proximity?: number;
}

interface FieldNode extends QueryNode {
  type: 'FIELD';
  field: string;
  value: AnyQueryNode;
}

interface BooleanNode extends QueryNode {
  type: 'AND' | 'OR' | 'NOT';
  left: AnyQueryNode;
  right: AnyQueryNode;
}

interface RangeNode extends QueryNode {
  type: 'RANGE';
  field: string;
  lower: string;
  upper: string;
  includeLower: boolean;
  includeUpper: boolean;
}

interface WildcardNode extends QueryNode {
  type: 'WILDCARD';
  value: string;
}

interface FuzzyNode extends QueryNode {
  type: 'FUZZY';
  value: string;
  maxEdits: number;
}

interface GroupNode extends QueryNode {
  type: 'GROUP';
  value: AnyQueryNode;
}

type AnyQueryNode =
  | TermNode
  | PhraseNode
  | FieldNode
  | BooleanNode
  | RangeNode
  | WildcardNode
  | FuzzyNode
  | GroupNode;

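As a worked example of these node types, a query such as status:active AND priority:high should parse into roughly the following tree (field names are illustrative):

const exampleAst: AnyQueryNode = {
  type: 'AND',
  left: { type: 'FIELD', field: 'status', value: { type: 'TERM', value: 'active' } },
  right: { type: 'FIELD', field: 'priority', value: { type: 'TERM', value: 'high' } },
};
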
/**
 * Lucene query parser
 */
export class LuceneParser {
  private pos: number = 0;
  private input: string = '';
  private tokens: string[] = [];

  constructor() {}

  /**
   * Parse a Lucene query string into an AST
   */
  parse(query: string): AnyQueryNode {
    this.input = query.trim();
    this.pos = 0;
    this.tokens = this.tokenize(this.input);

    return this.parseQuery();
  }

  /**
   * Tokenize the input string into tokens
   */
  private tokenize(input: string): string[] {
    const specialChars = /[()\[\]{}"~^:]/;
    const operators = /AND|OR|NOT|TO/;

    let tokens: string[] = [];
    let current = '';
    let inQuote = false;

    for (let i = 0; i < input.length; i++) {
      const char = input[i];

      // Handle quoted strings
      if (char === '"') {
        if (inQuote) {
          tokens.push(current + char);
          current = '';
          inQuote = false;
        } else {
          if (current) tokens.push(current);
          current = char;
          inQuote = true;
        }
        continue;
      }

      if (inQuote) {
        current += char;
        continue;
      }

      // Handle whitespace
      if (char === ' ' || char === '\t' || char === '\n') {
        if (current) {
          tokens.push(current);
          current = '';
        }
        continue;
      }

      // Handle special characters
      if (specialChars.test(char)) {
        if (current) {
          tokens.push(current);
          current = '';
        }
        tokens.push(char);
        continue;
      }

      current += char;

      // Check if current is an operator
      if (operators.test(current) && (i + 1 === input.length || /\s/.test(input[i + 1]))) {
        tokens.push(current);
        current = '';
      }
    }

    if (current) tokens.push(current);

    return tokens;
  }

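A hedged example of the expected token stream (the query string is made up):

const parser = new LuceneParser();
// tokenize() is private; the comment below only illustrates the intermediate
// tokens for 'title:hello AND body:"hello world"':
//   ['title', ':', 'hello', 'AND', 'body', ':', '"hello world"']
const ast = parser.parse('title:hello AND body:"hello world"');
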
  /**
   * Parse the main query expression
   */
  private parseQuery(): AnyQueryNode {
    const left = this.parseBooleanOperand();

    if (this.pos < this.tokens.length) {
      const token = this.tokens[this.pos];

      if (token === 'AND' || token === 'OR') {
        this.pos++;
        const right = this.parseQuery();
        return {
          type: token as 'AND' | 'OR',
          left,
          right,
        };
      } else if (token === 'NOT' || token === '-') {
        this.pos++;
        const right = this.parseQuery();
        return {
          type: 'NOT',
          left,
          right,
        };
      }
    }

    return left;
  }

  /**
   * Parse boolean operands (terms, phrases, fields, groups)
   */
  private parseBooleanOperand(): AnyQueryNode {
    if (this.pos >= this.tokens.length) {
      throw new Error('Unexpected end of input');
    }

    const token = this.tokens[this.pos];

    // Handle grouping with parentheses
    if (token === '(') {
      this.pos++;
      const group = this.parseQuery();

      if (this.pos < this.tokens.length && this.tokens[this.pos] === ')') {
        this.pos++;
        return { type: 'GROUP', value: group } as GroupNode;
      } else {
        throw new Error('Unclosed group');
      }
    }

    // Handle fields (field:value)
    if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === ':') {
      const field = token;
      this.pos += 2; // Skip field and colon

      if (this.pos < this.tokens.length) {
        const value = this.parseBooleanOperand();
        return { type: 'FIELD', field, value } as FieldNode;
      } else {
        throw new Error('Expected value after field');
      }
    }

    // Handle range queries
    if (token === '[' || token === '{') {
      return this.parseRange();
    }

    // Handle phrases ("term term")
    if (token.startsWith('"') && token.endsWith('"')) {
      const phrase = token.slice(1, -1);
      this.pos++;

      // Check for proximity operator
      let proximity: number | undefined;
      if (this.pos < this.tokens.length && this.tokens[this.pos] === '~') {
        this.pos++;
        if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
          proximity = parseInt(this.tokens[this.pos], 10);
          this.pos++;
        } else {
          throw new Error('Expected number after proximity operator');
        }
      }

      return { type: 'PHRASE', value: phrase, proximity } as PhraseNode;
    }

    // Handle wildcards
    if (token.includes('*') || token.includes('?')) {
      this.pos++;
      return { type: 'WILDCARD', value: token } as WildcardNode;
    }

    // Handle fuzzy searches
    if (this.pos + 1 < this.tokens.length && this.tokens[this.pos + 1] === '~') {
      const term = token;
      this.pos += 2; // Skip term and tilde

      let maxEdits = 2; // Default
      if (this.pos < this.tokens.length && /^\d+$/.test(this.tokens[this.pos])) {
        maxEdits = parseInt(this.tokens[this.pos], 10);
        this.pos++;
      }

      return { type: 'FUZZY', value: term, maxEdits } as FuzzyNode;
    }

    // Simple term
    this.pos++;
    return { type: 'TERM', value: token } as TermNode;
  }

  /**
   * Parse range queries
   */
  private parseRange(): RangeNode {
    const includeLower = this.tokens[this.pos] === '[';
    const includeUpper = this.tokens[this.pos + 4] === ']';

    this.pos++; // Skip open bracket

    if (this.pos + 4 >= this.tokens.length) {
      throw new Error('Invalid range query syntax');
    }

    const lower = this.tokens[this.pos];
    this.pos++;

    if (this.tokens[this.pos] !== 'TO') {
      throw new Error('Expected TO in range query');
    }
    this.pos++;

    const upper = this.tokens[this.pos];
    this.pos++;

    if (this.tokens[this.pos] !== (includeLower ? ']' : '}')) {
      throw new Error('Invalid range query closing bracket');
    }
    this.pos++;

    // For simplicity, assuming the field is handled separately
    return {
      type: 'RANGE',
      field: '', // This will be filled by the field node
      lower,
      upper,
      includeLower,
      includeUpper,
    };
  }
}

/**
 * Transformer for Lucene AST to MongoDB query
 * FIXED VERSION - proper MongoDB query structure
 */
export class LuceneToMongoTransformer {
  constructor() {}

  /**
   * Transform a Lucene AST node to a MongoDB query
   */
  transform(node: AnyQueryNode, searchFields?: string[]): any {
    switch (node.type) {
      case 'TERM':
        return this.transformTerm(node, searchFields);
      case 'PHRASE':
        return this.transformPhrase(node, searchFields);
      case 'FIELD':
        return this.transformField(node);
      case 'AND':
        return this.transformAnd(node);
      case 'OR':
        return this.transformOr(node);
      case 'NOT':
        return this.transformNot(node);
      case 'RANGE':
        return this.transformRange(node);
      case 'WILDCARD':
        return this.transformWildcard(node, searchFields);
      case 'FUZZY':
        return this.transformFuzzy(node, searchFields);
      case 'GROUP':
        return this.transform(node.value, searchFields);
      default:
        throw new Error(`Unsupported node type: ${(node as any).type}`);
    }
  }

  /**
   * Transform a term to MongoDB query
   * FIXED: properly structured $or query for multiple fields
   */
  private transformTerm(node: TermNode, searchFields?: string[]): any {
    // If specific fields are provided, search across those fields
    if (searchFields && searchFields.length > 0) {
      // Create an $or query to search across multiple fields
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: node.value, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // Otherwise, use text search (requires a text index on desired fields)
    return { $text: { $search: node.value } };
  }

  /**
   * Transform a phrase to MongoDB query
   * FIXED: properly structured $or query for multiple fields
   */
  private transformPhrase(node: PhraseNode, searchFields?: string[]): any {
    // If specific fields are provided, search phrase across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: `${node.value.replace(/\s+/g, '\\s+')}`, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // For phrases, we use a regex to ensure exact matches
    return { $text: { $search: `"${node.value}"` } };
  }

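For illustration, transforming a bare term with a list of search fields should produce a case-insensitive $or (the field names are assumptions):

const transformer = new LuceneToMongoTransformer();
const termFilter = transformer.transform(
  { type: 'TERM', value: 'phone' },
  ['name', 'description'],
);
// roughly: { $or: [ { name: { $regex: 'phone', $options: 'i' } },
//                   { description: { $regex: 'phone', $options: 'i' } } ] }
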
  /**
   * Transform a field query to MongoDB query
   */
  private transformField(node: FieldNode): any {
    // Handle special case for range queries on fields
    if (node.value.type === 'RANGE') {
      const rangeNode = node.value as RangeNode;
      rangeNode.field = node.field;
      return this.transformRange(rangeNode);
    }

    // Handle special case for wildcards on fields
    if (node.value.type === 'WILDCARD') {
      return {
        [node.field]: {
          $regex: this.luceneWildcardToRegex((node.value as WildcardNode).value),
          $options: 'i',
        },
      };
    }

    // Handle special case for fuzzy searches on fields
    if (node.value.type === 'FUZZY') {
      return {
        [node.field]: {
          $regex: this.createFuzzyRegex((node.value as FuzzyNode).value),
          $options: 'i',
        },
      };
    }

    // Special case for exact term matches on fields
    if (node.value.type === 'TERM') {
      return { [node.field]: { $regex: (node.value as TermNode).value, $options: 'i' } };
    }

    // Special case for phrase matches on fields
    if (node.value.type === 'PHRASE') {
      return {
        [node.field]: {
          $regex: `${(node.value as PhraseNode).value.replace(/\s+/g, '\\s+')}`,
          $options: 'i',
        },
      };
    }

    // For other cases, we'll transform the value and apply it to the field
    const transformedValue = this.transform(node.value);

    // If the transformed value uses $text, we need to adapt it for the field
    if (transformedValue.$text) {
      return { [node.field]: { $regex: transformedValue.$text.$search, $options: 'i' } };
    }

    // Handle $or and $and cases
    if (transformedValue.$or || transformedValue.$and) {
      // This is a bit complex - we need to restructure the query to apply the field
      // For now, simplify by just using a regex on the field
      const term = this.extractTermFromBooleanQuery(transformedValue);
      if (term) {
        return { [node.field]: { $regex: term, $options: 'i' } };
      }
    }

    return { [node.field]: transformedValue };
  }

  /**
   * Extract a term from a boolean query (simplification)
   */
  private extractTermFromBooleanQuery(query: any): string | null {
    if (query.$or && Array.isArray(query.$or) && query.$or.length > 0) {
      const firstClause = query.$or[0];
      for (const field in firstClause) {
        if (firstClause[field].$regex) {
          return firstClause[field].$regex;
        }
      }
    }

    if (query.$and && Array.isArray(query.$and) && query.$and.length > 0) {
      const firstClause = query.$and[0];
      for (const field in firstClause) {
        if (firstClause[field].$regex) {
          return firstClause[field].$regex;
        }
      }
    }

    return null;
  }

  /**
   * Transform AND operator to MongoDB query
   * FIXED: $and must be an array
   */
  private transformAnd(node: BooleanNode): any {
    return { $and: [this.transform(node.left), this.transform(node.right)] };
  }

  /**
   * Transform OR operator to MongoDB query
   * FIXED: $or must be an array
   */
  private transformOr(node: BooleanNode): any {
    return { $or: [this.transform(node.left), this.transform(node.right)] };
  }

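Continuing the earlier example, a boolean query now maps to explicit operator arrays, roughly:

const andFilter = new LuceneToMongoTransformer().transform(
  new LuceneParser().parse('status:active AND priority:high'),
);
// roughly: { $and: [ { status: { $regex: 'active', $options: 'i' } },
//                    { priority: { $regex: 'high', $options: 'i' } } ] }
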
  /**
   * Transform NOT operator to MongoDB query
   * FIXED: $and must be an array and $not usage
   */
  private transformNot(node: BooleanNode): any {
    const leftQuery = this.transform(node.left);
    const rightQuery = this.transform(node.right);

    // Create a query that includes left but excludes right
    if (rightQuery.$text) {
      // For text searches, we need a different approach
      // We'll use a negated regex instead
      const searchTerm = rightQuery.$text.$search.replace(/"/g, '');

      // Determine the fields to apply the negation to
      const notConditions = [];

      for (const field in leftQuery) {
        if (field !== '$or' && field !== '$and') {
          notConditions.push({
            [field]: { $not: { $regex: searchTerm, $options: 'i' } },
          });
        }
      }

      // If left query has $or or $and, we need to handle it differently
      if (leftQuery.$or) {
        return {
          $and: [leftQuery, { $nor: [{ $or: notConditions }] }],
        };
      } else {
        // Simple case - just add $not to each field
        return {
          $and: [leftQuery, { $and: notConditions }],
        };
      }
    } else {
      // For other queries, we can use $not directly
      // We need to handle different structures based on the rightQuery
      let notQuery = {};

      if (rightQuery.$or) {
        notQuery = { $nor: rightQuery.$or };
      } else if (rightQuery.$and) {
        // Convert $and to $nor
        notQuery = { $nor: rightQuery.$and };
      } else {
        // Simple field condition
        for (const field in rightQuery) {
          notQuery[field] = { $not: rightQuery[field] };
        }
      }

      return { $and: [leftQuery, notQuery] };
    }
  }

  /**
   * Transform range query to MongoDB query
   */
  private transformRange(node: RangeNode): any {
    const range: any = {};

    if (node.lower !== '*') {
      range[node.includeLower ? '$gte' : '$gt'] = this.parseValue(node.lower);
    }

    if (node.upper !== '*') {
      range[node.includeUpper ? '$lte' : '$lt'] = this.parseValue(node.upper);
    }

    return { [node.field]: range };
  }

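An illustrative range transformation (the field name and bounds are assumptions):

const rangeFilter = new LuceneToMongoTransformer().transform({
  type: 'RANGE',
  field: 'age',
  lower: '18',
  upper: '65',
  includeLower: true,
  includeUpper: true,
});
// roughly: { age: { $gte: 18, $lte: 65 } } (parseValue() upgrades numeric strings to numbers)
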
  /**
   * Transform wildcard query to MongoDB query
   * FIXED: properly structured for multiple fields
   */
  private transformWildcard(node: WildcardNode, searchFields?: string[]): any {
    // Convert Lucene wildcards to MongoDB regex
    const regex = this.luceneWildcardToRegex(node.value);

    // If specific fields are provided, search wildcard across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: regex, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // By default, apply to the default field
    return { $regex: regex, $options: 'i' };
  }

  /**
   * Transform fuzzy query to MongoDB query
   * FIXED: properly structured for multiple fields
   */
  private transformFuzzy(node: FuzzyNode, searchFields?: string[]): any {
    // MongoDB doesn't have built-in fuzzy search
    // This is a very basic approach using regex
    const regex = this.createFuzzyRegex(node.value);

    // If specific fields are provided, search fuzzy term across those fields
    if (searchFields && searchFields.length > 0) {
      const orConditions = searchFields.map((field) => ({
        [field]: { $regex: regex, $options: 'i' },
      }));

      return { $or: orConditions };
    }

    // By default, apply to the default field
    return { $regex: regex, $options: 'i' };
  }

  /**
   * Convert Lucene wildcards to MongoDB regex patterns
   */
  private luceneWildcardToRegex(wildcardPattern: string): string {
    // Replace Lucene wildcards with regex equivalents
    // * => .*
    // ? => .
    // Also escape regex special chars
    return wildcardPattern
      .replace(/([.+^${}()|\\])/g, '\\$1') // Escape regex special chars
      .replace(/\*/g, '.*')
      .replace(/\?/g, '.');
  }

  /**
   * Create a simplified fuzzy search regex
   */
  private createFuzzyRegex(term: string): string {
    // For a very simple approach, we allow some characters to be optional
    let regex = '';
    for (let i = 0; i < term.length; i++) {
      // Make every other character optional (simplified fuzzy)
      if (i % 2 === 1) {
        regex += term[i] + '?';
      } else {
        regex += term[i];
      }
    }
    return regex;
  }

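Illustrative translations produced by the two helpers above (the inputs are made up):

// luceneWildcardToRegex('smart*car') -> 'smart.*car'
// luceneWildcardToRegex('te?t')      -> 'te.t'
// createFuzzyRegex('phone')          -> 'ph?on?e' (every second character made optional)
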
  /**
   * Parse string values to appropriate types (numbers, dates, etc.)
   */
  private parseValue(value: string): any {
    // Try to parse as number
    if (/^-?\d+$/.test(value)) {
      return parseInt(value, 10);
    }

    if (/^-?\d+\.\d+$/.test(value)) {
      return parseFloat(value);
    }

    // Try to parse as date (simplified)
    const date = new Date(value);
    if (!isNaN(date.getTime())) {
      return date;
    }

    // Default to string
    return value;
  }
}

/**
 * Main adapter class
 */
export class SmartdataLuceneAdapter {
  private parser: LuceneParser;
  private transformer: LuceneToMongoTransformer;
  private defaultSearchFields: string[] = [];

  /**
   * @param defaultSearchFields - Optional array of field names to search across when no field is specified
   */
  constructor(defaultSearchFields?: string[]) {
    this.parser = new LuceneParser();
    this.transformer = new LuceneToMongoTransformer();
    if (defaultSearchFields) {
      this.defaultSearchFields = defaultSearchFields;
    }
  }

  /**
   * Convert a Lucene query string to a MongoDB query object
   * @param luceneQuery - The Lucene query string to convert
   * @param searchFields - Optional array of field names to search across (overrides defaultSearchFields)
   */
  convert(luceneQuery: string, searchFields?: string[]): any {
    try {
      // For simple single term queries, create a simpler query structure
      if (
        !luceneQuery.includes(':') &&
        !luceneQuery.includes(' AND ') &&
        !luceneQuery.includes(' OR ') &&
        !luceneQuery.includes(' NOT ') &&
        !luceneQuery.includes('(') &&
        !luceneQuery.includes('[')
      ) {
        // This is a simple term, use a more direct approach
        const fieldsToSearch = searchFields || this.defaultSearchFields;

        if (fieldsToSearch && fieldsToSearch.length > 0) {
          return {
            $or: fieldsToSearch.map((field) => ({
              [field]: { $regex: luceneQuery, $options: 'i' },
            })),
          };
        }
      }

      // For more complex queries, use the full parser
      // Parse the Lucene query into an AST
      const ast = this.parser.parse(luceneQuery);

      // Use provided searchFields, fall back to defaultSearchFields
      const fieldsToSearch = searchFields || this.defaultSearchFields;

      // Transform the AST to a MongoDB query
      return this.transformWithFields(ast, fieldsToSearch);
    } catch (error) {
      console.error(`Failed to convert Lucene query "${luceneQuery}":`, error);
      throw new Error(`Failed to convert Lucene query: ${error}`);
    }
  }

  /**
   * Helper method to transform the AST with field information
   */
  private transformWithFields(node: AnyQueryNode, searchFields: string[]): any {
    // Special case for term nodes without a specific field
    if (
      node.type === 'TERM' ||
      node.type === 'PHRASE' ||
      node.type === 'WILDCARD' ||
      node.type === 'FUZZY'
    ) {
      return this.transformer.transform(node, searchFields);
    }

    // For other node types, use the standard transformation
    return this.transformer.transform(node);
  }
}
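An end-to-end sketch of the adapter (the field names are assumptions):

const adapter = new SmartdataLuceneAdapter(['title', 'body']);

adapter.convert('galaxy');
// simple-term fast path, roughly:
// { $or: [ { title: { $regex: 'galaxy', $options: 'i' } },
//          { body:  { $regex: 'galaxy', $options: 'i' } } ] }

adapter.convert('title:galaxy AND body:5G');
// parsed and transformed, roughly:
// { $and: [ { title: { $regex: 'galaxy', $options: 'i' } },
//           { body:  { $regex: '5G', $options: 'i' } } ] }
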
@ -1,5 +1,5 @@
import { SmartDataDbDoc } from './smartdata.classes.doc.js';
import * as plugins from './smartdata.plugins.js';
import { SmartDataDbDoc } from './classes.doc.js';
import * as plugins from './plugins.js';

/**
 * a wrapper for the native mongodb cursor. Exposes better
@ -14,7 +14,7 @@ export class SmartdataDbWatcher<T = any> {
  public changeSubject = new plugins.smartrx.rxjs.Subject<T>();
  constructor(
    changeStreamArg: plugins.mongodb.ChangeStream<T>,
    smartdataDbDocArg: typeof SmartDataDbDoc
    smartdataDbDocArg: typeof SmartDataDbDoc,
  ) {
    this.changeStream = changeStreamArg;
    this.changeStream.on('change', async (item: any) => {
@ -23,7 +23,7 @@ export class SmartdataDbWatcher<T = any> {
        return;
      }
      this.changeSubject.next(
        smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(item.fullDocument) as any as T
        smartdataDbDocArg.createInstanceFromMongoDbNativeDoc(item.fullDocument) as any as T,
      );
    });
    plugins.smartdelay.delayFor(0).then(() => {
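A usage sketch for the watcher: the change stream is surfaced as an RxJS subject that emits hydrated document instances (Car is a hypothetical document class):

const watcher = await Car.watch({ brand: 'Volvo' });
const subscription = watcher.changeSubject.subscribe((car) => {
  console.log(`changed: ${car?.id}`);
});
// later, when done: subscription.unsubscribe();
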
14
ts/index.ts
@ -1,14 +1,14 @@
export * from './smartdata.classes.db.js';
export * from './smartdata.classes.collection.js';
export * from './smartdata.classes.doc.js';
export * from './smartdata.classes.easystore.js';
export * from './smartdata.classes.cursor.js';
export * from './classes.db.js';
export * from './classes.collection.js';
export * from './classes.doc.js';
export * from './classes.easystore.js';
export * from './classes.cursor.js';

import * as convenience from './smartadata.convenience.js';
import * as convenience from './classes.convenience.js';

export { convenience };

// to be removed with the next breaking update
import type * as plugins from './smartdata.plugins.js';
import type * as plugins from './plugins.js';
type IMongoDescriptor = plugins.tsclass.database.IMongoDescriptor;
export type { IMongoDescriptor };

@ -1,3 +1,3 @@
import * as plugins from './smartdata.plugins.js';
import * as plugins from './plugins.js';

export const logger = new plugins.smartlog.ConsoleLog();
@ -6,7 +6,9 @@
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
    "verbatimModuleSyntax": true,
    "baseUrl": ".",
    "paths": {}
  },
  "exclude": [
    "dist_*/**/*.d.ts"