Compare commits
113 Commits
| SHA1 |
|---|
| 53673e37cb |
| 0701207acd |
| 36d7cb69a3 |
| 4924e0a151 |
| cd98529541 |
| e6a9282987 |
| 9dcadcd611 |
| 4b045ff988 |
| 023dd1b519 |
| 4971385eae |
| e209839962 |
| e44365b674 |
| bd154089c3 |
| 0be693da60 |
| 040c93dec3 |
| 21e55bd341 |
| f1d04fe63c |
| 49c4660131 |
| e5fd0361fc |
| d6a291d8d4 |
| fc87fd7ab7 |
| 203444d1a6 |
| cdbf1fd316 |
| 10108d8338 |
| 36abb2c7c0 |
| 8f00d90bb1 |
| 41f1758d46 |
| 2b7cd33996 |
| 80a04ca893 |
| 93f739c79e |
| a1b5bf5c0c |
| 084c5d137c |
| 4aa0592bd5 |
| 0313e5045a |
| 7442d93f58 |
| 35e70aae62 |
| cc30f43a28 |
| 100a8fc12e |
| f32403961d |
| 9734949241 |
| b70444824b |
| 0eb0903667 |
| 4d11dca22c |
| 3079adbbd9 |
| bc9de8e4d6 |
| 3fa7d66236 |
| 2a0b0b2478 |
| 35e99663a4 |
| 2cc5855206 |
| 8f9f2fdf05 |
| 7ef36b5c40 |
| 67a8f3fe4d |
| 5ae2c37519 |
| fcb67ec878 |
| 9e25494f8f |
| dd8ba4736a |
| d395310410 |
| 49233ce45f |
| fb93dce8bc |
| 30cbc05aa2 |
| 2a595a1a9a |
| d62b18e93c |
| d6176f820a |
| 0ebc1d5288 |
| 2b0003546a |
| 60617f2fca |
| 9c767d07e4 |
| f3aa94dcb7 |
| a0be0edd9d |
| ad24ba2f5d |
| b0cf4bb27f |
| fd29ceab80 |
| bcca434a24 |
| d4a9ad8f67 |
| d4c7c33668 |
| 8340257b00 |
| 32265e83f3 |
| e2df11cea2 |
| 2719ba28f6 |
| 6d78a7ba0c |
| 5897c6e7de |
| 20369614a2 |
| 7ceaf694fe |
| 391c6bd45d |
| 1a702071c6 |
| 0fe2f6a4ae |
| 20d04413c9 |
| e56439e9f4 |
| c9a9434cd9 |
| 5d98dd9089 |
| 2d635fdf7c |
| 1dbf3724d0 |
| cc7eb8c139 |
| 0e01ecbd1a |
| 2d21b40a76 |
| 2d1a5cdc50 |
| 20a41d3381 |
| b2019b33f8 |
| 1ab582db51 |
| 9c87f5ee5e |
| ef9cb193d5 |
| 9f706e0a70 |
| b2d4b82532 |
| 63713f4cd2 |
| 5d3bc13126 |
| 4b30234cb6 |
| 271971a373 |
| d636dab664 |
| 5f94db8d5d |
| f9866076ca |
| aa43a221a0 |
| cb63b305ad |
| 6fde0544f5 |
66 .gitea/workflows/default_nottags.yaml (Normal file)
@@ -0,0 +1,66 @@

name: Default (not tags)

on:
  push:
    tags-ignore:
      - '**'

env:
  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci

      - name: Run npm prepare
        run: npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build
124 .gitea/workflows/default_tags.yaml (Normal file)
@@ -0,0 +1,124 @@

name: Default (tags)

on:
  push:
    tags:
      - '*'

env:
  IMAGE: code.foss.global/host.today/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build

  release:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Release
        run: |
          npmci node install stable
          npmci npm publish

  metadata:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    continue-on-error: true

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Code quality
        run: |
          npmci command npm install -g typescript
          npmci npm install

      - name: Trigger
        run: npmci trigger

      - name: Build docs and upload artifacts
        run: |
          npmci node install stable
          npmci npm install
          pnpm install -g @git.zone/tsdoc
          npmci command tsdoc
        continue-on-error: true
22 .gitignore (vendored)
@@ -1,5 +1,23 @@

.nogit/
node_modules/

# artifacts
coverage/
public/
pages/

# installs
node_modules/

# caches
.yarn/
.cache/
.rpt2_cache

# builds
dist/
dist_*/

# AI
.claude/
.serena/

#------# custom
147 .gitlab-ci.yml
@@ -1,147 +0,0 @@

# gitzone standard
image: hosttoday/ht-docker-node:npmci

cache:
  paths:
    - .npmci_cache/
  key: "$CI_BUILD_STAGE"

stages:
  - security
  - test
  - release
  - metadata

# ====================
# security stage
# ====================
mirror:
  stage: security
  script:
    - npmci git mirror
  tags:
    - docker
    - notpriv

snyk:
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install -g snyk
    - npmci command npm install --ignore-scripts
    - npmci command snyk test
  tags:
    - docker
    - notpriv

# ====================
# test stage
# ====================
testLEGACY:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install legacy
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker
    - notpriv
  allow_failure: true

testLTS:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install lts
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker
    - notpriv

testSTABLE:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker
    - notpriv

release:
  stage: release
  script:
    - npmci node install stable
    - npmci npm publish
  only:
    - tags
  tags:
    - docker
    - notpriv

# ====================
# metadata stage
# ====================
codequality:
  stage: metadata
  image: docker:stable
  allow_failure: true
  services:
    - docker:stable-dind
  script:
    - export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
    - docker run
        --env SOURCE_CODE="$PWD"
        --volume "$PWD":/code
        --volume /var/run/docker.sock:/var/run/docker.sock
        "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
  artifacts:
    paths: [codeclimate.json]
  tags:
    - docker
    - priv

trigger:
  stage: metadata
  script:
    - npmci trigger
  only:
    - tags
  tags:
    - docker
    - notpriv

pages:
  image: hosttoday/ht-docker-node:npmci
  stage: metadata
  script:
    - npmci command npm install -g typedoc typescript
    - npmci npm prepare
    - npmci npm install
    - npmci command typedoc --module "commonjs" --target "ES2016" --out public/ ts/
  tags:
    - docker
    - notpriv
  only:
    - tags
  artifacts:
    expire_in: 1 week
    paths:
      - public
  allow_failure: true

windowsCompatibility:
  image: stefanscherer/node-windows:10-build-tools
  stage: metadata
  script:
    - npm install & npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - windows
  allow_failure: true
4 .snyk
@@ -1,4 +0,0 @@

# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
version: v1.12.0
ignore: {}
patch: {}
11 .vscode/launch.json (vendored, Normal file)
@@ -0,0 +1,11 @@

{
  "version": "0.2.0",
  "configurations": [
    {
      "command": "npm test",
      "name": "Run npm test",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}
26 .vscode/settings.json (vendored, Normal file)
@@ -0,0 +1,26 @@

{
  "json.schemas": [
    {
      "fileMatch": ["/npmextra.json"],
      "schema": {
        "type": "object",
        "properties": {
          "npmci": {
            "type": "object",
            "description": "settings for npmci"
          },
          "gitzone": {
            "type": "object",
            "description": "settings for gitzone",
            "properties": {
              "projectType": {
                "type": "string",
                "enum": ["website", "element", "service", "npm", "wcc"]
              }
            }
          }
        }
      }
    }
  ]
}
100 changelog.md (Normal file)
@@ -0,0 +1,100 @@

# Changelog

## 2025-11-29 - 2.0.17 - fix(ci)
Update CI workflows and build config; bump dependencies; code style and TS config fixes

- Gitea workflows updated: swapped CI image to code.foss.global, adjusted NPMCI_COMPUTED_REPOURL and replaced @shipzone/npmci with @ship.zone/npmci; tsdoc package path updated.
- Removed legacy .gitlab-ci.yml (migrated CI to .gitea workflows).
- Bumped dependencies and devDependencies (e.g. @elastic/elasticsearch -> ^9.2.0, @git.zone/* packages, @push.rocks/* packages) and added repository/bugs/homepage/pnpm/packageManager metadata to package.json.
- Tests updated: import path change to @git.zone/tstest/tapbundle and test runner export changed to default export (export default tap.start()).
- TypeScript config changes: module and moduleResolution set to NodeNext and added exclude for dist_*/**/*.d.ts.
- Code cleanups and formatting: normalized object/argument formatting, trailing commas, safer ElasticClient call shapes (explicit option objects), and minor refactors across ElasticDoc, FastPush, KVStore, ElasticIndex, ElasticScheduler and smartlog destination.
- Added .gitignore entries for local AI tool directories and added readme.hints.md and npmextra.json.

## 2023-08-30 - 2.0.2..2.0.16 - core
Series of maintenance releases and small bugfixes on the 2.0.x line.

- Multiple "fix(core): update" commits across 2.0.2 → 2.0.16 addressing small bugs and stability/maintenance issues.
- No single large feature added in these patch releases; recommended to consult individual release diffs if you need a precise change per patch.

## 2023-08-25 - 2.0.0 - core
Major 2.0.0 release containing core updates and the transition from the 1.x line.

- Bumped major version to 2.0.0 with core updates.
- This release follows a breaking-change update introduced on the 1.x line (see 1.0.56 below). Review breaking changes before upgrading.

## 2023-08-25 - 1.0.56 - core (BREAKING CHANGE)
Breaking change introduced on the 1.x line.

- BREAKING CHANGE: core updated. Consumers should review the change and adapt integration code before upgrading from 1.0.55 → 1.0.56 (or migrating to 2.0.x).

## 2023-08-18 - 1.0.40..1.0.55 - maintenance
Maintenance and fixes across many 1.0.x releases (mid 2023).

- Numerous "fix(core): update" commits across 1.0.40 → 1.0.55 addressing stability and minor bug fixes.
- Includes smaller testing updates (e.g., fix(test): update in the 1.0.x series).

## 2023-07-05 - 1.0.32..1.0.44 - maintenance
Maintenance sweep in the 1.0.x line (July 2023).

- Multiple small core fixes and updates across these patch releases.
- No large feature additions; stability and incremental improvements only.

## 2019-11-02 - 1.0.26..1.0.30 - maintenance
Patch-level fixes and cleanup in late 2019.

- Several "fix(core): update" releases to address minor issues and keep dependencies up to date.

## 2018-11-10 - 1.0.20 - core
Cleanup related to indices.

- fix(clean up old indices): update — housekeeping and cleanup of old indices.

## 2018-11-03 - 1.0.13 - core
Security/tooling update.

- fix(core): add snyk — added Snyk related changes (security/scan tooling integration).

## 2018-09-15 - 1.0.11 - core
Dependency and compatibility updates.

- fix(core): update dependencies and bonsai.io compatibility — updated dependencies and ensured compatibility with bonsai.io.

## 2018-08-12 - 1.0.9 - test
Testing improvements.

- fix(test): update — improvements/adjustments to test suite.

## 2018-03-03 - 1.0.7 - system
System-level change.

- "system change" — internal/system modification (no public API feature).

## 2018-01-27 - 1.0.4 - quality/style
Coverage and style updates.

- adjust coverageTreshold — adjusted test coverage threshold.
- update style / update — code style and minor cleanup.

## 2018-01-27 - 1.0.3 - core (feat)
Winston logging integration (added, later removed in a subsequent release).

- feat(core): implement winston support — initial addition of Winston logging support.

## 2018-01-27 - 1.0.6 - winston (fix)
Removal of previously added logging integration.

- fix(winston): remove winston — removed Winston integration introduced earlier.

## 2018-01-26 - 1.0.2 - core (feat)
Index generation improvement.

- feat(core): update index generation — improvements to index generation logic.

## 2018-01-24 - 1.0.1 - core (initial)
Project initial commit and initial cleanup.

- feat(core): initial commit — project bootstrap.
- fix(core): cleanup — initial cleanup and adjustments after the first commit.

Note: Versions that only contain bare version-tag commits (commit messages identical to the version string) have been summarized as ranges above. For detailed per-patch changes consult individual release diffs.
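To make the 2.0.17 test and TS-config notes above concrete, here is a minimal before/after sketch of the migration those entries describe. The package names and the default-export pattern come from the changelog and the new test file below; the example test body itself is illustrative only:

```typescript
// Before (1.x era): tapbundle came from @pushrocks and the runner
// was started as a plain side effect at the end of the file.
// import { tap, expect } from '@pushrocks/tapbundle';
// ...tests...
// tap.start();

// After (2.0.17): tapbundle is imported via @git.zone/tstest and the
// started runner becomes the module's default export.
import { tap, expect } from '@git.zone/tstest/tapbundle';

tap.test('example test (illustrative)', async () => {
  expect(1 + 1).toEqual(2);
});

export default tap.start();

// The matching tsconfig.json change per the changelog: set
//   "module": "NodeNext" and "moduleResolution": "NodeNext",
// and add an "exclude" entry for dist_*/**/*.d.ts.
```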
31 dist/elasticlog.classes.elasticlog.d.ts (vendored)
@@ -1,31 +0,0 @@

import { Client as ElasticClient } from 'elasticsearch';
import { ILogContext } from 'smartlog-interfaces';
import { LogScheduler } from './elasticlog.classes.logscheduler';
export interface IStandardLogParams {
  message: string;
  severity: string;
}
export interface IElasticLogConstructorOptions {
  port: number;
  domain: string;
  ssl: boolean;
  user?: string;
  pass?: string;
  logContext: ILogContext;
}
export declare class ElasticLog<T> {
  client: ElasticClient;
  logContext: ILogContext;
  logScheduler: LogScheduler;
  /**
   * sets up an instance of Elastic log
   * @param optionsArg
   */
  constructor(optionsArg: IElasticLogConstructorOptions);
  /**
   * computes the host string from the constructor options
   * @param optionsArg
   */
  private computeHostString(optionsArg);
  log(logObject: IStandardLogParams, scheduleOverwrite?: boolean): Promise<void>;
}
77 dist/elasticlog.classes.elasticlog.js (vendored)
@@ -1,77 +0,0 @@

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// interfaces
const elasticsearch_1 = require("elasticsearch");
// other classes
const elasticlog_classes_logscheduler_1 = require("./elasticlog.classes.logscheduler");
class ElasticLog {
    /**
     * sets up an instance of Elastic log
     * @param optionsArg
     */
    constructor(optionsArg) {
        this.logScheduler = new elasticlog_classes_logscheduler_1.LogScheduler(this);
        this.logContext = optionsArg.logContext;
        this.client = new elasticsearch_1.Client({
            host: this.computeHostString(optionsArg),
            log: 'trace'
        });
    }
    /**
     * computes the host string from the constructor options
     * @param optionsArg
     */
    computeHostString(optionsArg) {
        let hostString = `${optionsArg.domain}:${optionsArg.port}`;
        if (optionsArg.user && optionsArg.pass) {
            hostString = `${optionsArg.user}:${optionsArg.pass}@${hostString}`;
        }
        if (optionsArg.ssl) {
            hostString = `https://${hostString}`;
        }
        else {
            hostString = `http://${hostString}`;
        }
        return hostString;
    }
    log(logObject, scheduleOverwrite = false) {
        return __awaiter(this, void 0, void 0, function* () {
            const now = new Date();
            if (this.logScheduler.logsScheduled && !scheduleOverwrite) {
                this.logScheduler.scheduleLog(logObject);
                return;
            }
            this.client.index({
                index: `logs-${now.getFullYear()}.${('0' + (now.getMonth() + 1)).slice(-2)}.${now.getDate()}`,
                type: 'log',
                body: {
                    '@timestamp': now.toISOString(),
                    zone: this.logContext.zone,
                    container: this.logContext.containerName,
                    environment: this.logContext.environment,
                    severity: logObject.severity,
                    message: logObject.message
                }
            }, (error, response) => {
                if (error) {
                    console.log('ElasticLog encountered an error:');
                    console.log(error);
                    this.logScheduler.addFailedLog(logObject);
                }
                else {
                    console.log(`ElasticLog: ${logObject.message}`);
                }
            });
        });
    }
}
exports.ElasticLog = ElasticLog;
//# sourceMappingURL=data:application/json;base64,... (inline source map omitted)
11 dist/elasticlog.classes.logscheduler.d.ts (vendored)
@@ -1,11 +0,0 @@

import { ElasticLog, IStandardLogParams } from './elasticlog.classes.elasticlog';
export declare class LogScheduler {
  elasticLogRef: ElasticLog<any>;
  logsScheduled: boolean;
  logStorage: any[];
  constructor(elasticLogRefArg: ElasticLog<any>);
  addFailedLog(objectArg: any | IStandardLogParams): void;
  scheduleLog(logObject: any): void;
  setRetry(): void;
  deferSend(): void;
}
42 dist/elasticlog.classes.logscheduler.js (vendored)
@@ -1,42 +0,0 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class LogScheduler {
    constructor(elasticLogRefArg) {
        this.logsScheduled = false;
        this.logStorage = [];
        this.elasticLogRef = elasticLogRefArg;
    }
    addFailedLog(objectArg) {
        this.logStorage.push(objectArg);
        this.setRetry();
    }
    scheduleLog(logObject) {
        this.logStorage.push(logObject);
    }
    setRetry() {
        setTimeout(() => {
            const oldStorage = this.logStorage;
            this.logStorage = [];
            for (let logObject of oldStorage) {
                this.elasticLogRef.log(logObject, true);
            }
            if (this.logStorage.length === 0) {
                console.log('ElasticLog retry success!!!');
                this.logsScheduled = false;
            }
            else {
                console.log('ElasticLog retry failed');
                this.setRetry();
            }
        }, 5000);
    }
    deferSend() {
        if (!this.logsScheduled) {
            console.log('Retry ElasticLog in 5 seconds!');
            this.logsScheduled = true;
            this.setRetry();
        }
    }
}
exports.LogScheduler = LogScheduler;
//# sourceMappingURL=data:application/json;base64,... (inline source map omitted)
4 dist/elasticlog.plugins.d.ts (vendored)
@@ -1,4 +0,0 @@

import * as elasticsearch from 'elasticsearch';
import * as smartdelay from 'smartdelay';
import * as smartlogInterfaces from 'smartlog-interfaces';
export { elasticsearch, smartdelay, smartlogInterfaces };
9 dist/elasticlog.plugins.js (vendored)
@@ -1,9 +0,0 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const elasticsearch = require("elasticsearch");
exports.elasticsearch = elasticsearch;
const smartdelay = require("smartdelay");
exports.smartdelay = smartdelay;
const smartlogInterfaces = require("smartlog-interfaces");
exports.smartlogInterfaces = smartlogInterfaces;
//# sourceMappingURL=data:application/json;base64,... (inline source map omitted)
1 dist/index.d.ts (vendored)
@@ -1 +0,0 @@

export * from './elasticlog.classes.elasticlog';
7 dist/index.js (vendored)
@@ -1,7 +0,0 @@

"use strict";
function __export(m) {
    for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
Object.defineProperty(exports, "__esModule", { value: true });
__export(require("./elasticlog.classes.elasticlog"));
//# sourceMappingURL=data:application/json;base64,... (inline source map omitted)
npmextra.json
@@ -2,5 +2,16 @@

  "npmci": {
    "npmGlobalTools": [],
    "npmAccessLevel": "public"
  },
  "gitzone": {
    "projectType": "npm",
    "module": {
      "githost": "gitlab.com",
      "gitscope": "mojoio",
      "gitrepo": "elasticsearch",
      "description": "log to elasticsearch in a kibana compatible format",
      "npmPackagename": "@mojoio/elasticsearch",
      "license": "MIT"
    }
  }
}
}
1104 package-lock.json (generated)
File diff suppressed because it is too large
64 package.json
@@ -1,31 +1,59 @@

{
  "name": "@mojoio/elasticsearch",
  "version": "1.0.17",
  "name": "@apiclient.xyz/elasticsearch",
  "version": "2.0.17",
  "private": false,
  "description": "log to elasticsearch in a kibana compatible format",
  "main": "dist/index.js",
  "typings": "dist/index.d.ts",
  "main": "dist_ts/index.js",
  "typings": "dist_ts/index.d.ts",
  "author": "Lossless GmbH",
  "license": "MIT",
  "scripts": {
    "test": "(tstest test/)",
    "format": "(gitzone format)",
    "build": "echo \"Not needed for now\""
    "build": "(tsbuild --allowimplicitany)",
    "buildDocs": "tsdoc"
  },
  "devDependencies": {
    "@gitzone/tsbuild": "^2.0.22",
    "@gitzone/tsrun": "^1.1.13",
    "@gitzone/tstest": "^1.0.15",
    "@pushrocks/qenv": "^2.0.2",
    "@pushrocks/tapbundle": "^3.0.7",
    "@types/node": "^10.12.2",
    "tslint": "^5.11.0",
    "tslint-config-prettier": "^1.15.0"
    "@git.zone/tsbuild": "^3.1.0",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3",
    "@push.rocks/qenv": "^6.1.3",
    "@types/node": "^24.10.1"
  },
  "dependencies": {
    "@pushrocks/smartdelay": "^2.0.2",
    "@pushrocks/smartlog-interfaces": "^2.0.2",
    "@types/elasticsearch": "^5.0.28",
    "elasticsearch": "^15.2.0"
  }
    "@elastic/elasticsearch": "^9.2.0",
    "@push.rocks/lik": "^6.2.2",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartlog-interfaces": "^3.0.2",
    "@push.rocks/smartpromise": "^4.2.3",
    "@push.rocks/smarttime": "^4.1.1"
  },
  "files": [
    "ts/**/*",
    "ts_web/**/*",
    "dist/**/*",
    "dist_*/**/*",
    "dist_ts/**/*",
    "dist_ts_web/**/*",
    "assets/**/*",
    "cli.js",
    "npmextra.json",
    "readme.md"
  ],
  "type": "module",
  "browserslist": [
    "last 1 chrome versions"
  ],
  "repository": {
    "type": "git",
    "url": "https://gitlab.com/mojoio/elasticsearch.git"
  },
  "bugs": {
    "url": "https://gitlab.com/mojoio/elasticsearch/issues"
  },
  "homepage": "https://gitlab.com/mojoio/elasticsearch#readme",
  "pnpm": {
    "overrides": {}
  },
  "packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
}
8236 pnpm-lock.yaml (generated, Normal file)
File diff suppressed because it is too large
3 readme.hints.md (Normal file)
@@ -0,0 +1,3 @@

# Project Readme Hints

This is the initial readme hints file.
331 readme.md
@@ -1,34 +1,317 @@

# elasticlog
# @apiclient.xyz/elasticsearch

log to elasticsearch in a kibana compatible format
> 🔍 **Modern TypeScript client for Elasticsearch with built-in Kibana compatibility and advanced logging features**

## Availabililty
A powerful, type-safe wrapper around the official Elasticsearch client that provides intelligent log management, document handling, key-value storage, and fast data ingestion - all optimized for production use.

[](https://www.npmjs.com/package/elasticlog)
[](https://GitLab.com/pushrocks/elasticlog)
[](https://github.com/pushrocks/elasticlog)
[](https://pushrocks.gitlab.io/elasticlog/)
## Issue Reporting and Security

## Status for master
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.

[](https://GitLab.com/pushrocks/elasticlog/commits/master)
[](https://GitLab.com/pushrocks/elasticlog/commits/master)
[](https://www.npmjs.com/package/elasticlog)
[](https://david-dm.org/pushrocks/elasticlog)
[](https://www.bithound.io/github/pushrocks/elasticlog/master/dependencies/npm)
[](https://www.bithound.io/github/pushrocks/elasticlog)
[](https://snyk.io/test/npm/elasticlog)
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
[](https://nodejs.org/dist/latest-v6.x/docs/api/)
[](http://standardjs.com/)
## Features ✨

## Usage
- **🎯 SmartLog Destination** - Full-featured logging destination compatible with Kibana, automatic index rotation, and retention management
- **📦 ElasticDoc** - Advanced document management with piping sessions, snapshots, and automatic cleanup
- **🚀 FastPush** - High-performance bulk document insertion with automatic index management
- **💾 KVStore** - Simple key-value storage interface backed by Elasticsearch
- **🔧 TypeScript First** - Complete type safety with full TypeScript support
- **🌊 Data Streams** - Built-in support for Elasticsearch data streams
- **⚡ Production Ready** - Designed for high-throughput production environments

Use TypeScript for best in class instellisense.
## Installation 📦

For further information read the linked docs at the top of this README.
```bash
npm install @apiclient.xyz/elasticsearch
# or
pnpm install @apiclient.xyz/elasticsearch
```

> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
> | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
## Quick Start 🚀

[](https://push.rocks)
### SmartLog Destination

Perfect for application logging with automatic index rotation and Kibana compatibility:

```typescript
import { ElsSmartlogDestination } from '@apiclient.xyz/elasticsearch';

const logger = new ElsSmartlogDestination({
  indexPrefix: 'app-logs',
  indexRetention: 7, // Keep logs for 7 days
  node: 'http://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'your-password',
  },
});

// Log messages that automatically appear in Kibana
await logger.log({
  timestamp: Date.now(),
  type: 'increment',
  level: 'info',
  context: {
    company: 'YourCompany',
    companyunit: 'api-service',
    containerName: 'web-server',
    environment: 'production',
    runtime: 'node',
    zone: 'us-east-1',
  },
  message: 'User authentication successful',
  correlation: null,
});
```

### ElasticDoc - Document Management

Handle documents with advanced features like piping sessions and snapshots:

```typescript
import { ElasticDoc } from '@apiclient.xyz/elasticsearch';

const docManager = new ElasticDoc({
  index: 'products',
  node: 'http://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'your-password',
  },
});

// Start a piping session to manage document lifecycle
await docManager.startPipingSession({});

// Add or update documents
await docManager.pipeDocument({
  docId: 'product-001',
  timestamp: new Date().toISOString(),
  doc: {
    name: 'Premium Widget',
    price: 99.99,
    inStock: true
  },
});

await docManager.pipeDocument({
  docId: 'product-002',
  timestamp: new Date().toISOString(),
  doc: {
    name: 'Deluxe Gadget',
    price: 149.99,
    inStock: false
  },
});

// End session - automatically removes documents not in this session
await docManager.endPipingSession();

// Take and store snapshots with custom aggregations
await docManager.takeSnapshot(async (iterator, prevSnapshot) => {
  const aggregationData = [];
  for await (const doc of iterator) {
    aggregationData.push(doc);
  }
  return {
    date: new Date().toISOString(),
    aggregationData,
  };
});
```

### FastPush - Bulk Data Ingestion

Efficiently push large datasets with automatic index management:

```typescript
import { FastPush } from '@apiclient.xyz/elasticsearch';

const fastPush = new FastPush({
  node: 'http://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'your-password',
  },
});

const documents = [
  { id: 1, name: 'Document 1', data: 'Some data' },
  { id: 2, name: 'Document 2', data: 'More data' },
  // ... thousands more documents
];

// Push all documents with automatic batching
await fastPush.pushDocuments('bulk-data', documents, {
  deleteOldData: true, // Clear old data before inserting
});
```

### KVStore - Key-Value Storage

Simple key-value storage backed by the power of Elasticsearch:

```typescript
import { KVStore } from '@apiclient.xyz/elasticsearch';

const kvStore = new KVStore({
  index: 'app-config',
  node: 'http://localhost:9200',
  auth: {
    username: 'elastic',
    password: 'your-password',
  },
});

// Set values
await kvStore.set('api-key', 'sk-1234567890');
await kvStore.set('feature-flags', JSON.stringify({ newUI: true }));

// Get values
const apiKey = await kvStore.get('api-key');
console.log(apiKey); // 'sk-1234567890'

// Clear all data
await kvStore.clear();
```

## Core Classes 🏗️

### ElsSmartlogDestination

The main logging destination class that provides:
- Automatic index rotation based on date
- Configurable retention policies
- Kibana-compatible log format
- Data stream support
- Built-in scheduler for maintenance tasks

### ElasticDoc

Advanced document management with:
- Piping sessions for tracking document lifecycles
- Automatic cleanup of stale documents
- Snapshot functionality with custom processors
- Iterator-based document access
- Fast-forward mode for incremental processing

### FastPush

High-performance bulk operations:
- Automatic batching for optimal performance
- Index management (create, delete, clear)
- Dynamic mapping support
- Efficient bulk API usage

### KVStore

Simple key-value interface:
- Elasticsearch-backed storage
- Async/await API
- Automatic index initialization
- Clear and get operations

## Advanced Usage 🎓

### Index Rotation and Retention

```typescript
const logger = new ElsSmartlogDestination({
  indexPrefix: 'myapp',
  indexRetention: 30, // Keep 30 days of logs
  node: 'http://localhost:9200',
});

// Indices are automatically created as: myapp-2025-01-22
// Old indices are automatically deleted after 30 days
```

### Document Iteration

```typescript
// Iterate over all documents in an index
const iterator = docManager.getDocumentIterator();
for await (const doc of iterator) {
  console.log(doc);
}

// Only process new documents since last run
docManager.fastForward = true;
await docManager.startPipingSession({ onlyNew: true });
```

### Custom Snapshots

```typescript
await docManager.takeSnapshot(async (iterator, prevSnapshot) => {
  let totalValue = 0;
  let count = 0;

  for await (const doc of iterator) {
    totalValue += doc._source.price;
    count++;
  }

  return {
    date: new Date().toISOString(),
    aggregationData: {
      totalValue,
      averagePrice: totalValue / count,
      count,
      previousSnapshot: prevSnapshot,
    },
  };
});
```

## API Compatibility 🔄

This module is built on top of `@elastic/elasticsearch` v9.x and is compatible with:
- Elasticsearch 8.x and 9.x clusters
- Kibana 8.x and 9.x for log visualization
- OpenSearch (with some limitations)

## TypeScript Support 💙

Full TypeScript support with comprehensive type definitions:

```typescript
import type {
  IElasticDocConstructorOptions,
  ISnapshot,
  SnapshotProcessor
} from '@apiclient.xyz/elasticsearch';
```

## Performance Considerations ⚡

- **Bulk Operations**: FastPush uses 1000-document batches by default
- **Connection Pooling**: Reuses Elasticsearch client connections
- **Index Management**: Automatic index creation and deletion
- **Data Streams**: Built-in support for efficient log ingestion

## Best Practices 💡

1. **Always use authentication** in production environments
2. **Set appropriate retention policies** to manage storage costs
3. **Use piping sessions** to automatically clean up stale documents
4. **Leverage snapshots** for point-in-time analytics
5. **Configure index templates** for consistent mappings (a sketch follows below)

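For item 5 above, a minimal sketch of applying an index template, assuming direct use of the underlying `@elastic/elasticsearch` client (which the wrapper classes also expose as their `client` property). The template name and field mappings here are illustrative, not part of this package's API:

```typescript
import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' });

// Apply one mapping to every daily index the logger creates
// (e.g. app-logs-2025-01-22), so fields keep consistent types
// across index rotations.
await client.indices.putIndexTemplate({
  name: 'app-logs-template', // hypothetical template name
  index_patterns: ['app-logs-*'],
  template: {
    mappings: {
      properties: {
        '@timestamp': { type: 'date' },
        severity: { type: 'keyword' },
        message: { type: 'text' },
      },
    },
  },
});
```
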
## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
89 test/test.nonci.ts (Normal file)
@@ -0,0 +1,89 @@

import { tap, expect } from '@git.zone/tstest/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as elasticsearch from '../ts/index.js';

let testElasticLog: elasticsearch.ElsSmartlogDestination<any>;
let testElasticDoc: elasticsearch.ElasticDoc;

tap.test('first test', async () => {
  testElasticLog = new elasticsearch.ElsSmartlogDestination({
    indexPrefix: 'testprefix',
    indexRetention: 7,
    node: 'http://localhost:9200',
    auth: {
      username: 'elastic',
      password: 'YourPassword',
    },
  });
  expect(testElasticLog).toBeInstanceOf(elasticsearch.ElsSmartlogDestination);
});

tap.test('should send a message to Elasticsearch', async () => {
  await testElasticLog.log({
    timestamp: Date.now(),
    type: 'increment',
    level: 'info',
    context: {
      company: 'Lossless GmbH',
      companyunit: 'lossless.cloud',
      containerName: 'testcontainer',
      environment: 'test',
      runtime: 'node',
      zone: 'ship.zone',
    },
    message: 'GET https://myroute.to.a.cool.destination/sorare?hello=there',
    correlation: null,
  });
});

tap.test('should create an ElasticDoc instance', async () => {
  testElasticDoc = new elasticsearch.ElasticDoc({
    index: 'testindex',
    node: 'http://localhost:9200',
    auth: {
      username: 'elastic',
      password: 'YourPassword',
    },
  });
  expect(testElasticDoc).toBeInstanceOf(elasticsearch.ElasticDoc);
});

tap.test('should add and update documents in a piping session', async () => {
  await testElasticDoc.startPipingSession({});
  await testElasticDoc.pipeDocument({
    docId: '1',
    timestamp: new Date().toISOString(),
    doc: { name: 'doc1' },
  });
  await testElasticDoc.pipeDocument({
    docId: '2',
    timestamp: new Date().toISOString(),
    doc: { name: 'doc2' },
  });
  await testElasticDoc.pipeDocument({
    docId: '1',
    timestamp: new Date().toISOString(),
    doc: { name: 'updated doc1' },
  });
});

tap.test('should delete documents not part of the piping session', async () => {
  await testElasticDoc.endPipingSession();
});

tap.test('should take and store snapshot', async () => {
  await testElasticDoc.takeSnapshot(async (iterator, prevSnapshot) => {
    const aggregationData = [];
    for await (const doc of iterator) {
      // Sample aggregation: counting documents
      aggregationData.push(doc);
    }
    const snapshot = {
      date: new Date().toISOString(),
      aggregationData,
    };
    return snapshot;
  });
});

export default tap.start();
37 test/test.ts
@@ -1,37 +0,0 @@

import { expect, tap } from '@pushrocks/tapbundle';
import { Qenv } from '@pushrocks/qenv';
import * as elasticsearch from '../ts/index';

const testQenv = new Qenv('./', './.nogit/');

let testElasticLog: elasticsearch.ElasticSearch<any>;

tap.test('first test', async () => {
  testElasticLog = new elasticsearch.ElasticSearch({
    domain: process.env.ELK_DOMAIN,
    port: parseInt(process.env.ELK_PORT, 10),
    ssl: true,
    user: process.env.ELK_USER,
    pass: process.env.ELK_PASS
  });
  expect(testElasticLog).to.be.instanceOf(elasticsearch.ElasticSearch);
});

tap.test('should send a message to Elasticsearch', async () => {
  testElasticLog.log({
    timestamp: Date.now(),
    type: 'increment',
    level: 'info',
    context: {
      company: 'Lossless GmbH',
      companyunit: 'lossless.cloud',
      containerName: 'testcontainer',
      environment: 'test',
      runtime: 'node',
      zone: 'ship.zone'
    },
    message: 'hi, this is a testMessage'
  });
});

tap.start();
8 ts/00_commitinfo_data.ts (Normal file)
@@ -0,0 +1,8 @@

/**
 * autocreated commitinfo by @push.rocks/commitinfo
 */
export const commitinfo = {
  name: '@apiclient.xyz/elasticsearch',
  version: '2.0.17',
  description: 'log to elasticsearch in a kibana compatible format'
}
ts/elasticsearch.classes.elasticsearch.ts
@@ -1,89 +0,0 @@

// interfaces
import { Client as ElasticClient } from 'elasticsearch';
import { ILogContext, ILogPackage, ILogDestination } from '@pushrocks/smartlog-interfaces';

// other classes
import { LogScheduler } from './elasticsearch.classes.logscheduler';

export interface IStandardLogParams {
  message: string;
  severity: string;
}

export interface IElasticLogConstructorOptions {
  port: number;
  domain: string;
  ssl: boolean;
  user?: string;
  pass?: string;
}

export class ElasticSearch<T> {
  client: ElasticClient;
  logScheduler = new LogScheduler(this);

  /**
   * sets up an instance of Elastic log
   * @param optionsArg
   */
  constructor(optionsArg: IElasticLogConstructorOptions) {
    this.client = new ElasticClient({
      host: this.computeHostString(optionsArg),
      log: 'trace'
    });
  }

  /**
   * computes the host string from the constructor options
   * @param optionsArg
   */
  private computeHostString(optionsArg: IElasticLogConstructorOptions): string {
    let hostString = `${optionsArg.domain}:${optionsArg.port}`;
    if (optionsArg.user && optionsArg.pass) {
      hostString = `${optionsArg.user}:${optionsArg.pass}@${hostString}`;
    }
    if (optionsArg.ssl) {
      hostString = `https://${hostString}`;
    } else {
      hostString = `http://${hostString}`;
    }
    return hostString;
  }

  public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
    const now = new Date();
    if (this.logScheduler.logsScheduled && !scheduleOverwrite) {
      this.logScheduler.scheduleLog(logPackageArg);
      return;
    }
    this.client.index(
      {
        index: `smartlog-${now.getFullYear()}.${('0' + (now.getMonth() + 1)).slice(-2)}.${(
          '0' + now.getDate()
        ).slice(-2)}`,
        type: 'log',
        body: {
          '@timestamp': new Date(logPackageArg.timestamp).toISOString(),
          ...logPackageArg
        }
      },
      (error, response) => {
        if (error) {
          console.log('ElasticLog encountered an error:');
          console.log(error);
          this.logScheduler.addFailedLog(logPackageArg);
        } else {
          console.log(`ElasticLog: ${logPackageArg.message}`);
        }
      }
    );
  }

  get logDestination(): ILogDestination {
    return {
      handleLog: (smartlogPackageArg: ILogPackage) => {
        this.log(smartlogPackageArg);
      }
    }
  }
}
ts/elasticsearch.classes.logscheduler.ts
@@ -1,44 +0,0 @@

import { ElasticSearch, IStandardLogParams } from './elasticsearch.classes.elasticsearch';

export class LogScheduler {
  elasticLogRef: ElasticSearch<any>;
  logsScheduled = false;
  logStorage: any[] = [];

  constructor(elasticLogRefArg: ElasticSearch<any>) {
    this.elasticLogRef = elasticLogRefArg;
  }

  addFailedLog(objectArg: any | IStandardLogParams) {
    this.logStorage.push(objectArg);
    this.setRetry();
  }
  scheduleLog(logObject: any) {
    this.logStorage.push(logObject);
  }

  setRetry() {
    setTimeout(() => {
      const oldStorage = this.logStorage;
      this.logStorage = [];
      for (let logObject of oldStorage) {
        this.elasticLogRef.log(logObject, true);
      }
      if (this.logStorage.length === 0) {
        console.log('ElasticLog retry success!!!');
        this.logsScheduled = false;
      } else {
        console.log('ElasticLog retry failed');
        this.setRetry();
      }
    }, 5000);
  }

  deferSend() {
    if (!this.logsScheduled) {
      console.log('Retry ElasticLog in 5 seconds!');
      this.logsScheduled = true;
      this.setRetry();
    }
  }
}
ts/elasticsearch.plugins.ts
@@ -1,4 +0,0 @@

import * as elasticsearch from 'elasticsearch';
import * as smartdelay from '@pushrocks/smartdelay';
import * as smartlogInterfaces from '@pushrocks/smartlog-interfaces';
export { elasticsearch, smartdelay, smartlogInterfaces };
273
ts/els.classes.elasticdoc.ts
Normal file
273
ts/els.classes.elasticdoc.ts
Normal file
@@ -0,0 +1,273 @@
|
||||
import { Client as ElasticClient } from '@elastic/elasticsearch';

export interface IElasticDocConstructorOptions {
  index: string;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}

export interface ISnapshot {
  date: string;
  aggregationData: any;
}

export type SnapshotProcessor = (
  iterator: AsyncIterable<any>,
  prevSnapshot: ISnapshot | null,
) => Promise<ISnapshot>;

export class ElasticDoc {
  public client: ElasticClient;
  public index: string;
  private sessionDocs: Set<string> = new Set();
  private indexInitialized: boolean = false;
  private latestTimestamp: string | null = null; // Store the latest timestamp
  private onlyNew: boolean = false; // Whether to only pipe new docs
  public fastForward: boolean = false; // Whether to fast forward to the latest timestamp

  private BATCH_SIZE = 1000;

  constructor(options: IElasticDocConstructorOptions) {
    this.client = new ElasticClient({
      node: options.node,
      ...(options.auth && { auth: options.auth }),
    });
    this.index = options.index;
  }

  private async ensureIndexExists(doc: any) {
    if (!this.indexInitialized) {
      const indexExists = await this.client.indices.exists({
        index: this.index,
      });
      if (!indexExists) {
        const mappings = this.createMappingsFromDoc(doc);
        await this.client.indices.create({
          index: this.index,
          // mappings,
          settings: {
            // You can define the settings according to your requirements here
          },
        });
      }
      this.indexInitialized = true;
    }
  }

  private createMappingsFromDoc(doc: any): any {
    const properties: any = {};
    for (const key in doc) {
      if (key === '@timestamp') {
        properties[key] = { type: 'date' };
        continue;
      }
      properties[key] = {
        type: typeof doc[key] === 'number' ? 'float' : 'text',
      };
    }
    return { properties };
  }

  async startPipingSession(options: { onlyNew?: boolean }) {
    this.sessionDocs.clear();
    this.onlyNew = options.onlyNew ?? false;
    const indexExists = await this.client.indices.exists({ index: this.index });
    if (this.onlyNew && indexExists) {
      const response = await this.client.search({
        index: this.index,
        sort: '@timestamp:desc',
        size: 1,
      });

      const hit = response.hits.hits[0];
      this.latestTimestamp = hit?._source?.['@timestamp'] || null;

      if (this.latestTimestamp) {
        console.log(
          `Working in "onlyNew" mode. Hence we are omitting documents prior to ${this.latestTimestamp}`,
        );
      } else {
        console.log(
          `Working in "onlyNew" mode, but no documents found in index ${this.index}. Hence processing all documents now.`,
        );
      }
    } else if (this.onlyNew && !indexExists) {
      console.log(
        `Working in "onlyNew" mode, but index ${this.index} does not exist. Hence processing all documents now.`,
      );
    }
  }

  async pipeDocument(optionsArg: {
    docId: string;
    timestamp?: string | number;
    doc: any;
  }) {
    await this.ensureIndexExists(optionsArg.doc);

    const documentBody = {
      ...optionsArg.doc,
      ...(optionsArg.timestamp && { '@timestamp': optionsArg.timestamp }),
    };

    // If 'onlyNew' is true, compare the document timestamp with the latest timestamp
    if (this.onlyNew) {
      if (
        this.latestTimestamp &&
        optionsArg.timestamp <= this.latestTimestamp
      ) {
        // Document is not newer than the stored high-water mark: skip the write.
        this.fastForward = true;
      } else {
        this.fastForward = false;
        await this.client.index({
          index: this.index,
          id: optionsArg.docId,
          body: documentBody,
        });
      }
    } else {
      this.fastForward = false;
      await this.client.index({
        index: this.index,
        id: optionsArg.docId,
        body: documentBody,
      });
    }
    this.sessionDocs.add(optionsArg.docId);
  }

  async endPipingSession() {
    const allDocIds: string[] = [];
    const responseQueue = [];
    let response = await this.client.search({
      index: this.index,
      scroll: '1m',
      size: this.BATCH_SIZE,
    });
    while (true) {
      response.hits.hits.forEach((hit: any) => allDocIds.push(hit._id));
      if (!response.hits.hits.length) {
        break;
      }
      response = await this.client.scroll({
        scroll_id: response._scroll_id,
        scroll: '1m',
      });
    }

    // Delete every document that was not piped during this session.
    for (const docId of allDocIds) {
      if (!this.sessionDocs.has(docId)) {
        responseQueue.push({
          delete: {
            _index: this.index,
            _id: docId,
          },
        });

        if (responseQueue.length >= this.BATCH_SIZE) {
          await this.client.bulk({ refresh: true, body: responseQueue });
          responseQueue.length = 0;
        }
      }
    }

    if (responseQueue.length > 0) {
      await this.client.bulk({ refresh: true, body: responseQueue });
    }

    this.sessionDocs.clear();
  }

  async takeSnapshot(processIterator: SnapshotProcessor) {
    const snapshotIndex = `${this.index}_snapshots`;

    const indexExists = await this.client.indices.exists({
      index: snapshotIndex,
    });
    if (!indexExists) {
      await this.client.indices.create({
        index: snapshotIndex,
        mappings: {
          properties: {
            date: {
              type: 'date',
            },
            aggregationData: {
              type: 'object',
              enabled: true,
            },
          },
        },
      });
    }

    const documentIterator = this.getDocumentIterator();

    const newSnapshot = await processIterator(
      documentIterator,
      await this.getLastSnapshot(),
    );

    await this.storeSnapshot(newSnapshot);
  }

  private async getLastSnapshot(): Promise<ISnapshot | null> {
    const snapshotIndex = `${this.index}_snapshots`;
    const indexExists = await this.client.indices.exists({
      index: snapshotIndex,
    });

    if (!indexExists) {
      return null;
    }

    const response = await this.client.search({
      index: snapshotIndex,
      sort: 'date:desc',
      size: 1,
    });

    if (response.hits.hits.length > 0) {
      const hit = response.hits.hits[0];
      return {
        date: hit._source['date'],
        aggregationData: hit._source['aggregationData'],
      };
    } else {
      return null;
    }
  }

  private async *getDocumentIterator() {
    let response = await this.client.search({
      index: this.index,
      scroll: '1m',
      size: this.BATCH_SIZE,
    });
    while (true) {
      for (const hit of response.hits.hits) {
        yield hit._source;
      }

      if (!response.hits.hits.length) {
        break;
      }

      response = await this.client.scroll({
        scroll_id: response._scroll_id,
        scroll: '1m',
      });
    }
  }

  private async storeSnapshot(snapshot: ISnapshot) {
    await this.client.index({
      index: `${this.index}_snapshots`,
      body: snapshot,
    });
  }
}
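A minimal usage sketch for ElasticDoc, following the in-file usage-comment convention used elsewhere in this changeset; the node URL, credentials, index name, and documents are illustrative placeholders, not values from the diff:

// Usage sketch (hypothetical values):
// import { ElasticDoc } from './els.classes.elasticdoc.js';
//
// const elasticDoc = new ElasticDoc({
//   index: 'products',
//   node: 'http://localhost:9200',
//   auth: { username: 'elastic', password: 'changeme' },
// });
//
// await elasticDoc.startPipingSession({ onlyNew: true });
// await elasticDoc.pipeDocument({
//   docId: 'product-1',
//   timestamp: new Date().toISOString(),
//   doc: { name: 'Widget', price: 9.99 },
// });
// // endPipingSession deletes every document that was not piped in this
// // session, so the index ends up mirroring the piped set exactly.
// await elasticDoc.endPipingSession();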
106 ts/els.classes.elasticindex.ts (Normal file)
@@ -0,0 +1,106 @@
import * as plugins from './els.plugins.js';
import { ElsSmartlogDestination } from './els.classes.smartlogdestination.js';
import { type ILogPackage } from '@push.rocks/smartlog-interfaces';
import { Stringmap } from '@push.rocks/lik';

export class ElasticIndex {
  private stringmap = new Stringmap();
  private elasticSearchRef: ElsSmartlogDestination<any>;

  constructor(elasticSearchInstanceArg: ElsSmartlogDestination<ILogPackage>) {
    this.elasticSearchRef = elasticSearchInstanceArg;
  }

  public async ensureIndex(prefixArg: string, indexNameArg: string) {
    if (this.stringmap.checkString(indexNameArg)) {
      return indexNameArg;
    }

    const responseArg = await this.elasticSearchRef.client.cat
      .indices({
        format: 'json',
        bytes: 'mb',
      })
      .catch((err) => {
        console.log(err);
      });

    if (!responseArg) {
      throw new Error('Could not get valid response from elastic search');
    }

    if (Array.isArray(responseArg)) {
      const filteredIndices = responseArg.filter((indexObjectArg) => {
        return indexObjectArg.index.startsWith(prefixArg);
      });
      const filteredIndexNames = filteredIndices.map((indexObjectArg) => {
        return indexObjectArg.index;
      });
      await this.deleteOldIndices(prefixArg, filteredIndexNames);
    }

    let index = null;

    if (Array.isArray(responseArg)) {
      index = responseArg.find((indexItemArg) => {
        return indexItemArg.index === indexNameArg;
      });
    }

    if (!index) {
      await this.createNewIndex(indexNameArg);
    }

    this.stringmap.addString(indexNameArg);
    return index;
  }

  public async createNewIndex(indexNameArg: string) {
    const response = await this.elasticSearchRef.client.indices.create({
      wait_for_active_shards: 1,
      index: indexNameArg,
      mappings: {
        properties: {
          '@timestamp': {
            type: 'date',
          },
          logPackageArg: {
            properties: {
              payload: {
                type: 'object',
                dynamic: true,
              },
            },
          },
        },
      },
    });
  }

  public async deleteOldIndices(prefixArg: string, indicesArray: string[]) {
    const todayAsUnix: number = Date.now();
    const retentionPeriodAsUnix: number = plugins.smarttime.units.days(
      this.elasticSearchRef.indexRetention,
    );
    for (const indexName of indicesArray) {
      if (!indexName.startsWith(prefixArg)) continue;
      const indexRegex = new RegExp(
        `^${prefixArg}-([0-9]*)-([0-9]*)-([0-9]*)$`,
      );
      const regexResult = indexRegex.exec(indexName);
      if (!regexResult) continue; // index name does not follow the prefix-YYYY-MM-DD pattern
      const dateAsUnix: number = new Date(
        `${regexResult[1]}-${regexResult[2]}-${regexResult[3]}`,
      ).getTime();
      if (todayAsUnix - retentionPeriodAsUnix > dateAsUnix) {
        console.log(`found old index ${indexName}`);
        const response = await this.elasticSearchRef.client.indices
          .delete({
            index: indexName,
          })
          .catch((err) => {
            console.log(err);
          });
      }
    }
  }
}
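ElasticIndex is an internal helper of ElsSmartlogDestination rather than a public entry point; a sketch of the ensure-then-prune flow it implements, assuming `destination` is an already constructed ElsSmartlogDestination and that index names follow the prefix-YYYY-MM-DD pattern the retention regex expects:

// Sketch (assumes `destination` is an ElsSmartlogDestination instance):
// const elasticIndex = new ElasticIndex(destination);
// // Creates 'logs-2024-01-15' if it does not exist yet, and deletes any
// // 'logs-*' index older than destination.indexRetention days.
// await elasticIndex.ensureIndex('logs', 'logs-2024-01-15');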
66 ts/els.classes.elasticscheduler.ts (Normal file)
@@ -0,0 +1,66 @@
import {
  ElsSmartlogDestination,
  type IStandardLogParams,
} from './els.classes.smartlogdestination.js';

export class ElasticScheduler {
  elasticSearchRef: ElsSmartlogDestination<any>;
  docsScheduled = false;
  docsStorage: any[] = [];

  // maximum size of the buffer
  maxBufferSize = 500;

  constructor(elasticLogRefArg: ElsSmartlogDestination<any>) {
    this.elasticSearchRef = elasticLogRefArg;
  }

  public addFailedDoc(objectArg: any | IStandardLogParams) {
    this.addToStorage(objectArg);
    this.setRetry();
  }

  public scheduleDoc(logObject: any) {
    this.addToStorage(logObject);
  }

  private addToStorage(logObject: any) {
    this.docsStorage.push(logObject);

    // if buffer is full, send logs immediately
    if (this.docsStorage.length >= this.maxBufferSize) {
      this.flushLogsToElasticSearch();
    }
  }

  private flushLogsToElasticSearch() {
    const oldStorage = this.docsStorage;
    this.docsStorage = [];

    for (let logObject of oldStorage) {
      this.elasticSearchRef.log(logObject, true);
    }
  }

  public setRetry() {
    setTimeout(() => {
      this.flushLogsToElasticSearch();

      if (this.docsStorage.length === 0) {
        console.log('ElasticLog retry success!!!');
        this.docsScheduled = false;
      } else {
        console.log('ElasticLog retry failed');
        this.setRetry();
      }
    }, 5000);
  }

  public deferSend() {
    if (!this.docsScheduled) {
      console.log('Retry ElasticLog in 5 seconds!');
      this.docsScheduled = true;
      this.setRetry();
    }
  }
}
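The scheduler exists to buffer documents while Elasticsearch is unreachable and to replay them on a fixed 5-second retry loop. A sketch of the failure path, with `destination` again standing in for the owning ElsSmartlogDestination:

// Sketch (assumes `destination` is an ElsSmartlogDestination instance):
// const scheduler = new ElasticScheduler(destination);
// // A write failed: buffer the doc and start the 5-second retry loop.
// scheduler.addFailedDoc({ message: 'write failed', severity: 'error' });
// // Mark the destination as deferring so subsequent docs are buffered
// // (ElsSmartlogDestination.log checks docsScheduled) until a retry succeeds.
// scheduler.deferSend();
// scheduler.scheduleDoc({ message: 'queued while offline', severity: 'info' });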
67 ts/els.classes.fastpush.ts (Normal file)
@@ -0,0 +1,67 @@
import { Client as ElasticClient } from '@elastic/elasticsearch';

interface FastPushOptions {
  deleteOldData?: boolean; // Clear the index
  deleteIndex?: boolean; // Delete the entire index
}

export class FastPush {
  private client: ElasticClient;

  constructor(node: string, auth?: { username: string; password: string }) {
    this.client = new ElasticClient({
      node: node,
      ...(auth && { auth: auth }),
    });
  }

  async pushToIndex(
    indexName: string,
    docArray: any[],
    options?: FastPushOptions,
  ) {
    if (docArray.length === 0) return;

    const indexExists = await this.client.indices.exists({ index: indexName });

    if (indexExists) {
      if (options?.deleteIndex) {
        await this.client.indices.delete({ index: indexName });
      } else if (options?.deleteOldData) {
        await this.client.deleteByQuery({
          index: indexName,
          query: {
            match_all: {},
          },
        });
      }
    }

    if (!indexExists || options?.deleteIndex) {
      // Create index with mappings (for simplicity, we use dynamic mapping)
      await this.client.indices.create({
        index: indexName,
        mappings: {
          dynamic: 'true',
        },
      });
    }

    // Bulk insert documents
    const bulkBody = [];
    for (const doc of docArray) {
      bulkBody.push({
        index: {
          _index: indexName,
        },
      });
      bulkBody.push(doc);
    }

    await this.client.bulk({ body: bulkBody });
  }
}

// Usage example:
// const fastPush = new FastPush('http://localhost:9200', { username: 'elastic', password: 'password' });
// fastPush.pushToIndex('my_index', [{ name: 'John', age: 30 }, { name: 'Jane', age: 25 }], { deleteOldData: true });
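One caveat the usage example above does not cover: in the Elasticsearch client, a bulk request resolves successfully even when individual items are rejected, with per-item failures reported in the response body. Callers needing delivery guarantees could extend pushToIndex along these lines (a sketch, not part of the diff):

// const bulkResponse = await this.client.bulk({ body: bulkBody });
// if (bulkResponse.errors) {
//   const failed = bulkResponse.items.filter((item: any) => item.index?.error);
//   console.log(`FastPush: ${failed.length} documents failed to index`);
// }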
109 ts/els.classes.kvstore.ts (Normal file)
@@ -0,0 +1,109 @@
import * as plugins from './els.plugins.js';
import { Client as ElasticClient } from '@elastic/elasticsearch';

export interface IElasticKVStoreConstructorOptions {
  index: string;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}

export class ElasticKVStore {
  public client: ElasticClient;
  public index: string;
  private readyDeferred: any;

  constructor(options: IElasticKVStoreConstructorOptions) {
    this.client = new ElasticClient({
      node: options.node,
      ...(options.auth && { auth: options.auth }),
    });
    this.index = options.index;
    this.readyDeferred = plugins.smartpromise.defer();
    this.setupIndex();
  }

  private async setupIndex() {
    try {
      const indexExists = await this.client.indices.exists({
        index: this.index,
      });

      if (!indexExists) {
        await this.client.indices.create({
          index: this.index,
          mappings: {
            properties: {
              key: {
                type: 'keyword',
              },
              value: {
                type: 'text',
              },
            },
          },
        });
      }
      this.readyDeferred.resolve();
    } catch (err) {
      this.readyDeferred.reject(err);
    }
  }

  async set(key: string, value: string) {
    await this.readyDeferred.promise;
    await this.client.index({
      index: this.index,
      id: key,
      body: {
        key,
        value,
      },
    });
  }

  async get(key: string): Promise<string | null> {
    await this.readyDeferred.promise;

    try {
      const response = await this.client.get({
        index: this.index,
        id: key,
      });
      return response._source['value'];
    } catch (error) {
      if (error.meta && error.meta.statusCode === 404) {
        return null;
      }
      throw error;
    }
  }

  async delete(key: string) {
    await this.readyDeferred.promise;

    try {
      await this.client.delete({
        index: this.index,
        id: key,
      });
    } catch (error) {
      if (error.meta && error.meta.statusCode !== 404) {
        throw error;
      }
    }
  }

  async clear() {
    await this.readyDeferred.promise;

    await this.client.deleteByQuery({
      index: this.index,
      query: {
        match_all: {},
      },
    });
  }
}
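A usage sketch for the key-value store; the node URL and keys are placeholders. Note that get and delete address documents by id, so reads after writes are immediate (Elasticsearch's get-by-id API is realtime), while clear goes through deleteByQuery and only becomes visible after a refresh:

// Usage sketch (hypothetical values):
// const kvStore = new ElasticKVStore({
//   index: 'app-settings',
//   node: 'http://localhost:9200',
// });
// await kvStore.set('featureFlag', 'enabled');
// const value = await kvStore.get('featureFlag'); // 'enabled'
// await kvStore.delete('featureFlag');
// await kvStore.get('featureFlag'); // null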
77 ts/els.classes.smartlogdestination.ts (Normal file)
@@ -0,0 +1,77 @@
import { Client as ElasticClient } from '@elastic/elasticsearch';
import type {
  ILogContext,
  ILogPackage,
  ILogDestination,
} from '@push.rocks/smartlog-interfaces';
import { ElasticScheduler } from './els.classes.elasticscheduler.js';
import { ElasticIndex } from './els.classes.elasticindex.js';

export interface IStandardLogParams {
  message: string;
  severity: string;
}

export interface IElasticSearchConstructorOptions {
  indexPrefix: string;
  indexRetention: number;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}

export class ElsSmartlogDestination<T> {
  public client: ElasticClient;
  public elasticScheduler = new ElasticScheduler(this);
  public elasticIndex: ElasticIndex = new ElasticIndex(this);

  public indexPrefix: string;
  public indexRetention: number;

  constructor(optionsArg: IElasticSearchConstructorOptions) {
    this.client = new ElasticClient({
      node: optionsArg.node,
      ...(optionsArg.auth && { auth: optionsArg.auth }),
    });
    this.indexPrefix = `${optionsArg.indexPrefix}`;
    this.indexRetention = optionsArg.indexRetention;
    this.setupDataStream();
  }

  private async setupDataStream() {
    // Define an index template
    await this.client.indices.putIndexTemplate({
      name: `${this.indexPrefix}_template`,
      index_patterns: [`${this.indexPrefix}-*`],
      data_stream: {},
    });
  }

  public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
    const indexToUse = `${this.indexPrefix}-data-stream`; // Use data stream name

    if (this.elasticScheduler.docsScheduled && !scheduleOverwrite) {
      this.elasticScheduler.scheduleDoc(logPackageArg);
      return;
    }

    await this.client.index({
      index: indexToUse,
      body: {
        '@timestamp': new Date(logPackageArg.timestamp).toISOString(),
        ...logPackageArg,
      },
    });
  }

  get logDestination(): ILogDestination {
    return {
      handleLog: async (smartlogPackageArg: ILogPackage) => {
        await this.log(smartlogPackageArg);
      },
    };
  }
}
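A sketch of wiring the destination into a smartlog logger, assuming the usual @push.rocks/smartlog API (the Smartlog import and its constructor options come from that package, not from this diff):

// import { Smartlog } from '@push.rocks/smartlog';
// import { ElsSmartlogDestination } from './els.classes.smartlogdestination.js';
//
// const destination = new ElsSmartlogDestination({
//   indexPrefix: 'myapp-logs',
//   indexRetention: 7, // days
//   node: 'http://localhost:9200',
// });
//
// const logger = new Smartlog({ logContext: { /* ILogContext fields */ } });
// logger.addLogDestination(destination.logDestination);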
15 ts/els.plugins.ts (Normal file)
@@ -0,0 +1,15 @@
import * as elasticsearch from '@elastic/elasticsearch';
import * as lik from '@push.rocks/lik';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartlogInterfaces from '@push.rocks/smartlog-interfaces';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smarttime from '@push.rocks/smarttime';

export {
  elasticsearch,
  lik,
  smartdelay,
  smartlogInterfaces,
  smartpromise,
  smarttime,
};
@@ -1 +1,4 @@
export * from './elasticsearch.classes.elasticsearch';
export * from './els.classes.smartlogdestination.js';
export * from './els.classes.fastpush.js';
export * from './els.classes.elasticdoc.js';
export * from './els.classes.kvstore.js';
12 tsconfig.json (Normal file)
@@ -0,0 +1,12 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
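Worth noting for readers of the imports above: with module and moduleResolution set to NodeNext, relative imports must spell out the extension of the emitted file, which is why the TypeScript sources in this changeset import './els.plugins.js' rather than './els.plugins':

// import * as plugins from './els.plugins';    // fails to resolve under NodeNext
// import * as plugins from './els.plugins.js'; // resolves to the compiled output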
17 tslint.json
@@ -1,17 +0,0 @@
{
  "extends": ["tslint:latest", "tslint-config-prettier"],
  "rules": {
    "semicolon": [true, "always"],
    "no-console": false,
    "ordered-imports": false,
    "object-literal-sort-keys": false,
    "member-ordering": {
      "options": {
        "order": [
          "static-method"
        ]
      }
    }
  },
  "defaultSeverity": "warning"
}