Compare commits

...

98 Commits

Author SHA1 Message Date
53673e37cb v2.0.17 2025-11-29 09:48:31 +00:00
0701207acd fix(ci): Update CI workflows and build config; bump dependencies; code style and TS config fixes 2025-11-29 09:48:31 +00:00
36d7cb69a3 2.0.16 2023-08-30 12:49:39 +02:00
4924e0a151 fix(core): update 2023-08-30 12:49:39 +02:00
cd98529541 2.0.15 2023-08-30 12:45:39 +02:00
e6a9282987 fix(core): update 2023-08-30 12:45:39 +02:00
9dcadcd611 2.0.14 2023-08-29 12:11:16 +02:00
4b045ff988 fix(core): update 2023-08-29 12:11:15 +02:00
023dd1b519 2.0.13 2023-08-29 12:10:39 +02:00
4971385eae fix(core): update 2023-08-29 12:10:38 +02:00
e209839962 2.0.12 2023-08-29 11:48:33 +02:00
e44365b674 fix(core): update 2023-08-29 11:48:32 +02:00
bd154089c3 2.0.11 2023-08-29 11:44:49 +02:00
0be693da60 fix(core): update 2023-08-29 11:44:48 +02:00
040c93dec3 2.0.10 2023-08-29 11:43:07 +02:00
21e55bd341 fix(core): update 2023-08-29 11:43:07 +02:00
f1d04fe63c 2.0.9 2023-08-29 11:33:07 +02:00
49c4660131 fix(core): update 2023-08-29 11:33:06 +02:00
e5fd0361fc 2.0.8 2023-08-29 11:15:23 +02:00
d6a291d8d4 fix(core): update 2023-08-29 11:15:22 +02:00
fc87fd7ab7 2.0.7 2023-08-29 11:11:25 +02:00
203444d1a6 fix(core): update 2023-08-29 11:11:25 +02:00
cdbf1fd316 2.0.6 2023-08-29 10:03:38 +02:00
10108d8338 fix(core): update 2023-08-29 10:03:37 +02:00
36abb2c7c0 2.0.5 2023-08-29 10:00:43 +02:00
8f00d90bb1 fix(core): update 2023-08-29 10:00:42 +02:00
41f1758d46 2.0.4 2023-08-29 09:56:59 +02:00
2b7cd33996 fix(core): update 2023-08-29 09:56:58 +02:00
80a04ca893 2.0.3 2023-08-29 09:31:43 +02:00
93f739c79e fix(core): update 2023-08-29 09:31:42 +02:00
a1b5bf5c0c 2.0.2 2023-08-29 09:18:16 +02:00
084c5d137c fix(core): update 2023-08-29 09:18:15 +02:00
4aa0592bd5 2.0.1 2023-08-25 16:07:53 +02:00
0313e5045a fix(core): update 2023-08-25 16:07:52 +02:00
7442d93f58 2.0.0 2023-08-25 16:07:17 +02:00
35e70aae62 BREAKING CHANGE(core): update 2023-08-25 16:07:16 +02:00
cc30f43a28 1.0.56 2023-08-25 16:06:53 +02:00
100a8fc12e fix(core): update 2023-08-25 16:06:52 +02:00
f32403961d 1.0.55 2023-08-18 07:58:10 +02:00
9734949241 fix(core): update 2023-08-18 07:58:09 +02:00
b70444824b 1.0.54 2023-08-17 20:53:28 +02:00
0eb0903667 fix(core): update 2023-08-17 20:53:28 +02:00
4d11dca22c 1.0.53 2023-08-17 19:24:35 +02:00
3079adbbd9 fix(core): update 2023-08-17 19:24:34 +02:00
bc9de8e4d6 1.0.52 2023-08-17 19:21:27 +02:00
3fa7d66236 fix(core): update 2023-08-17 19:21:26 +02:00
2a0b0b2478 1.0.51 2023-08-14 13:09:21 +02:00
35e99663a4 fix(core): update 2023-08-14 13:09:20 +02:00
2cc5855206 1.0.50 2023-08-14 10:58:29 +02:00
8f9f2fdf05 fix(core): update 2023-08-14 10:58:28 +02:00
7ef36b5c40 1.0.49 2023-08-02 03:17:30 +02:00
67a8f3fe4d fix(core): update 2023-08-02 03:17:30 +02:00
5ae2c37519 1.0.48 2023-08-02 03:11:18 +02:00
fcb67ec878 fix(core): update 2023-08-02 03:11:17 +02:00
9e25494f8f 1.0.47 2023-08-01 15:05:46 +02:00
dd8ba4736a fix(core): update 2023-08-01 15:05:45 +02:00
d395310410 1.0.46 2023-08-01 12:38:54 +02:00
49233ce45f fix(core): update 2023-08-01 12:38:53 +02:00
fb93dce8bc 1.0.45 2023-08-01 12:24:22 +02:00
30cbc05aa2 fix(core): update 2023-08-01 12:24:22 +02:00
2a595a1a9a 1.0.44 2023-07-05 23:45:48 +02:00
d62b18e93c fix(core): update 2023-07-05 23:45:48 +02:00
d6176f820a 1.0.43 2023-07-05 17:27:24 +02:00
0ebc1d5288 fix(core): update 2023-07-05 17:27:24 +02:00
2b0003546a 1.0.42 2023-07-05 15:31:01 +02:00
60617f2fca fix(core): update 2023-07-05 15:31:00 +02:00
9c767d07e4 1.0.41 2023-07-05 10:26:00 +02:00
f3aa94dcb7 fix(core): update 2023-07-05 10:25:59 +02:00
a0be0edd9d 1.0.40 2023-07-05 10:22:53 +02:00
ad24ba2f5d fix(core): update 2023-07-05 10:22:53 +02:00
b0cf4bb27f 1.0.39 2023-07-05 09:38:43 +02:00
fd29ceab80 fix(core): update 2023-07-05 09:38:43 +02:00
bcca434a24 1.0.38 2023-07-04 13:57:55 +02:00
d4a9ad8f67 fix(core): update 2023-07-04 13:57:55 +02:00
d4c7c33668 1.0.37 2023-07-04 10:50:19 +02:00
8340257b00 fix(core): update 2023-07-04 10:50:18 +02:00
32265e83f3 1.0.36 2023-07-04 10:13:15 +02:00
e2df11cea2 fix(core): update 2023-07-04 10:13:15 +02:00
2719ba28f6 1.0.35 2023-07-04 09:59:58 +02:00
6d78a7ba0c fix(core): update 2023-07-04 09:59:58 +02:00
5897c6e7de 1.0.34 2023-07-04 09:46:04 +02:00
20369614a2 fix(core): update 2023-07-04 09:46:04 +02:00
7ceaf694fe 1.0.33 2023-07-04 09:33:19 +02:00
391c6bd45d fix(core): update 2023-07-04 09:33:18 +02:00
1a702071c6 1.0.32 2023-07-04 09:13:45 +02:00
0fe2f6a4ae fix(core): update 2023-07-04 09:13:44 +02:00
20d04413c9 1.0.31 2023-07-04 09:13:15 +02:00
e56439e9f4 fix(core): update 2023-07-04 09:13:14 +02:00
c9a9434cd9 1.0.30 2019-11-03 00:51:18 +01:00
5d98dd9089 fix(core): update 2019-11-03 00:51:18 +01:00
2d635fdf7c 1.0.29 2019-11-03 00:49:35 +01:00
1dbf3724d0 fix(core): update 2019-11-03 00:49:35 +01:00
cc7eb8c139 1.0.28 2019-11-03 00:47:19 +01:00
0e01ecbd1a fix(core): update 2019-11-03 00:47:18 +01:00
2d21b40a76 1.0.27 2019-11-02 01:42:04 +01:00
2d1a5cdc50 fix(core): update 2019-11-02 01:42:03 +01:00
20a41d3381 1.0.26 2018-12-13 00:19:50 +01:00
b2019b33f8 fix(core): update 2018-12-13 00:19:50 +01:00
32 changed files with 9779 additions and 1798 deletions

View File

@@ -0,0 +1,66 @@
name: Default (not tags)
on:
push:
tags-ignore:
- '**'
env:
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @ship.zone/npmci
- name: Run npm prepare
run: npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
needs: security
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build

View File

@@ -0,0 +1,124 @@
name: Default (tags)
on:
push:
tags:
- '*'
env:
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
security:
runs-on: ubuntu-latest
continue-on-error: true
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Audit production dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --prod
continue-on-error: true
- name: Audit development dependencies
run: |
npmci command npm config set registry https://registry.npmjs.org
npmci command pnpm audit --audit-level=high --dev
continue-on-error: true
test:
if: ${{ always() }}
needs: security
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Test stable
run: |
npmci node install stable
npmci npm install
npmci npm test
- name: Test build
run: |
npmci node install stable
npmci npm install
npmci npm build
release:
needs: test
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Release
run: |
npmci node install stable
npmci npm publish
metadata:
needs: test
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
container:
image: ${{ env.IMAGE }}
continue-on-error: true
steps:
- uses: actions/checkout@v3
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Code quality
run: |
npmci command npm install -g typescript
npmci npm install
- name: Trigger
run: npmci trigger
- name: Build docs and upload artifacts
run: |
npmci node install stable
npmci npm install
pnpm install -g @git.zone/tsdoc
npmci command tsdoc
continue-on-error: true

22
.gitignore vendored
View File

@@ -1,5 +1,23 @@
.nogit/
node_modules/
# artifacts
coverage/
public/
pages/
# installs
node_modules/
# caches
.yarn/
.cache/
.rpt2_cache
# builds
dist/
dist_*/
# AI
.claude/
.serena/
#------# custom

View File

@@ -1,125 +0,0 @@
# gitzone standard
image: hosttoday/ht-docker-node:npmci
cache:
paths:
- .npmci_cache/
key: "$CI_BUILD_STAGE"
stages:
- security
- test
- release
- metadata
# ====================
# security stage
# ====================
mirror:
stage: security
script:
- npmci git mirror
tags:
- docker
- notpriv
snyk:
stage: security
script:
- npmci npm prepare
- npmci command npm install -g snyk
- npmci command npm install --ignore-scripts
- npmci command snyk test
tags:
- docker
- notpriv
# ====================
# test stage
# ====================
testLTS:
stage: test
script:
- npmci npm prepare
- npmci node install lts
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
- notpriv
testSTABLE:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
- notpriv
release:
stage: release
script:
- npmci node install stable
- npmci npm publish
only:
- tags
tags:
- docker
- notpriv
# ====================
# metadata stage
# ====================
codequality:
stage: metadata
image: docker:stable
allow_failure: true
services:
- docker:stable-dind
script:
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- docker run
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
paths: [codeclimate.json]
tags:
- docker
- priv
trigger:
stage: metadata
script:
- npmci trigger
only:
- tags
tags:
- docker
- notpriv
pages:
image: hosttoday/ht-docker-node:npmci
stage: metadata
script:
- npmci command npm install -g typedoc typescript
- npmci npm prepare
- npmci npm install
- npmci command typedoc --module "commonjs" --target "ES2016" --out public/ ts/
tags:
- docker
- notpriv
only:
- tags
artifacts:
expire_in: 1 week
paths:
- public
allow_failure: true

4
.snyk
View File

@@ -1,4 +0,0 @@
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
version: v1.12.0
ignore: {}
patch: {}

11
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,11 @@
{
"version": "0.2.0",
"configurations": [
{
"command": "npm test",
"name": "Run npm test",
"request": "launch",
"type": "node-terminal"
}
]
}

26
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,26 @@
{
"json.schemas": [
{
"fileMatch": ["/npmextra.json"],
"schema": {
"type": "object",
"properties": {
"npmci": {
"type": "object",
"description": "settings for npmci"
},
"gitzone": {
"type": "object",
"description": "settings for gitzone",
"properties": {
"projectType": {
"type": "string",
"enum": ["website", "element", "service", "npm", "wcc"]
}
}
}
}
}
}
]
}

100
changelog.md Normal file
View File

@@ -0,0 +1,100 @@
# Changelog
## 2025-11-29 - 2.0.17 - fix(ci)
Update CI workflows and build config; bump dependencies; code style and TS config fixes
- Gitea workflows updated: swapped CI image to code.foss.global, adjusted NPMCI_COMPUTED_REPOURL and replaced @shipzone/npmci with @ship.zone/npmci; tsdoc package path updated.
- Removed legacy .gitlab-ci.yml (migrated CI to .gitea workflows).
- Bumped dependencies and devDependencies (e.g. @elastic/elasticsearch -> ^9.2.0, @git.zone/* packages, @push.rocks/* packages) and added repository/bugs/homepage/pnpm/packageManager metadata to package.json.
- Tests updated: import path change to @git.zone/tstest/tapbundle and test runner export changed to default export (export default tap.start()).
- TypeScript config changes: module and moduleResolution set to NodeNext and added exclude for dist_*/**/*.d.ts.
- Code cleanups and formatting: normalized object/argument formatting, trailing commas, safer ElasticClient call shapes (explicit option objects), and minor refactors across ElasticDoc, FastPush, KVStore, ElasticIndex, ElasticScheduler and smartlog destination.
- Added .gitignore entries for local AI tool directories and added readme.hints.md and npmextra.json.
## 2023-08-30 - 2.0.2..2.0.16 - core
Series of maintenance releases and small bugfixes on the 2.0.x line.
- Multiple "fix(core): update" commits across 2.0.2 → 2.0.16 addressing small bugs and stability/maintenance issues.
- No single large feature added in these patch releases; recommended to consult individual release diffs if you need a precise change per patch.
## 2023-08-25 - 2.0.0 - core
Major 2.0.0 release containing core updates and the transition from the 1.x line.
- Bumped major version to 2.0.0 with core updates.
- This release follows a breaking-change update introduced on the 1.x line (see 1.0.56 below). Review breaking changes before upgrading.
## 2023-08-25 - 1.0.56 - core (BREAKING CHANGE)
Breaking change introduced on the 1.x line.
- BREAKING CHANGE: core updated. Consumers should review the change and adapt integration code before upgrading from 1.0.55 → 1.0.56 (or migrating to 2.0.x).
## 2023-08-18 - 1.0.40..1.0.55 - maintenance
Maintenance and fixes across many 1.0.x releases (mid 2023).
- Numerous "fix(core): update" commits across 1.0.40 → 1.0.55 addressing stability and minor bug fixes.
- Includes smaller testing updates (e.g., fix(test): update in the 1.0.x series).
## 2023-07-05 - 1.0.32..1.0.44 - maintenance
Maintenance sweep in the 1.0.x line (July 2023).
- Multiple small core fixes and updates across these patch releases.
- No large feature additions; stability and incremental improvements only.
## 2019-11-02 - 1.0.26..1.0.30 - maintenance
Patch-level fixes and cleanup in late 2019.
- Several "fix(core): update" releases to address minor issues and keep dependencies up to date.
## 2018-11-10 - 1.0.20 - core
Cleanup related to indices.
- fix(clean up old indices): update — housekeeping and cleanup of old indices.
## 2018-11-03 - 1.0.13 - core
Security/tooling update.
- fix(core): add snyk — added Snyk related changes (security/scan tooling integration).
## 2018-09-15 - 1.0.11 - core
Dependency and compatibility updates.
- fix(core): update dependencies and bonsai.io compatibility — updated dependencies and ensured compatibility with bonsai.io.
## 2018-08-12 - 1.0.9 - test
Testing improvements.
- fix(test): update — improvements/adjustments to test suite.
## 2018-03-03 - 1.0.7 - system
System-level change.
- "system change" — internal/system modification (no public API feature).
## 2018-01-27 - 1.0.4 - quality/style
Coverage and style updates.
- adjust coverageTreshold — adjusted test coverage threshold.
- update style / update — code style and minor cleanup.
## 2018-01-27 - 1.0.3 - core (feat)
Winston logging integration (added, later removed in a subsequent release).
- feat(core): implement winston support — initial addition of Winston logging support.
## 2018-01-27 - 1.0.6 - winston (fix)
Removal of previously added logging integration.
- fix(winston): remove winston — removed Winston integration introduced earlier.
## 2018-01-26 - 1.0.2 - core (feat)
Index generation improvement.
- feat(core): update index generation — improvements to index generation logic.
## 2018-01-24 - 1.0.1 - core (initial)
Project initial commit and initial cleanup.
- feat(core): initial commit — project bootstrap.
- fix(core): cleanup — initial cleanup and adjustments after the first commit.
Note: Versions that only contain bare version-tag commits (commit messages identical to the version string) have been summarized as ranges above. For detailed per-patch changes consult individual release diffs.

View File

@@ -2,5 +2,16 @@
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"gitscope": "mojoio",
"gitrepo": "elasticsearch",
"description": "log to elasticsearch in a kibana compatible format",
"npmPackagename": "@mojoio/elasticsearch",
"license": "MIT"
}
}
}
}

1298
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,34 +1,59 @@
{
"name": "@mojoio/elasticsearch",
"version": "1.0.25",
"name": "@apiclient.xyz/elasticsearch",
"version": "2.0.17",
"private": false,
"description": "log to elasticsearch in a kibana compatible format",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"author": "Lossless GmbH",
"license": "MIT",
"scripts": {
"test": "(tstest test/)",
"format": "(gitzone format)",
"build": "(tsbuild)"
"build": "(tsbuild --allowimplicitany)",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@gitzone/tsbuild": "^2.0.22",
"@gitzone/tsrun": "^1.1.13",
"@gitzone/tstest": "^1.0.15",
"@pushrocks/qenv": "^2.0.2",
"@pushrocks/tapbundle": "^3.0.7",
"@types/node": "^10.12.2",
"tslint": "^5.11.0",
"tslint-config-prettier": "^1.15.0"
"@git.zone/tsbuild": "^3.1.0",
"@git.zone/tsrun": "^2.0.0",
"@git.zone/tstest": "^3.1.3",
"@push.rocks/qenv": "^6.1.3",
"@types/node": "^24.10.1"
},
"dependencies": {
"@pushrocks/lik": "^3.0.2",
"@pushrocks/smartdelay": "^2.0.2",
"@pushrocks/smartlog-interfaces": "^2.0.2",
"@pushrocks/smartpromise": "^2.0.5",
"@pushrocks/smarttime": "^3.0.2",
"@types/elasticsearch": "^5.0.28",
"elasticsearch": "^15.2.0"
}
"@elastic/elasticsearch": "^9.2.0",
"@push.rocks/lik": "^6.2.2",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartlog-interfaces": "^3.0.2",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smarttime": "^4.1.1"
},
"files": [
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
"readme.md"
],
"type": "module",
"browserslist": [
"last 1 chrome versions"
],
"repository": {
"type": "git",
"url": "https://gitlab.com/mojoio/elasticsearch.git"
},
"bugs": {
"url": "https://gitlab.com/mojoio/elasticsearch/issues"
},
"homepage": "https://gitlab.com/mojoio/elasticsearch#readme",
"pnpm": {
"overrides": {}
},
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
}

8236
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,2 @@
vars:
- ELK_DOMAIN
- ELK_PORT
- ELK_USER
- ELK_PASS
required:

3
readme.hints.md Normal file
View File

@@ -0,0 +1,3 @@
# Project Readme Hints
This is the initial readme hints file.

331
readme.md
View File

@@ -1,34 +1,317 @@
# elasticlog
# @apiclient.xyz/elasticsearch
log to elasticsearch in a kibana compatible format
> 🔍 **Modern TypeScript client for Elasticsearch with built-in Kibana compatibility and advanced logging features**
## Availabililty
A powerful, type-safe wrapper around the official Elasticsearch client that provides intelligent log management, document handling, key-value storage, and fast data ingestion - all optimized for production use.
[![npm](https://pushrocks.gitlab.io/assets/repo-button-npm.svg)](https://www.npmjs.com/package/elasticlog)
[![git](https://pushrocks.gitlab.io/assets/repo-button-git.svg)](https://GitLab.com/pushrocks/elasticlog)
[![git](https://pushrocks.gitlab.io/assets/repo-button-mirror.svg)](https://github.com/pushrocks/elasticlog)
[![docs](https://pushrocks.gitlab.io/assets/repo-button-docs.svg)](https://pushrocks.gitlab.io/elasticlog/)
## Issue Reporting and Security
## Status for master
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
[![build status](https://GitLab.com/pushrocks/elasticlog/badges/master/build.svg)](https://GitLab.com/pushrocks/elasticlog/commits/master)
[![coverage report](https://GitLab.com/pushrocks/elasticlog/badges/master/coverage.svg)](https://GitLab.com/pushrocks/elasticlog/commits/master)
[![npm downloads per month](https://img.shields.io/npm/dm/elasticlog.svg)](https://www.npmjs.com/package/elasticlog)
[![Dependency Status](https://david-dm.org/pushrocks/elasticlog.svg)](https://david-dm.org/pushrocks/elasticlog)
[![bitHound Dependencies](https://www.bithound.io/github/pushrocks/elasticlog/badges/dependencies.svg)](https://www.bithound.io/github/pushrocks/elasticlog/master/dependencies/npm)
[![bitHound Code](https://www.bithound.io/github/pushrocks/elasticlog/badges/code.svg)](https://www.bithound.io/github/pushrocks/elasticlog)
[![Known Vulnerabilities](https://snyk.io/test/npm/elasticlog/badge.svg)](https://snyk.io/test/npm/elasticlog)
[![TypeScript](https://img.shields.io/badge/TypeScript-2.x-blue.svg)](https://nodejs.org/dist/latest-v6.x/docs/api/)
[![node](https://img.shields.io/badge/node->=%206.x.x-blue.svg)](https://nodejs.org/dist/latest-v6.x/docs/api/)
[![JavaScript Style Guide](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)
## Features ✨
## Usage
- **🎯 SmartLog Destination** - Full-featured logging destination compatible with Kibana, automatic index rotation, and retention management
- **📦 ElasticDoc** - Advanced document management with piping sessions, snapshots, and automatic cleanup
- **🚀 FastPush** - High-performance bulk document insertion with automatic index management
- **💾 KVStore** - Simple key-value storage interface backed by Elasticsearch
- **🔧 TypeScript First** - Complete type safety with full TypeScript support
- **🌊 Data Streams** - Built-in support for Elasticsearch data streams
- **⚡ Production Ready** - Designed for high-throughput production environments
Use TypeScript for best in class instellisense.
## Installation 📦
For further information read the linked docs at the top of this README.
```bash
npm install @apiclient.xyz/elasticsearch
# or
pnpm install @apiclient.xyz/elasticsearch
```
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
> | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
## Quick Start 🚀
[![repo-footer](https://pushrocks.gitlab.io/assets/repo-footer.svg)](https://push.rocks)
### SmartLog Destination
Perfect for application logging with automatic index rotation and Kibana compatibility:
```typescript
import { ElsSmartlogDestination } from '@apiclient.xyz/elasticsearch';
const logger = new ElsSmartlogDestination({
indexPrefix: 'app-logs',
indexRetention: 7, // Keep logs for 7 days
node: 'http://localhost:9200',
auth: {
username: 'elastic',
password: 'your-password',
},
});
// Log messages that automatically appear in Kibana
await logger.log({
timestamp: Date.now(),
type: 'increment',
level: 'info',
context: {
company: 'YourCompany',
companyunit: 'api-service',
containerName: 'web-server',
environment: 'production',
runtime: 'node',
zone: 'us-east-1',
},
message: 'User authentication successful',
correlation: null,
});
```
### ElasticDoc - Document Management
Handle documents with advanced features like piping sessions and snapshots:
```typescript
import { ElasticDoc } from '@apiclient.xyz/elasticsearch';
const docManager = new ElasticDoc({
index: 'products',
node: 'http://localhost:9200',
auth: {
username: 'elastic',
password: 'your-password',
},
});
// Start a piping session to manage document lifecycle
await docManager.startPipingSession({});
// Add or update documents
await docManager.pipeDocument({
docId: 'product-001',
timestamp: new Date().toISOString(),
doc: {
name: 'Premium Widget',
price: 99.99,
inStock: true
},
});
await docManager.pipeDocument({
docId: 'product-002',
timestamp: new Date().toISOString(),
doc: {
name: 'Deluxe Gadget',
price: 149.99,
inStock: false
},
});
// End session - automatically removes documents not in this session
await docManager.endPipingSession();
// Take and store snapshots with custom aggregations
await docManager.takeSnapshot(async (iterator, prevSnapshot) => {
const aggregationData = [];
for await (const doc of iterator) {
aggregationData.push(doc);
}
return {
date: new Date().toISOString(),
aggregationData,
};
});
```
### FastPush - Bulk Data Ingestion
Efficiently push large datasets with automatic index management:
```typescript
import { FastPush } from '@apiclient.xyz/elasticsearch';
const fastPush = new FastPush({
node: 'http://localhost:9200',
auth: {
username: 'elastic',
password: 'your-password',
},
});
const documents = [
{ id: 1, name: 'Document 1', data: 'Some data' },
{ id: 2, name: 'Document 2', data: 'More data' },
// ... thousands more documents
];
// Push all documents with automatic batching
await fastPush.pushDocuments('bulk-data', documents, {
deleteOldData: true, // Clear old data before inserting
});
```
### KVStore - Key-Value Storage
Simple key-value storage backed by the power of Elasticsearch:
```typescript
import { KVStore } from '@apiclient.xyz/elasticsearch';
const kvStore = new KVStore({
index: 'app-config',
node: 'http://localhost:9200',
auth: {
username: 'elastic',
password: 'your-password',
},
});
// Set values
await kvStore.set('api-key', 'sk-1234567890');
await kvStore.set('feature-flags', JSON.stringify({ newUI: true }));
// Get values
const apiKey = await kvStore.get('api-key');
console.log(apiKey); // 'sk-1234567890'
// Clear all data
await kvStore.clear();
```
## Core Classes 🏗️
### ElsSmartlogDestination
The main logging destination class that provides:
- Automatic index rotation based on date
- Configurable retention policies
- Kibana-compatible log format
- Data stream support
- Built-in scheduler for maintenance tasks
### ElasticDoc
Advanced document management with:
- Piping sessions for tracking document lifecycles
- Automatic cleanup of stale documents
- Snapshot functionality with custom processors
- Iterator-based document access
- Fast-forward mode for incremental processing
### FastPush
High-performance bulk operations:
- Automatic batching for optimal performance
- Index management (create, delete, clear)
- Dynamic mapping support
- Efficient bulk API usage
### KVStore
Simple key-value interface:
- Elasticsearch-backed storage
- Async/await API
- Automatic index initialization
- Clear and get operations
## Advanced Usage 🎓
### Index Rotation and Retention
```typescript
const logger = new ElsSmartlogDestination({
indexPrefix: 'myapp',
indexRetention: 30, // Keep 30 days of logs
node: 'http://localhost:9200',
});
// Indices are automatically created as: myapp-2025-01-22
// Old indices are automatically deleted after 30 days
```
### Document Iteration
```typescript
// Iterate over all documents in an index
const iterator = docManager.getDocumentIterator();
for await (const doc of iterator) {
console.log(doc);
}
// Only process new documents since last run
docManager.fastForward = true;
await docManager.startPipingSession({ onlyNew: true });
```
### Custom Snapshots
```typescript
await docManager.takeSnapshot(async (iterator, prevSnapshot) => {
let totalValue = 0;
let count = 0;
for await (const doc of iterator) {
totalValue += doc._source.price;
count++;
}
return {
date: new Date().toISOString(),
aggregationData: {
totalValue,
averagePrice: totalValue / count,
count,
previousSnapshot: prevSnapshot,
},
};
});
```
## API Compatibility 🔄
This module is built on top of `@elastic/elasticsearch` v9.x and is compatible with:
- Elasticsearch 8.x and 9.x clusters
- Kibana 8.x and 9.x for log visualization
- OpenSearch (with some limitations)
## TypeScript Support 💙
Full TypeScript support with comprehensive type definitions:
```typescript
import type {
IElasticDocConstructorOptions,
ISnapshot,
SnapshotProcessor
} from '@apiclient.xyz/elasticsearch';
```
## Performance Considerations ⚡
- **Bulk Operations**: FastPush uses 1000-document batches by default
- **Connection Pooling**: Reuses Elasticsearch client connections
- **Index Management**: Automatic index creation and deletion
- **Data Streams**: Built-in support for efficient log ingestion
## Best Practices 💡
1. **Always use authentication** in production environments
2. **Set appropriate retention policies** to manage storage costs
3. **Use piping sessions** to automatically clean up stale documents
4. **Leverage snapshots** for point-in-time analytics
5. **Configure index templates** for consistent mappings
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
### Company Information
Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

89
test/test.nonci.ts Normal file
View File

@@ -0,0 +1,89 @@
// Integration tests for the elasticsearch module.
// NOTE(review): these expect a live Elasticsearch node at http://localhost:9200
// reachable with the hard-coded credentials below — confirm CI provides one.
import { tap, expect } from '@git.zone/tstest/tapbundle';
// NOTE(review): Qenv is imported but never used in this file — confirm whether
// env-based configuration was meant to replace the hard-coded credentials.
import { Qenv } from '@push.rocks/qenv';
import * as elasticsearch from '../ts/index.js';

// Shared instances; the tap tests below run sequentially and build on each other.
let testElasticLog: elasticsearch.ElsSmartlogDestination<any>;
let testElasticDoc: elasticsearch.ElasticDoc;

// Constructs the smartlog destination against the local node.
tap.test('first test', async () => {
  testElasticLog = new elasticsearch.ElsSmartlogDestination({
    indexPrefix: 'testprefix',
    indexRetention: 7,
    node: 'http://localhost:9200',
    auth: {
      username: 'elastic',
      password: 'YourPassword',
    },
  });
  expect(testElasticLog).toBeInstanceOf(elasticsearch.ElsSmartlogDestination);
});

// Sends a single log package through the destination created above.
tap.test('should send a message to Elasticsearch', async () => {
  await testElasticLog.log({
    timestamp: Date.now(),
    type: 'increment',
    level: 'info',
    context: {
      company: 'Lossless GmbH',
      companyunit: 'lossless.cloud',
      containerName: 'testcontainer',
      environment: 'test',
      runtime: 'node',
      zone: 'ship.zone',
    },
    message: 'GET https://myroute.to.a.cool.destination/sorare?hello=there',
    correlation: null,
  });
});

// Creates the ElasticDoc instance used by the piping-session tests below.
tap.test('should create an ElasticDoc instance', async () => {
  testElasticDoc = new elasticsearch.ElasticDoc({
    index: 'testindex',
    node: 'http://localhost:9200',
    auth: {
      username: 'elastic',
      password: 'YourPassword',
    },
  });
  expect(testElasticDoc).toBeInstanceOf(elasticsearch.ElasticDoc);
});

// Pipes two docs and overwrites doc '1' within the same session.
tap.test('should add and update documents in a piping session', async () => {
  await testElasticDoc.startPipingSession({});
  await testElasticDoc.pipeDocument({
    docId: '1',
    timestamp: new Date().toISOString(),
    doc: { name: 'doc1' },
  });
  await testElasticDoc.pipeDocument({
    docId: '2',
    timestamp: new Date().toISOString(),
    doc: { name: 'doc2' },
  });
  await testElasticDoc.pipeDocument({
    docId: '1',
    timestamp: new Date().toISOString(),
    doc: { name: 'updated doc1' },
  });
});

// Ending the session removes any docs in the index that were not piped above.
tap.test('should delete documents not part of the piping session', async () => {
  await testElasticDoc.endPipingSession();
});

// Exercises the snapshot API with a trivial collect-all aggregation.
tap.test('should take and store snapshot', async () => {
  await testElasticDoc.takeSnapshot(async (iterator, prevSnapshot) => {
    const aggregationData = [];
    for await (const doc of iterator) {
      // Sample aggregation: counting documents
      aggregationData.push(doc);
    }
    const snapshot = {
      date: new Date().toISOString(),
      aggregationData,
    };
    return snapshot;
  });
});

export default tap.start();

View File

@@ -1,39 +0,0 @@
// Legacy (pre-2.0) integration test for the old ElasticSearch destination.
// Configuration comes from ELK_* environment variables.
import { expect, tap } from '@pushrocks/tapbundle';
import { Qenv } from '@pushrocks/qenv';
import * as elasticsearch from '../ts/index';

// presumably loads required env vars from ./.nogit/ into process.env — verify
const testQenv = new Qenv('./', './.nogit/');

let testElasticLog: elasticsearch.ElasticSearch<any>;

// Constructs the destination from env-provided connection settings.
tap.test('first test', async () => {
  testElasticLog = new elasticsearch.ElasticSearch({
    indexPrefix: 'smartlog',
    indexRetention: 7,
    domain: process.env.ELK_DOMAIN,
    port: parseInt(process.env.ELK_PORT, 10),
    ssl: true,
    user: process.env.ELK_USER,
    pass: process.env.ELK_PASS
  });
  expect(testElasticLog).to.be.instanceOf(elasticsearch.ElasticSearch);
});

tap.test('should send a message to Elasticsearch', async () => {
  // NOTE(review): this call is not awaited, so a delivery failure would not
  // fail the test — confirm whether that is intentional.
  testElasticLog.log({
    timestamp: Date.now(),
    type: 'increment',
    level: 'info',
    context: {
      company: 'Lossless GmbH',
      companyunit: 'lossless.cloud',
      containerName: 'testcontainer',
      environment: 'test',
      runtime: 'node',
      zone: 'ship.zone'
    },
    message: 'hi, this is a testMessage'
  });
});

tap.start();

8
ts/00_commitinfo_data.ts Normal file
View File

@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @push.rocks/commitinfo
 * Do not edit by hand — the release tooling presumably regenerates this file
 * on every version bump (see the version field tracking the package version).
 */
export const commitinfo = {
  name: '@apiclient.xyz/elasticsearch',
  version: '2.0.17',
  description: 'log to elasticsearch in a kibana compatible format'
}

View File

@@ -1,105 +0,0 @@
import * as plugins from './elasticsearch.plugins';
import { ElasticSearch } from './elasticsearch.classes.elasticsearch';
import { ILogPackage } from '@pushrocks/smartlog-interfaces';
import { Stringmap } from '@pushrocks/lik';
/**
 * Ensures daily smartlog indices exist and prunes indices that are older
 * than the configured retention period.
 */
export class ElasticIndex {
  // caches index names that were already ensured, so the cluster is only
  // queried once per index name per process lifetime
  private stringmap = new Stringmap();
  private elasticSearchRef: ElasticSearch<any>;

  constructor(elasticSearchInstanceArg: ElasticSearch<ILogPackage>) {
    this.elasticSearchRef = elasticSearchInstanceArg;
  }

  /**
   * Makes sure indexArg exists on the cluster, creating it if needed.
   * As a side effect, deletes smartlog indices that violate the retention.
   * @param indexArg the index name to ensure
   */
  public async ensureIndex(indexArg: string) {
    const done = plugins.smartpromise.defer();
    if (this.stringmap.checkString(indexArg)) {
      done.resolve();
      return;
    }
    this.elasticSearchRef.client.cat.indices(
      {
        format: 'json',
        bytes: 'm'
      },
      async (err, responseArg: any[]) => {
        if (err) {
          console.log(err);
          // FIX: resolve the deferred on error as well — previously it was
          // left pending forever, so awaiting callers would hang.
          done.resolve();
          return;
        }
        // lets delete indexes that violate the retention
        if (Array.isArray(responseArg)) {
          const filteredIndices = responseArg.filter(indexObjectArg => {
            return indexObjectArg.index.startsWith('smartlog');
          });
          const filteredIndexNames = filteredIndices.map(indexObjectArg => {
            return indexObjectArg.index;
          });
          this.deleteOldIndices(filteredIndexNames);
        }
        let index = null;
        if (Array.isArray(responseArg)) {
          index = responseArg.find(indexObject => {
            return indexObject.index === indexArg;
          });
        }
        if (!index) {
          const done2 = plugins.smartpromise.defer();
          this.elasticSearchRef.client.indices.create(
            {
              waitForActiveShards: '2',
              index: indexArg
            },
            (error, response) => {
              // creation errors are intentionally tolerated (best effort)
              done2.resolve();
            }
          );
          await done2.promise;
        }
        this.stringmap.addString(indexArg);
        done.resolve();
      }
    );
    await done.promise;
  }

  // NOTE(review): unused stub — kept for interface compatibility.
  public createNewIndex(indexNameArg: string) {
  }

  /**
   * Deletes indices whose encoded date is older than the retention period.
   * @param indicesArray index names expected to look like smartlog-YYYY.MM.DD
   */
  public async deleteOldIndices(indicesArray: string[]) {
    const todayAsUnix: number = Date.now();
    const retentionPeriodAsUnix: number = plugins.smarttime.units.days(
      this.elasticSearchRef.indexRetention
    );
    for (const indexName of indicesArray) {
      const regexResult = /^smartlog-([0-9]*)\.([0-9]*)\.([0-9]*)$/.exec(indexName);
      // FIX: skip names that do not match the expected pattern — previously a
      // non-matching name caused a TypeError on regexResult[1].
      if (!regexResult) {
        continue;
      }
      const dateAsUnix: number = new Date(
        `${regexResult[1]}-${regexResult[2]}-${regexResult[3]}`
      ).getTime();
      if (todayAsUnix - retentionPeriodAsUnix > dateAsUnix) {
        console.log(`found old index ${indexName}`);
        const done2 = plugins.smartpromise.defer();
        this.elasticSearchRef.client.indices.delete(
          {
            index: indexName
          },
          (err2, response2) => {
            if (err2) {
              console.log(err2);
            }
            console.log(`deleted ${indexName}`);
            done2.resolve();
          }
        );
        await done2.promise;
      }
    }
  }
}

View File

@@ -1,44 +0,0 @@
import { ElasticSearch, IStandardLogParams } from './elasticsearch.classes.elasticsearch';
/**
 * Buffers log documents that could not be delivered and periodically
 * replays them through the owning ElasticSearch instance.
 */
export class ElasticScheduler {
  elasticSearchRef: ElasticSearch<any>;
  docsScheduled = false;
  docsStorage: any[] = [];

  constructor(elasticLogRefArg: ElasticSearch<any>) {
    this.elasticSearchRef = elasticLogRefArg;
  }

  /** Stores a doc that failed to send and arms the retry timer. */
  public addFailedDoc(objectArg: any | IStandardLogParams) {
    this.docsStorage.push(objectArg);
    this.setRetry();
  }

  /** Queues a doc for later delivery without arming the timer. */
  public scheduleDoc(logObject: any) {
    this.docsStorage.push(logObject);
  }

  /** Arms a 5 second timer that replays all buffered docs. */
  public setRetry() {
    setTimeout(() => {
      const pendingDocs = this.docsStorage;
      this.docsStorage = [];
      pendingDocs.forEach((pendingDoc) => {
        this.elasticSearchRef.log(pendingDoc, true);
      });
      // docs that fail again land back in docsStorage via addFailedDoc
      if (this.docsStorage.length !== 0) {
        console.log('ElasticLog retry failed');
        this.setRetry();
      } else {
        console.log('ElasticLog retry success!!!');
        this.docsScheduled = false;
      }
    }, 5000);
  }

  /** Arms the retry timer once; no-op while a retry is already scheduled. */
  public deferSend() {
    if (this.docsScheduled) {
      return;
    }
    console.log('Retry ElasticLog in 5 seconds!');
    this.docsScheduled = true;
    this.setRetry();
  }
}

View File

@@ -1,104 +0,0 @@
// interfaces
import { Client as ElasticClient } from 'elasticsearch';
import { ILogContext, ILogPackage, ILogDestination } from '@pushrocks/smartlog-interfaces';
// other classes
import { ElasticScheduler } from './elasticsearch.classes.elasticscheduler';
import { ElasticIndex } from './elasticsearch.classes.elasticindex';
// Minimal shape for a generic log entry.
export interface IStandardLogParams {
  message: string;
  severity: string;
}

// Connection and retention options for the ElasticSearch log destination.
export interface IElasticSearchConstructorOptions {
  indexPrefix: string;
  // retention in days (consumed via smarttime.units.days in ElasticIndex)
  indexRetention: number;
  port: number;
  domain: string;
  ssl: boolean;
  // user and pass are only applied when both are set
  user?: string;
  pass?: string;
}
/**
 * Smartlog destination that writes log packages into day-based
 * Elasticsearch indices (`<prefix>-YYYY.MM.DD`).
 */
export class ElasticSearch<T> {
  public client: ElasticClient;
  public elasticScheduler = new ElasticScheduler(this);
  public elasticIndex: ElasticIndex = new ElasticIndex(this);

  public indexPrefix: string;
  public indexRetention: number;

  /**
   * sets up an instance of Elastic log
   * @param optionsArg connection and retention options
   */
  constructor(optionsArg: IElasticSearchConstructorOptions) {
    this.client = new ElasticClient({
      host: this.computeHostString(optionsArg)
      // log: 'trace'
    });
    this.indexPrefix = optionsArg.indexPrefix;
    this.indexRetention = optionsArg.indexRetention;
  }

  /**
   * Builds the host string (protocol, optional credentials, domain, port)
   * from the constructor options.
   */
  private computeHostString(optionsArg: IElasticSearchConstructorOptions): string {
    const protocol = optionsArg.ssl ? 'https' : 'http';
    const credentials =
      optionsArg.user && optionsArg.pass ? `${optionsArg.user}:${optionsArg.pass}@` : '';
    return `${protocol}://${credentials}${optionsArg.domain}:${optionsArg.port}`;
  }

  /**
   * Indexes one log package into today's index; defers to the scheduler
   * when a retry cycle is already in progress.
   */
  public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
    const now = new Date();
    const month = ('0' + (now.getMonth() + 1)).slice(-2);
    const day = ('0' + now.getDate()).slice(-2);
    const indexToUse = `${this.indexPrefix}-${now.getFullYear()}.${month}.${day}`;
    if (this.elasticScheduler.docsScheduled && !scheduleOverwrite) {
      this.elasticScheduler.scheduleDoc(logPackageArg);
      return;
    }
    await this.elasticIndex.ensureIndex(indexToUse);
    this.client.index(
      {
        index: indexToUse,
        type: 'log',
        body: {
          '@timestamp': new Date(logPackageArg.timestamp).toISOString(),
          ...logPackageArg
        }
      },
      (error, response) => {
        if (error) {
          console.log('ElasticLog encountered an error:');
          console.log(error);
          // failed docs are buffered and retried by the scheduler
          this.elasticScheduler.addFailedDoc(logPackageArg);
        }
      }
    );
  }

  get logDestination(): ILogDestination {
    return {
      handleLog: (smartlogPackageArg: ILogPackage) => {
        this.log(smartlogPackageArg);
      }
    };
  }
}

View File

@@ -1,8 +0,0 @@
import * as elasticsearch from 'elasticsearch';
import * as lik from '@pushrocks/lik';
import * as smartdelay from '@pushrocks/smartdelay';
import * as smartlogInterfaces from '@pushrocks/smartlog-interfaces';
import * as smartpromise from '@pushrocks/smartpromise';
import * as smarttime from '@pushrocks/smarttime';
export { elasticsearch, lik, smartdelay, smartlogInterfaces, smartpromise, smarttime };

View File

@@ -0,0 +1,273 @@
import { Client as ElasticClient } from '@elastic/elasticsearch';
// Connection options for ElasticDoc.
export interface IElasticDocConstructorOptions {
  index: string;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}

// A stored aggregation snapshot; date is sorted on when loading the latest one.
export interface ISnapshot {
  date: string;
  aggregationData: any;
}

// Callback that folds every document (plus the previous snapshot, if any)
// into a new snapshot.
export type SnapshotProcessor = (
  iterator: AsyncIterable<any>,
  prevSnapshot: ISnapshot | null,
) => Promise<ISnapshot>;
/**
 * Maintains an Elasticsearch index through "piping sessions": documents are
 * piped in and, when the session ends, every document in the index that was
 * not part of the session is deleted. Also supports aggregation snapshots
 * stored in a sibling `<index>_snapshots` index.
 */
export class ElasticDoc {
  public client: ElasticClient;
  public index: string;
  // doc ids seen during the current piping session
  private sessionDocs: Set<string> = new Set();
  private indexInitialized: boolean = false;
  private latestTimestamp: string | null = null; // Store the latest timestamp
  private onlyNew: boolean = false; // Whether to only pipe new docs
  public fastForward: boolean = false; // Whether to fast forward to the latest timestamp
  private BATCH_SIZE = 1000;

  constructor(options: IElasticDocConstructorOptions) {
    this.client = new ElasticClient({
      node: options.node,
      ...(options.auth && { auth: options.auth }),
    });
    this.index = options.index;
  }

  /** Creates the index on first use; subsequent calls are no-ops. */
  private async ensureIndexExists(doc: any) {
    if (!this.indexInitialized) {
      const indexExists = await this.client.indices.exists({
        index: this.index,
      });
      if (!indexExists) {
        const mappings = this.createMappingsFromDoc(doc);
        await this.client.indices.create({
          index: this.index,
          // mappings,
          settings: {
            // You can define the settings according to your requirements here
          },
        });
      }
      this.indexInitialized = true;
    }
  }

  /** Derives a naive mapping from a sample doc: date / float / text. */
  private createMappingsFromDoc(doc: any): any {
    const properties: any = {};
    for (const key in doc) {
      if (key === '@timestamp') {
        properties[key] = { type: 'date' };
        continue;
      }
      properties[key] = {
        type: typeof doc[key] === 'number' ? 'float' : 'text',
      };
    }
    return { properties };
  }

  /**
   * Starts a new piping session.
   * @param options.onlyNew when true, documents not newer than the latest
   *   '@timestamp' already stored are skipped (fast-forwarded).
   */
  async startPipingSession(options: { onlyNew?: boolean } = {}) {
    this.sessionDocs.clear();
    // FIX: default to false so a missing flag does not leave onlyNew undefined
    this.onlyNew = options.onlyNew ?? false;
    const indexExists = await this.client.indices.exists({ index: this.index });
    if (this.onlyNew && indexExists) {
      const response = await this.client.search({
        index: this.index,
        sort: '@timestamp:desc',
        size: 1,
      });
      // If the search query succeeded, the index exists.
      const hit = response.hits.hits[0];
      this.latestTimestamp = hit?._source?.['@timestamp'] || null;
      if (this.latestTimestamp) {
        console.log(
          `Working in "onlyNew" mode. Hence we are omitting documents prior to ${this.latestTimestamp}`,
        );
      } else {
        console.log(
          `Working in "onlyNew" mode, but no documents found in index ${this.index}. Hence processing all documents now.`,
        );
      }
    } else if (this.onlyNew && !indexExists) {
      console.log(
        `Working in "onlyNew" mode, but index ${this.index} does not exist. Hence processing all documents now.`,
      );
    }
  }

  /**
   * Indexes one document under docId (unless skipped in "onlyNew" mode) and
   * records the id so endPipingSession will not delete it.
   */
  async pipeDocument(optionsArg: {
    docId: string;
    timestamp?: string | number;
    doc: any;
  }) {
    await this.ensureIndexExists(optionsArg.doc);
    const documentBody = {
      ...optionsArg.doc,
      ...(optionsArg.timestamp && { '@timestamp': optionsArg.timestamp }),
    };
    // In "onlyNew" mode, skip (fast-forward) documents that are not newer than
    // the newest stored timestamp. NOTE(review): the comparison mixes string
    // and number timestamps; numeric timestamps never compare <= an ISO string,
    // so they are always indexed — confirm this matches caller expectations.
    const shouldSkip =
      this.onlyNew &&
      this.latestTimestamp !== null &&
      optionsArg.timestamp !== undefined &&
      optionsArg.timestamp <= this.latestTimestamp;
    this.fastForward = !!shouldSkip;
    // FIX: single index call instead of duplicated branches
    if (!shouldSkip) {
      await this.client.index({
        index: this.index,
        id: optionsArg.docId,
        body: documentBody,
      });
    }
    // track the id even when skipped so the doc survives endPipingSession
    this.sessionDocs.add(optionsArg.docId);
  }

  /**
   * Ends the session: bulk-deletes every document whose id was not piped
   * during this session.
   */
  async endPipingSession() {
    const allDocIds: string[] = [];
    const responseQueue = [];
    let response = await this.client.search({
      index: this.index,
      scroll: '1m',
      size: this.BATCH_SIZE,
    });
    while (true) {
      response.hits.hits.forEach((hit: any) => allDocIds.push(hit._id));
      if (!response.hits.hits.length) {
        break;
      }
      response = await this.client.scroll({
        scroll_id: response._scroll_id,
        scroll: '1m',
      });
    }
    // FIX: release the scroll context instead of letting it time out
    if (response._scroll_id) {
      await this.client
        .clearScroll({ scroll_id: response._scroll_id })
        .catch(() => {});
    }
    for (const docId of allDocIds) {
      if (!this.sessionDocs.has(docId)) {
        responseQueue.push({
          delete: {
            _index: this.index,
            _id: docId,
          },
        });
        if (responseQueue.length >= this.BATCH_SIZE) {
          await this.client.bulk({ refresh: true, body: responseQueue });
          responseQueue.length = 0;
        }
      }
    }
    if (responseQueue.length > 0) {
      await this.client.bulk({ refresh: true, body: responseQueue });
    }
    this.sessionDocs.clear();
  }

  /**
   * Streams all documents through processIterator together with the previous
   * snapshot and stores the resulting snapshot.
   */
  async takeSnapshot(processIterator: SnapshotProcessor) {
    const snapshotIndex = `${this.index}_snapshots`;
    const indexExists = await this.client.indices.exists({
      index: snapshotIndex,
    });
    if (!indexExists) {
      await this.client.indices.create({
        index: snapshotIndex,
        mappings: {
          properties: {
            date: {
              type: 'date',
            },
            aggregationData: {
              type: 'object',
              enabled: true,
            },
          },
        },
      });
    }
    const documentIterator = this.getDocumentIterator();
    const newSnapshot = await processIterator(
      documentIterator,
      await this.getLastSnapshot(),
    );
    await this.storeSnapshot(newSnapshot);
  }

  /** Loads the most recent snapshot, or null when none exists. */
  private async getLastSnapshot(): Promise<ISnapshot | null> {
    const snapshotIndex = `${this.index}_snapshots`;
    const indexExists = await this.client.indices.exists({
      index: snapshotIndex,
    });
    if (!indexExists) {
      return null;
    }
    const response = await this.client.search({
      index: snapshotIndex,
      sort: 'date:desc',
      size: 1,
    });
    if (response.hits.hits.length > 0) {
      const hit = response.hits.hits[0];
      return {
        date: hit._source['date'],
        aggregationData: hit._source['aggregationData'],
      };
    } else {
      return null;
    }
  }

  /** Yields every document's _source via the scroll API. */
  private async *getDocumentIterator() {
    let response = await this.client.search({
      index: this.index,
      scroll: '1m',
      size: this.BATCH_SIZE,
    });
    while (true) {
      for (const hit of response.hits.hits) {
        yield hit._source;
      }
      if (!response.hits.hits.length) {
        break;
      }
      response = await this.client.scroll({
        scroll_id: response._scroll_id,
        scroll: '1m',
      });
    }
    // FIX: release the scroll context instead of letting it time out
    if (response._scroll_id) {
      await this.client
        .clearScroll({ scroll_id: response._scroll_id })
        .catch(() => {});
    }
  }

  /** Appends a snapshot document to the snapshot index. */
  private async storeSnapshot(snapshot: ISnapshot) {
    await this.client.index({
      index: `${this.index}_snapshots`,
      body: snapshot,
    });
  }
}

View File

@@ -0,0 +1,106 @@
import * as plugins from './els.plugins.js';
import { ElsSmartlogDestination } from './els.classes.smartlogdestination.js';
import { type ILogPackage } from '@push.rocks/smartlog-interfaces';
import { Stringmap } from '@push.rocks/lik';
/**
 * Ensures log indices exist for a given prefix and prunes indices that are
 * older than the configured retention period.
 */
export class ElasticIndex {
  // caches names already ensured so the cluster is only queried once per name
  private stringmap = new Stringmap();
  private elasticSearchRef: ElsSmartlogDestination<any>;

  constructor(elasticSearchInstanceArg: ElsSmartlogDestination<ILogPackage>) {
    this.elasticSearchRef = elasticSearchInstanceArg;
  }

  /**
   * Ensures indexNameArg exists (creating it if necessary) and prunes old
   * indices under prefixArg.
   * NOTE(review): returns the cached name when already ensured, the found
   * index object otherwise, and undefined right after creation — confirm
   * callers do not depend on the return shape.
   */
  public async ensureIndex(prefixArg: string, indexNameArg: string) {
    if (this.stringmap.checkString(indexNameArg)) {
      return indexNameArg;
    }
    const responseArg = await this.elasticSearchRef.client.cat
      .indices({
        format: 'json',
        bytes: 'mb',
      })
      .catch((err) => {
        console.log(err);
      });
    if (!responseArg) {
      throw new Error('Could not get valid response from elastic search');
    }
    if (Array.isArray(responseArg)) {
      const filteredIndices = responseArg.filter((indexObjectArg) => {
        return indexObjectArg.index.startsWith(prefixArg);
      });
      const filteredIndexNames = filteredIndices.map((indexObjectArg) => {
        return indexObjectArg.index;
      });
      await this.deleteOldIndices(prefixArg, filteredIndexNames);
    }
    let index = null;
    if (Array.isArray(responseArg)) {
      index = responseArg.find((indexItemArg) => {
        return indexItemArg.index === indexNameArg;
      });
    }
    if (!index) {
      await this.createNewIndex(indexNameArg);
    }
    this.stringmap.addString(indexNameArg);
    return index;
  }

  /** Creates a fresh index with a date field and a dynamic payload mapping. */
  public async createNewIndex(indexNameArg: string) {
    // FIX: dropped the unused `response` local
    await this.elasticSearchRef.client.indices.create({
      wait_for_active_shards: 1,
      index: indexNameArg,
      mappings: {
        properties: {
          '@timestamp': {
            type: 'date',
          },
          logPackageArg: {
            properties: {
              payload: {
                type: 'object',
                dynamic: true,
              },
            },
          },
        },
      },
    });
  }

  /**
   * Deletes indices older than the configured retention.
   * @param prefixArg only names starting with this prefix are considered
   * @param indicesArray candidate names, expected as `<prefix>-YYYY-MM-DD`
   */
  public async deleteOldIndices(prefixArg: string, indicesArray: string[]) {
    const todayAsUnix: number = Date.now();
    const retentionPeriodAsUnix: number = plugins.smarttime.units.days(
      this.elasticSearchRef.indexRetention,
    );
    // hoisted: the pattern depends only on the prefix, not the loop variable
    const indexRegex = new RegExp(`^${prefixArg}-([0-9]*)-([0-9]*)-([0-9]*)$`);
    for (const indexName of indicesArray) {
      if (!indexName.startsWith(prefixArg)) continue;
      const regexResult = indexRegex.exec(indexName);
      // FIX: skip names that do not match the date pattern — previously a
      // non-matching name crashed with a TypeError on regexResult[1].
      if (!regexResult) continue;
      const dateAsUnix: number = new Date(
        `${regexResult[1]}-${regexResult[2]}-${regexResult[3]}`,
      ).getTime();
      if (todayAsUnix - retentionPeriodAsUnix > dateAsUnix) {
        console.log(`found old index ${indexName}`);
        await this.elasticSearchRef.client.indices
          .delete({
            index: indexName,
          })
          .catch((err) => {
            console.log(err);
          });
      }
    }
  }
}

View File

@@ -0,0 +1,66 @@
import {
ElsSmartlogDestination,
type IStandardLogParams,
} from './els.classes.smartlogdestination.js';
/**
 * Buffers log documents that failed to send (or arrive while a retry is
 * pending) and replays them through the owning destination, either when the
 * buffer fills up or on a 5 second retry timer.
 */
export class ElasticScheduler {
  elasticSearchRef: ElsSmartlogDestination<any>;
  docsScheduled = false;
  docsStorage: any[] = [];
  // the buffer is flushed immediately once this many docs are queued
  maxBufferSize = 500;

  constructor(elasticLogRefArg: ElsSmartlogDestination<any>) {
    this.elasticSearchRef = elasticLogRefArg;
  }

  /** Buffers a failed doc and arms the retry timer. */
  public addFailedDoc(objectArg: any | IStandardLogParams) {
    this.addToStorage(objectArg);
    this.setRetry();
  }

  /** Buffers a doc for later delivery. */
  public scheduleDoc(logObject: any) {
    this.addToStorage(logObject);
  }

  /** Pushes into the buffer, flushing early once it reaches maxBufferSize. */
  private addToStorage(logObject: any) {
    this.docsStorage.push(logObject);
    if (this.docsStorage.length >= this.maxBufferSize) {
      this.flushLogsToElasticSearch();
    }
  }

  /** Replays every buffered doc through the destination. */
  private flushLogsToElasticSearch() {
    const pendingDocs = this.docsStorage;
    this.docsStorage = [];
    pendingDocs.forEach((pendingDoc) => {
      this.elasticSearchRef.log(pendingDoc, true);
    });
  }

  /** Arms a 5 second timer that flushes and re-arms while docs remain. */
  public setRetry() {
    setTimeout(() => {
      this.flushLogsToElasticSearch();
      if (this.docsStorage.length !== 0) {
        console.log('ElasticLog retry failed');
        this.setRetry();
      } else {
        console.log('ElasticLog retry success!!!');
        this.docsScheduled = false;
      }
    }, 5000);
  }

  /** Arms the retry timer once; no-op while a retry is already scheduled. */
  public deferSend() {
    if (this.docsScheduled) {
      return;
    }
    console.log('Retry ElasticLog in 5 seconds!');
    this.docsScheduled = true;
    this.setRetry();
  }
}

View File

@@ -0,0 +1,67 @@
import { Client as ElasticClient } from '@elastic/elasticsearch';
// Options controlling how existing data is handled before a push.
interface FastPushOptions {
  deleteOldData?: boolean; // Clear the index
  deleteIndex?: boolean; // Delete the entire index
}

/**
 * Convenience helper that bulk-pushes an array of documents into an index,
 * optionally clearing existing data or recreating the index first.
 */
export class FastPush {
  private client: ElasticClient;

  constructor(node: string, auth?: { username: string; password: string }) {
    this.client = new ElasticClient({
      node: node,
      ...(auth && { auth: auth }),
    });
  }

  /**
   * Pushes docArray into indexName via the bulk API; no-op for an empty array.
   */
  async pushToIndex(
    indexName: string,
    docArray: any[],
    options?: FastPushOptions,
  ) {
    if (docArray.length === 0) return;

    const indexExists = await this.client.indices.exists({ index: indexName });
    if (indexExists) {
      if (options?.deleteIndex) {
        // drop the whole index; it is recreated below
        await this.client.indices.delete({ index: indexName });
      } else if (options?.deleteOldData) {
        // keep the index but remove all existing documents
        await this.client.deleteByQuery({
          index: indexName,
          query: {
            match_all: {},
          },
        });
      }
    }

    if (!indexExists || options?.deleteIndex) {
      // Create index with mappings (for simplicity, we use dynamic mapping)
      await this.client.indices.create({
        index: indexName,
        mappings: {
          dynamic: 'true',
        },
      });
    }

    // Bulk insert documents: interleave action and source lines as the bulk
    // API requires
    const bulkBody = docArray.flatMap((doc) => [
      {
        index: {
          _index: indexName,
        },
      },
      doc,
    ]);
    await this.client.bulk({ body: bulkBody });
  }
}

// Usage example:
// const fastPush = new FastPush('http://localhost:9200', { username: 'elastic', password: 'password' });
// fastPush.pushToIndex('my_index', [{ name: 'John', age: 30 }, { name: 'Jane', age: 25 }], { deleteOldData: true });

109
ts/els.classes.kvstore.ts Normal file
View File

@@ -0,0 +1,109 @@
import * as plugins from './els.plugins.js';
import { Client as ElasticClient } from '@elastic/elasticsearch';
// Connection options for ElasticKVStore.
export interface IElasticKVStoreConstructorOptions {
  index: string;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}

/**
 * A simple string key/value store backed by a single Elasticsearch index.
 * Keys double as document ids; values live in the 'value' field.
 */
export class ElasticKVStore {
  public client: ElasticClient;
  public index: string;
  // deferred that resolves once setupIndex() finished (or rejects on failure);
  // every public method awaits it before touching the index
  private readyDeferred: any;

  constructor(options: IElasticKVStoreConstructorOptions) {
    this.client = new ElasticClient({
      node: options.node,
      ...(options.auth && { auth: options.auth }),
    });
    this.index = options.index;
    this.readyDeferred = plugins.smartpromise.defer();
    // fire-and-forget; readiness is tracked through readyDeferred
    this.setupIndex();
  }

  // Creates the backing index (keyword key + text value) if it is missing.
  private async setupIndex() {
    try {
      const indexExists = await this.client.indices.exists({
        index: this.index,
      });
      if (!indexExists) {
        await this.client.indices.create({
          index: this.index,
          mappings: {
            properties: {
              key: {
                type: 'keyword',
              },
              value: {
                type: 'text',
              },
            },
          },
        });
      }
      this.readyDeferred.resolve();
    } catch (err) {
      this.readyDeferred.reject(err);
    }
  }

  /** Upserts a value under the given key (key is also the document id). */
  async set(key: string, value: string) {
    await this.readyDeferred.promise;
    await this.client.index({
      index: this.index,
      id: key,
      body: {
        key,
        value,
      },
    });
  }

  /** Returns the stored value, or null when the key does not exist (404). */
  async get(key: string): Promise<string | null> {
    await this.readyDeferred.promise;
    try {
      const response = await this.client.get({
        index: this.index,
        id: key,
      });
      return response._source['value'];
    } catch (error) {
      // 404 from the client means "no such document"; anything else is fatal
      if (error.meta && error.meta.statusCode === 404) {
        return null;
      }
      throw error;
    }
  }

  /** Deletes a key; a missing key (404) is silently ignored. */
  async delete(key: string) {
    await this.readyDeferred.promise;
    try {
      await this.client.delete({
        index: this.index,
        id: key,
      });
    } catch (error) {
      if (error.meta && error.meta.statusCode !== 404) {
        throw error;
      }
    }
  }

  /** Removes every document from the index (the index itself is kept). */
  async clear() {
    await this.readyDeferred.promise;
    await this.client.deleteByQuery({
      index: this.index,
      query: {
        match_all: {},
      },
    });
  }
}

View File

@@ -0,0 +1,77 @@
import { Client as ElasticClient } from '@elastic/elasticsearch';
import type {
ILogContext,
ILogPackage,
ILogDestination,
} from '@push.rocks/smartlog-interfaces';
import { ElasticScheduler } from './els.classes.elasticscheduler.js';
import { ElasticIndex } from './els.classes.elasticindex.js';
// Minimal shape for a generic log entry.
export interface IStandardLogParams {
  message: string;
  severity: string;
}

// Connection and retention options for the smartlog destination.
export interface IElasticSearchConstructorOptions {
  indexPrefix: string;
  // retention in days (consumed via smarttime.units.days in ElasticIndex)
  indexRetention: number;
  node: string;
  auth?: {
    username: string;
    password: string;
  };
}
/**
 * Smartlog destination that writes log packages into an Elasticsearch
 * data stream named `<indexPrefix>-data-stream`.
 */
export class ElsSmartlogDestination<T> {
  public client: ElasticClient;
  public elasticScheduler = new ElasticScheduler(this);
  public elasticIndex: ElasticIndex = new ElasticIndex(this);
  public indexPrefix: string;
  public indexRetention: number;
  // resolves once the index template has been installed
  private setupPromise: Promise<void>;

  constructor(optionsArg: IElasticSearchConstructorOptions) {
    this.client = new ElasticClient({
      node: optionsArg.node,
      ...(optionsArg.auth && { auth: optionsArg.auth }),
    });
    this.indexPrefix = `${optionsArg.indexPrefix}`;
    this.indexRetention = optionsArg.indexRetention;
    // FIX: keep a handle on the setup promise and attach a catch — previously
    // the floating promise could crash the process with an unhandled
    // rejection when the cluster was unreachable.
    this.setupPromise = this.setupDataStream().catch((err) => {
      console.log('ElsSmartlogDestination: failed to set up the index template:');
      console.log(err);
    });
  }

  /** Installs an index template that makes matching writes create a data stream. */
  private async setupDataStream() {
    // Define an index template
    await this.client.indices.putIndexTemplate({
      name: `${this.indexPrefix}_template`,
      index_patterns: [`${this.indexPrefix}-*`],
      data_stream: {},
    });
  }

  /**
   * Sends one log package to the data stream; defers to the scheduler while
   * a retry cycle is in progress, and buffers the doc for retry on failure.
   */
  public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
    // FIX: removed the unused `now` local
    const indexToUse = `${this.indexPrefix}-data-stream`; // Use data stream name
    if (this.elasticScheduler.docsScheduled && !scheduleOverwrite) {
      this.elasticScheduler.scheduleDoc(logPackageArg);
      return;
    }
    // FIX: wait for the template before the first write so the target is
    // actually created as a data stream.
    await this.setupPromise;
    try {
      await this.client.index({
        index: indexToUse,
        // data streams only accept 'create' write operations
        op_type: 'create',
        body: {
          '@timestamp': new Date(logPackageArg.timestamp).toISOString(),
          ...logPackageArg,
        },
      });
    } catch (error) {
      // FIX: failed docs were previously neither retried nor handled; route
      // them through the scheduler as the rest of the module expects.
      console.log('ElasticLog encountered an error:');
      console.log(error);
      this.elasticScheduler.addFailedDoc(logPackageArg);
    }
  }

  get logDestination(): ILogDestination {
    return {
      handleLog: async (smartlogPackageArg: ILogPackage) => {
        await this.log(smartlogPackageArg);
      },
    };
  }
}

15
ts/els.plugins.ts Normal file
View File

@@ -0,0 +1,15 @@
import * as elasticsearch from '@elastic/elasticsearch';
import * as lik from '@push.rocks/lik';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartlogInterfaces from '@push.rocks/smartlog-interfaces';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smarttime from '@push.rocks/smarttime';
export {
elasticsearch,
lik,
smartdelay,
smartlogInterfaces,
smartpromise,
smarttime,
};

View File

@@ -1 +1,4 @@
export * from './elasticsearch.classes.elasticsearch';
export * from './els.classes.smartlogdestination.js';
export * from './els.classes.fastpush.js';
export * from './els.classes.elasticdoc.js';
export * from './els.classes.kvstore.js';

12
tsconfig.json Normal file
View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"verbatimModuleSyntax": true
},
"exclude": [
"dist_*/**/*.d.ts"
]
}

View File

@@ -1,17 +0,0 @@
{
"extends": ["tslint:latest", "tslint-config-prettier"],
"rules": {
"semicolon": [true, "always"],
"no-console": false,
"ordered-imports": false,
"object-literal-sort-keys": false,
"member-ordering": {
"options":{
"order": [
"static-method"
]
}
}
},
"defaultSeverity": "warning"
}