Compare commits
66 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 9d56109d0a | |||
| aace102868 | |||
| 26449e9171 | |||
| c91b7a200b | |||
| fde082974f | |||
| 84c355c499 | |||
| 367bacb954 | |||
| 2adbed8fdb | |||
| 2d7981aa6f | |||
| 458da47c9c | |||
| 92688a7f7f | |||
| 1aed44f035 | |||
| 07895c2767 | |||
| a791c3e8c7 | |||
| 2c2910fafe | |||
| 094b3a8a50 | |||
| 6ed2abca4a | |||
| f9df862b0a | |||
| 2db69618c3 | |||
| a6bb61764b | |||
| 7d867ea6ab | |||
| d03f086c92 | |||
| 83c1e2bb4e | |||
| faa5d6d542 | |||
| 6e06e8108b | |||
| 45ce23ec11 | |||
| 93ef6a3d6b | |||
| 609873b4ad | |||
| 4a8bbc3d13 | |||
| 51c2d4f6e0 | |||
| 45091d6b8c | |||
| eae7300439 | |||
| 7753e58036 | |||
| 333e9c1316 | |||
| 479e0725e6 | |||
| fbf177b482 | |||
| 3272b87e10 | |||
| 87c3548f91 | |||
| af3d461593 | |||
| bd9b3bb985 | |||
| 169e49b93d | |||
| 2db4010648 | |||
| b4ae49b604 | |||
| 9bdd9484f1 | |||
| 1dcfbe7f5d | |||
| e644fed03c | |||
| b67acc1b78 | |||
| 51e76623f9 | |||
| f792bd1907 | |||
| 0139634902 | |||
| 100cba8d74 | |||
| 2af84de938 | |||
| f5ba97aa3d | |||
| 73529b353c | |||
| 88d1b6596f | |||
| 6ef8812a79 | |||
| 3034ef2edd | |||
| 4417c880b6 | |||
| 2f976ac5ce | |||
| ff78573d16 | |||
| aae91111e2 | |||
| 09e597ab87 | |||
| d3ef78af11 | |||
| 4ea0dece4f | |||
| ed550b7ff9 | |||
| ab4ed2602f |
66
.gitea/workflows/default_nottags.yaml
Normal file
66
.gitea/workflows/default_nottags.yaml
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
name: Default (not tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags-ignore:
|
||||||
|
- '**'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Install pnpm and npmci
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @shipzone/npmci
|
||||||
|
|
||||||
|
- name: Run npm prepare
|
||||||
|
run: npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
124
.gitea/workflows/default_tags.yaml
Normal file
124
.gitea/workflows/default_tags.yaml
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
name: Default (tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @shipzone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @shipzone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @shipzone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Release
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm publish
|
||||||
|
|
||||||
|
metadata:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @shipzone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Code quality
|
||||||
|
run: |
|
||||||
|
npmci command npm install -g typescript
|
||||||
|
npmci npm install
|
||||||
|
|
||||||
|
- name: Trigger
|
||||||
|
run: npmci trigger
|
||||||
|
|
||||||
|
- name: Build docs and upload artifacts
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
pnpm install -g @git.zone/tsdoc
|
||||||
|
npmci command tsdoc
|
||||||
|
continue-on-error: true
|
||||||
141
.gitlab-ci.yml
141
.gitlab-ci.yml
@@ -1,141 +0,0 @@
|
|||||||
# gitzone ci_default
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
|
|
||||||
cache:
|
|
||||||
paths:
|
|
||||||
- .npmci_cache/
|
|
||||||
key: '$CI_BUILD_STAGE'
|
|
||||||
|
|
||||||
stages:
|
|
||||||
- security
|
|
||||||
- test
|
|
||||||
- release
|
|
||||||
- metadata
|
|
||||||
|
|
||||||
before_script:
|
|
||||||
- npm install -g @shipzone/npmci
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# security stage
|
|
||||||
# ====================
|
|
||||||
mirror:
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci git mirror
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
auditProductionDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --production --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=prod --production
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
auditDevDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=dev
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# test stage
|
|
||||||
# ====================
|
|
||||||
|
|
||||||
testStable:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci npm test
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
testBuild:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command npm run build
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
release:
|
|
||||||
stage: release
|
|
||||||
script:
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm publish
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# metadata stage
|
|
||||||
# ====================
|
|
||||||
codequality:
|
|
||||||
stage: metadata
|
|
||||||
allow_failure: true
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
script:
|
|
||||||
- npmci command npm install -g tslint typescript
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command "tslint -c tslint.json ./ts/**/*.ts"
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- priv
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci trigger
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
pages:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci node install lts
|
|
||||||
- npmci command npm install -g @gitzone/tsdoc
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command tsdoc
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
artifacts:
|
|
||||||
expire_in: 1 week
|
|
||||||
paths:
|
|
||||||
- public
|
|
||||||
allow_failure: true
|
|
||||||
17
changelog.md
Normal file
17
changelog.md
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-02-27 - 2.2.0 - feat(core)
|
||||||
|
introduce typed ClickHouse table API, query builder, and result handling; enhance HTTP client and add schema evolution, batch inserts and mutations; update docs/tests and bump deps
|
||||||
|
|
||||||
|
- Add generic ClickhouseTable with full table lifecycle, auto-schema-evolution and schema-sync helpers
|
||||||
|
- Add ClickhouseQueryBuilder for fluent typed queries and SQL generation (includes count/first/execute)
|
||||||
|
- Add ClickhouseResultSet with utility methods (first, last, map, filter, toObservable)
|
||||||
|
- Enhance ClickhouseHttpClient: typed query (queryTyped), robust JSONEachRow parsing, error handling, insertBatch, mutatePromise and improved request handling
|
||||||
|
- Keep backward compatibility via TimeDataTable refactor to wrap new ClickhouseTable API
|
||||||
|
- Export new modules from ts/index.ts and update README and tests to cover new features
|
||||||
|
- Bump devDependencies/dependencies, add pnpm patched dependency and patches/agentkeepalive patch, and update npmextra.json metadata
|
||||||
|
|
||||||
|
## 2.1.0 - feat(core): Added comprehensive support for `SmartClickHouseDb` and `TimeDataTable` with features including time data table creation, data insertion, bulk data insertion, querying, data deletion, and real-time data observation. Included standalone Clickhouse HTTP client implementation.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Fixed test case for table deletion and optimized code
|
||||||
2
license
2
license
@@ -1,4 +1,4 @@
|
|||||||
Copyright (c) 2022 Lossless GmbH (hello@lossless.com)
|
Copyright (c) 2022 Task Venture Capital GmbH (hello@task.vc)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -1,18 +1,47 @@
|
|||||||
{
|
{
|
||||||
"gitzone": {
|
"@git.zone/cli": {
|
||||||
"projectType": "npm",
|
"projectType": "npm",
|
||||||
"module": {
|
"module": {
|
||||||
"githost": "gitlab.com",
|
"githost": "code.foss.global",
|
||||||
"gitscope": "pushrocks",
|
"gitscope": "push.rocks",
|
||||||
"gitrepo": "smartclickhouse",
|
"gitrepo": "smartclickhouse",
|
||||||
"description": "an odm for talking to clickhouse",
|
"description": "A TypeScript-based ODM for ClickHouse databases with full CRUD support, fluent query builder, configurable engines, and automatic schema evolution.",
|
||||||
"npmPackagename": "@pushrocks/smartclickhouse",
|
"npmPackagename": "@push.rocks/smartclickhouse",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"projectDomain": "push.rocks"
|
"projectDomain": "push.rocks",
|
||||||
|
"keywords": [
|
||||||
|
"ClickHouse",
|
||||||
|
"ODM",
|
||||||
|
"database",
|
||||||
|
"TypeScript",
|
||||||
|
"query builder",
|
||||||
|
"CRUD",
|
||||||
|
"analytics",
|
||||||
|
"time-series",
|
||||||
|
"schema evolution",
|
||||||
|
"MergeTree",
|
||||||
|
"ReplacingMergeTree",
|
||||||
|
"fluent API",
|
||||||
|
"builder pattern",
|
||||||
|
"data management",
|
||||||
|
"bulk insertion",
|
||||||
|
"real-time data",
|
||||||
|
"observables",
|
||||||
|
"streaming"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"release": {
|
||||||
|
"registries": [
|
||||||
|
"https://verdaccio.lossless.digital",
|
||||||
|
"https://registry.npmjs.org"
|
||||||
|
],
|
||||||
|
"accessLevel": "public"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"npmci": {
|
"@git.zone/tsdoc": {
|
||||||
"npmGlobalTools": [],
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
"npmAccessLevel": "public"
|
},
|
||||||
|
"@ship.zone/szci": {
|
||||||
|
"npmGlobalTools": []
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
25842
package-lock.json
generated
25842
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
66
package.json
66
package.json
@@ -1,27 +1,35 @@
|
|||||||
{
|
{
|
||||||
"name": "@pushrocks/smartclickhouse",
|
"name": "@push.rocks/smartclickhouse",
|
||||||
"version": "1.0.3",
|
"version": "2.2.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "an odm for talking to clickhouse",
|
"description": "A TypeScript-based ODM for ClickHouse databases with full CRUD support, fluent query builder, configurable engines, and automatic schema evolution.",
|
||||||
"main": "dist_ts/index.js",
|
"main": "dist_ts/index.js",
|
||||||
"typings": "dist_ts/index.d.ts",
|
"typings": "dist_ts/index.d.ts",
|
||||||
|
"type": "module",
|
||||||
"author": "Lossless GmbH",
|
"author": "Lossless GmbH",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/ --web)",
|
"test": "(tstest test/ --verbose --logfile --timeout 60)",
|
||||||
"build": "(tsbuild --web)"
|
"build": "(tsbuild --web --allowimplicitany)",
|
||||||
|
"createGrafana": "docker run --name grafana -d -p 4000:3000 grafana/grafana-oss",
|
||||||
|
"createClickhouse": "docker run --name some-clickhouse-server --ulimit nofile=262144:262144 -p 8123:8123 -p 9000:9000 --volume=$PWD/.nogit/testdatabase:/var/lib/clickhouse clickhouse/clickhouse-server",
|
||||||
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@gitzone/tsbuild": "^2.1.25",
|
"@git.zone/tsbuild": "^4.1.2",
|
||||||
"@gitzone/tsbundle": "^1.0.78",
|
"@git.zone/tsbundle": "^2.9.0",
|
||||||
"@gitzone/tstest": "^1.0.44",
|
"@git.zone/tsrun": "^2.0.1",
|
||||||
"@pushrocks/tapbundle": "^4.0.8",
|
"@git.zone/tstest": "^3.1.8",
|
||||||
"@types/node": "^17.0.21",
|
"@push.rocks/tapbundle": "^6.0.3",
|
||||||
"tslint": "^6.1.3",
|
"@types/node": "^22.15.17"
|
||||||
"tslint-config-prettier": "^1.15.0"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"clickhouse": "^2.4.4"
|
"@push.rocks/smartdelay": "^3.0.5",
|
||||||
|
"@push.rocks/smartobject": "^1.0.12",
|
||||||
|
"@push.rocks/smartpromise": "^4.2.3",
|
||||||
|
"@push.rocks/smartrx": "^3.0.10",
|
||||||
|
"@push.rocks/smarturl": "^3.1.0",
|
||||||
|
"@push.rocks/webrequest": "^4.0.2"
|
||||||
},
|
},
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
"last 1 chrome versions"
|
"last 1 chrome versions"
|
||||||
@@ -37,5 +45,35 @@
|
|||||||
"cli.js",
|
"cli.js",
|
||||||
"npmextra.json",
|
"npmextra.json",
|
||||||
"readme.md"
|
"readme.md"
|
||||||
]
|
],
|
||||||
|
"keywords": [
|
||||||
|
"ClickHouse",
|
||||||
|
"ODM",
|
||||||
|
"database",
|
||||||
|
"TypeScript",
|
||||||
|
"query builder",
|
||||||
|
"CRUD",
|
||||||
|
"analytics",
|
||||||
|
"time-series",
|
||||||
|
"schema evolution",
|
||||||
|
"MergeTree",
|
||||||
|
"ReplacingMergeTree",
|
||||||
|
"fluent API",
|
||||||
|
"builder pattern",
|
||||||
|
"data management",
|
||||||
|
"bulk insertion",
|
||||||
|
"real-time data",
|
||||||
|
"observables",
|
||||||
|
"streaming"
|
||||||
|
],
|
||||||
|
"pnpm": {
|
||||||
|
"patchedDependencies": {
|
||||||
|
"agentkeepalive@4.5.0": "patches/agentkeepalive@4.5.0.patch"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"homepage": "https://code.foss.global/push.rocks/smartclickhouse",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://code.foss.global/push.rocks/smartclickhouse.git"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
14
patches/agentkeepalive@4.5.0.patch
Normal file
14
patches/agentkeepalive@4.5.0.patch
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
diff --git a/History.md b/History.md
|
||||||
|
deleted file mode 100644
|
||||||
|
index 6877834dd92a5c71416d47b8d5f92a16aff5c1e6..0000000000000000000000000000000000000000
|
||||||
|
diff --git a/index.js b/index.js
|
||||||
|
index 6ca1513463724d5ab388b5fa4cfc44df0d93ff3d..968047aa93d8584af82b712fa957dd6d99645245 100644
|
||||||
|
--- a/index.js
|
||||||
|
+++ b/index.js
|
||||||
|
@@ -1,5 +1,6 @@
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
module.exports = require('./lib/agent');
|
||||||
|
+module.exports.HttpAgent = module.exports;
|
||||||
|
module.exports.HttpsAgent = require('./lib/https_agent');
|
||||||
|
module.exports.constants = require('./lib/constants');
|
||||||
10718
pnpm-lock.yaml
generated
Normal file
10718
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
1
readme.hints.md
Normal file
1
readme.hints.md
Normal file
@@ -0,0 +1 @@
|
|||||||
|
- there is a local playground with clickhouse hosted under http://localhost:8123/play
|
||||||
447
readme.md
447
readme.md
@@ -1,39 +1,430 @@
|
|||||||
# @pushrocks/smartclickhouse
|
# @push.rocks/smartclickhouse
|
||||||
an odm for talking to clickhouse
|
|
||||||
|
|
||||||
## Availabililty and Links
|
A TypeScript-based ODM for ClickHouse databases with full CRUD support, a fluent query builder, configurable engines, and automatic schema evolution.
|
||||||
* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartclickhouse)
|
|
||||||
* [gitlab.com (source)](https://gitlab.com/pushrocks/smartclickhouse)
|
|
||||||
* [github.com (source mirror)](https://github.com/pushrocks/smartclickhouse)
|
|
||||||
* [docs (typedoc)](https://pushrocks.gitlab.io/smartclickhouse/)
|
|
||||||
|
|
||||||
## Status for master
|
## Issue Reporting and Security
|
||||||
|
|
||||||
Status Category | Status Badge
|
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
|
||||||
-- | --
|
|
||||||
GitLab Pipelines | [](https://lossless.cloud)
|
## Install
|
||||||
GitLab Pipline Test Coverage | [](https://lossless.cloud)
|
|
||||||
npm | [](https://lossless.cloud)
|
```sh
|
||||||
Snyk | [](https://lossless.cloud)
|
pnpm install @push.rocks/smartclickhouse
|
||||||
TypeScript Support | [](https://lossless.cloud)
|
```
|
||||||
node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
|
||||||
Code Style | [](https://lossless.cloud)
|
|
||||||
PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
|
|
||||||
PackagePhobia (package size on registry) | [](https://lossless.cloud)
|
|
||||||
BundlePhobia (total size when bundled) | [](https://lossless.cloud)
|
|
||||||
Platform support | [](https://lossless.cloud) [](https://lossless.cloud)
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
Use TypeScript for best in class intellisense
|
### 🔌 Connecting to ClickHouse
|
||||||
|
|
||||||
## Contribution
|
```typescript
|
||||||
|
import { SmartClickHouseDb } from '@push.rocks/smartclickhouse';
|
||||||
|
|
||||||
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
|
const db = new SmartClickHouseDb({
|
||||||
|
url: 'http://localhost:8123',
|
||||||
|
database: 'myDatabase',
|
||||||
|
username: 'default', // optional
|
||||||
|
password: 'secret', // optional
|
||||||
|
unref: true, // optional — allow process exit during startup
|
||||||
|
});
|
||||||
|
|
||||||
For further information read the linked docs at the top of this readme.
|
await db.start(); // pings until available, creates database if needed
|
||||||
|
await db.start(true); // drops and recreates database (useful for test suites)
|
||||||
|
```
|
||||||
|
|
||||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
The library communicates with ClickHouse over its HTTP interface — no native protocol driver required.
|
||||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
|
|
||||||
|
|
||||||
[](https://maintainedby.lossless.com)
|
---
|
||||||
|
|
||||||
|
### 📋 Creating a Typed Table
|
||||||
|
|
||||||
|
Use `db.createTable<T>()` with full control over engine, ordering, partitioning, and TTL:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface ILogEntry {
|
||||||
|
timestamp: number;
|
||||||
|
level: string;
|
||||||
|
message: string;
|
||||||
|
service: string;
|
||||||
|
duration: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
const logs = await db.createTable<ILogEntry>({
|
||||||
|
tableName: 'logs',
|
||||||
|
orderBy: ['timestamp', 'service'],
|
||||||
|
partitionBy: "toYYYYMM(timestamp)",
|
||||||
|
columns: [
|
||||||
|
{ name: 'timestamp', type: "DateTime64(3, 'Europe/Berlin')" },
|
||||||
|
{ name: 'level', type: 'String' },
|
||||||
|
{ name: 'message', type: 'String' },
|
||||||
|
{ name: 'service', type: 'String' },
|
||||||
|
{ name: 'duration', type: 'Float64' },
|
||||||
|
],
|
||||||
|
ttl: { column: 'timestamp', interval: '90 DAY' },
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### ⚙️ Engine Configuration
|
||||||
|
|
||||||
|
Supports the full MergeTree family:
|
||||||
|
|
||||||
|
| Engine | Use Case |
|
||||||
|
|---|---|
|
||||||
|
| `MergeTree` | Default — append-only, great for logs and events |
|
||||||
|
| `ReplacingMergeTree` | Upsert-style mutable data (deduplicates on `OPTIMIZE`) |
|
||||||
|
| `SummingMergeTree` | Pre-aggregated counters and metrics |
|
||||||
|
| `AggregatingMergeTree` | Materialized aggregate states |
|
||||||
|
| `CollapsingMergeTree` | Mutable rows via sign-based collapsing |
|
||||||
|
| `VersionedCollapsingMergeTree` | Versioned collapsing for concurrent updates |
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ReplacingMergeTree for upsert-style mutable data
|
||||||
|
const users = await db.createTable<IUser>({
|
||||||
|
tableName: 'users',
|
||||||
|
engine: { engine: 'ReplacingMergeTree', versionColumn: 'updatedAt' },
|
||||||
|
orderBy: 'userId',
|
||||||
|
});
|
||||||
|
|
||||||
|
// SummingMergeTree for pre-aggregated metrics
|
||||||
|
const metrics = await db.createTable<IMetric>({
|
||||||
|
tableName: 'metrics',
|
||||||
|
engine: { engine: 'SummingMergeTree' },
|
||||||
|
orderBy: ['date', 'metricName'],
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🧬 Auto-Schema Evolution
|
||||||
|
|
||||||
|
When `autoSchemaEvolution` is enabled (default), new columns are created automatically from your data via `ALTER TABLE ADD COLUMN`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const flexTable = await db.createTable<any>({
|
||||||
|
tableName: 'events',
|
||||||
|
orderBy: 'timestamp' as any,
|
||||||
|
autoSchemaEvolution: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// First insert creates the base schema
|
||||||
|
await flexTable.insert({ timestamp: Date.now(), message: 'hello' });
|
||||||
|
|
||||||
|
// New fields trigger ALTER TABLE ADD COLUMN automatically
|
||||||
|
await flexTable.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
message: 'world',
|
||||||
|
userId: 'u123', // → new String column
|
||||||
|
responseTime: 150.5, // → new Float64 column
|
||||||
|
tags: ['a', 'b'], // → new Array(String) column
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
Nested objects are automatically flattened (e.g. `{ deep: { field: 'value' } }` becomes column `deep_field`).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### ✏️ Inserting Data
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Single row
|
||||||
|
await logs.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
level: 'info',
|
||||||
|
message: 'Request processed',
|
||||||
|
service: 'api',
|
||||||
|
duration: 42.5,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Multiple rows
|
||||||
|
await logs.insertMany([
|
||||||
|
{ timestamp: Date.now(), level: 'info', message: 'msg1', service: 'api', duration: 10 },
|
||||||
|
{ timestamp: Date.now(), level: 'error', message: 'msg2', service: 'worker', duration: 500 },
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Large batch with configurable chunk size
|
||||||
|
await logs.insertBatch(largeArray, { batchSize: 50000 });
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 🌊 Streaming Inserts
|
||||||
|
|
||||||
|
Use `createInsertStream()` for push-based insert buffering with automatic batch flushing:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const stream = logs.createInsertStream({ batchSize: 100, flushIntervalMs: 1000 });
|
||||||
|
|
||||||
|
stream.push({ timestamp: Date.now(), level: 'info', message: 'event1', service: 'api', duration: 10 });
|
||||||
|
stream.push({ timestamp: Date.now(), level: 'info', message: 'event2', service: 'api', duration: 20 });
|
||||||
|
|
||||||
|
// Signal end-of-stream and wait for final flush
|
||||||
|
stream.signalComplete();
|
||||||
|
await stream.completed;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🔍 Querying with the Fluent Builder
|
||||||
|
|
||||||
|
The query builder provides type-safe, chainable query construction:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Basic filtered query
|
||||||
|
const errors = await logs.query()
|
||||||
|
.where('level', '=', 'error')
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(100)
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Multiple conditions with AND / OR
|
||||||
|
const result = await logs.query()
|
||||||
|
.where('service', '=', 'api')
|
||||||
|
.and('duration', '>', 1000)
|
||||||
|
.and('level', 'IN', ['error', 'warn'])
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(50)
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// OR conditions
|
||||||
|
const mixed = await logs.query()
|
||||||
|
.where('level', '=', 'error')
|
||||||
|
.or('duration', '>', 5000)
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Get first match
|
||||||
|
const latest = await logs.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.first();
|
||||||
|
|
||||||
|
// Count
|
||||||
|
const errorCount = await logs.query()
|
||||||
|
.where('level', '=', 'error')
|
||||||
|
.count();
|
||||||
|
|
||||||
|
// Pagination with limit/offset
|
||||||
|
const page2 = await logs.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(20)
|
||||||
|
.offset(20)
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Aggregation with raw expressions
|
||||||
|
const stats = await logs.query()
|
||||||
|
.selectRaw('service', 'count() as requests', 'avg(duration) as avgDuration')
|
||||||
|
.groupBy('service')
|
||||||
|
.having('requests > 100')
|
||||||
|
.orderBy('requests' as any, 'DESC')
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Select specific columns
|
||||||
|
const names = await logs.query()
|
||||||
|
.select('service', 'level')
|
||||||
|
.limit(10)
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Raw WHERE expression for advanced use cases
|
||||||
|
const advanced = await logs.query()
|
||||||
|
.whereRaw("toHour(timestamp) BETWEEN 9 AND 17")
|
||||||
|
.toArray();
|
||||||
|
|
||||||
|
// Debug — inspect generated SQL without executing
|
||||||
|
console.log(logs.query().where('level', '=', 'error').limit(10).toSQL());
|
||||||
|
// → SELECT * FROM mydb.logs WHERE level = 'error' LIMIT 10 FORMAT JSONEachRow
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Supported Operators
|
||||||
|
|
||||||
|
`=`, `!=`, `>`, `>=`, `<`, `<=`, `LIKE`, `NOT LIKE`, `IN`, `NOT IN`, `BETWEEN`
|
||||||
|
|
||||||
|
#### 📦 Result Sets
|
||||||
|
|
||||||
|
Use `.execute()` to get a `ClickhouseResultSet` with convenience methods:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const resultSet = await logs.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(100)
|
||||||
|
.execute();
|
||||||
|
|
||||||
|
resultSet.isEmpty(); // boolean
|
||||||
|
resultSet.rowCount; // number
|
||||||
|
resultSet.first(); // T | null
|
||||||
|
resultSet.last(); // T | null
|
||||||
|
resultSet.map(r => r.service); // string[]
|
||||||
|
resultSet.filter(r => r.duration > 100); // ClickhouseResultSet<T>
|
||||||
|
resultSet.toObservable(); // RxJS Observable<T>
|
||||||
|
resultSet.toArray(); // T[]
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🔄 Updating Data
|
||||||
|
|
||||||
|
Updates use ClickHouse mutations (`ALTER TABLE UPDATE`). The library automatically waits for mutations to complete.
|
||||||
|
|
||||||
|
> 💡 For frequently updated data, consider using `ReplacingMergeTree` instead — it's the idiomatic ClickHouse approach for mutable rows.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
await logs.update(
|
||||||
|
{ level: 'warn' }, // SET clause
|
||||||
|
(q) => q.where('level', '=', 'warning'), // WHERE clause
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
A WHERE clause is **required** — you can't accidentally update every row.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🗑️ Deleting Data
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Targeted delete with builder
|
||||||
|
await logs.deleteWhere(
|
||||||
|
(q) => q.where('level', '=', 'debug').and('timestamp', '<', cutoffDate),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Delete by age (interval syntax)
|
||||||
|
await logs.deleteOlderThan('timestamp', '30 DAY');
|
||||||
|
|
||||||
|
// Drop entire table
|
||||||
|
await logs.drop();
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 👀 Watching for New Data
|
||||||
|
|
||||||
|
Stream new entries via polling with an RxJS Observable:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const subscription = logs.watch({ pollInterval: 2000 }).subscribe((entry) => {
|
||||||
|
console.log('New entry:', entry);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Stop watching
|
||||||
|
subscription.unsubscribe();
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🛠️ Utilities
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
await logs.getRowCount(); // total row count
|
||||||
|
await logs.optimize(true); // OPTIMIZE TABLE FINAL (dedup for ReplacingMergeTree)
|
||||||
|
await logs.waitForMutations(); // wait for pending mutations to complete
|
||||||
|
await logs.updateColumns(); // refresh column metadata from system.columns
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🔧 Raw Queries
|
||||||
|
|
||||||
|
Execute arbitrary SQL directly on the database:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const result = await db.query<{ total: string }>(
|
||||||
|
'SELECT count() as total FROM mydb.logs FORMAT JSONEachRow'
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🏛️ Backward Compatibility
|
||||||
|
|
||||||
|
The legacy `getTable()` API still works exactly as before. It returns a `TimeDataTable` pre-configured with MergeTree, timestamp ordering, auto-schema evolution, and TTL:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const table = await db.getTable('analytics');
|
||||||
|
|
||||||
|
// Insert — accepts arbitrary JSON objects, auto-flattens nested fields
|
||||||
|
await table.addData({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
message: 'hello',
|
||||||
|
nested: { field: 'value' }, // stored as column `nested_field`
|
||||||
|
});
|
||||||
|
|
||||||
|
// Query
|
||||||
|
const entries = await table.getLastEntries(10);
|
||||||
|
const recent = await table.getEntriesNewerThan(Date.now() - 60000);
|
||||||
|
const range = await table.getEntriesBetween(startMs, endMs);
|
||||||
|
|
||||||
|
// Delete
|
||||||
|
await table.deleteOldEntries(30); // remove entries older than 30 days
|
||||||
|
|
||||||
|
// Watch
|
||||||
|
table.watchNewEntries().subscribe(entry => console.log(entry));
|
||||||
|
|
||||||
|
// Drop
|
||||||
|
await table.delete();
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also use the factory function directly:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { createTimeDataTable } from '@push.rocks/smartclickhouse';
|
||||||
|
|
||||||
|
const table = await createTimeDataTable(db, 'analytics', 90 /* retain days */);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 🐳 Running ClickHouse Locally
|
||||||
|
|
||||||
|
```sh
|
||||||
|
docker run --name clickhouse-server \
|
||||||
|
--ulimit nofile=262144:262144 \
|
||||||
|
-p 8123:8123 -p 9000:9000 \
|
||||||
|
-e CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 \
|
||||||
|
clickhouse/clickhouse-server
|
||||||
|
```
|
||||||
|
|
||||||
|
The HTTP interface is available at `http://localhost:8123` with a playground at `http://localhost:8123/play`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 📚 Exported Types
|
||||||
|
|
||||||
|
The library exports all types for full TypeScript integration:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import type {
|
||||||
|
TClickhouseColumnType, // String, UInt64, Float64, DateTime64, Array(...), etc.
|
||||||
|
TClickhouseEngine, // MergeTree family engine names
|
||||||
|
IEngineConfig, // Engine + version/sign column config
|
||||||
|
IClickhouseTableOptions, // Full table creation options
|
||||||
|
IColumnDefinition, // Column name + type + default + codec
|
||||||
|
IColumnInfo, // Column metadata from system.columns
|
||||||
|
TComparisonOperator, // =, !=, >, <, LIKE, IN, BETWEEN, etc.
|
||||||
|
} from '@push.rocks/smartclickhouse';
|
||||||
|
```
|
||||||
|
|
||||||
|
Utility functions are also exported:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { escapeClickhouseValue, detectClickhouseType } from '@push.rocks/smartclickhouse';
|
||||||
|
|
||||||
|
escapeClickhouseValue("O'Brien"); // → "'O\\'Brien'"
|
||||||
|
escapeClickhouseValue(42); // → '42'
|
||||||
|
escapeClickhouseValue(['a', 'b']); // → "('a', 'b')"
|
||||||
|
|
||||||
|
detectClickhouseType('hello'); // → 'String'
|
||||||
|
detectClickhouseType(3.14); // → 'Float64'
|
||||||
|
detectClickhouseType([1, 2]); // → 'Array(Float64)'
|
||||||
|
```
|
||||||
|
|
||||||
|
## License and Legal Information
|
||||||
|
|
||||||
|
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
||||||
|
|
||||||
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
### Trademarks
|
||||||
|
|
||||||
|
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
|
||||||
|
|
||||||
|
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
|
||||||
|
|
||||||
|
### Company Information
|
||||||
|
|
||||||
|
Task Venture Capital GmbH
|
||||||
|
Registered at District Court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
|
For any legal inquiries or further information, please contact us via email at hello@task.vc.
|
||||||
|
|
||||||
|
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
||||||
|
|||||||
321
test/test.nonci.ts
Normal file
321
test/test.nonci.ts
Normal file
@@ -0,0 +1,321 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as smartclickhouse from '../ts/index.js';
|
||||||
|
|
||||||
|
let testClickhouseDb: smartclickhouse.SmartClickHouseDb;
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Connection
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should create a SmartClickHouseDb instance', async () => {
|
||||||
|
testClickhouseDb = new smartclickhouse.SmartClickHouseDb({
|
||||||
|
url: 'http://localhost:8123',
|
||||||
|
database: 'test_smartclickhouse',
|
||||||
|
unref: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should start the clickhouse db', async () => {
|
||||||
|
await testClickhouseDb.start(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Backward-compatible TimeDataTable tests
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
let timeTable: smartclickhouse.TimeDataTable;
|
||||||
|
|
||||||
|
tap.test('should create a TimeDataTable via getTable()', async () => {
|
||||||
|
timeTable = await testClickhouseDb.getTable('analytics');
|
||||||
|
expect(timeTable).toBeInstanceOf(smartclickhouse.TimeDataTable);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should insert data via addData()', async () => {
|
||||||
|
for (let i = 0; i < 50; i++) {
|
||||||
|
await timeTable.addData({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
message: `hello this is a message ${i}`,
|
||||||
|
wow: 'hey',
|
||||||
|
deep: {
|
||||||
|
so: 'hello',
|
||||||
|
myArray: ['array1', 'array2'],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should retrieve the last 10 entries', async () => {
|
||||||
|
const entries = await timeTable.getLastEntries(10);
|
||||||
|
expect(entries.length).toEqual(10);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should retrieve entries newer than a specific timestamp', async () => {
|
||||||
|
const timestamp = Date.now() - 60000;
|
||||||
|
const entries = await timeTable.getEntriesNewerThan(timestamp);
|
||||||
|
expect(entries.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should retrieve entries between two timestamps', async () => {
|
||||||
|
const startTimestamp = Date.now() - 120000;
|
||||||
|
const endTimestamp = Date.now() + 5000;
|
||||||
|
const entries = await timeTable.getEntriesBetween(startTimestamp, endTimestamp);
|
||||||
|
expect(entries.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should delete the time data table', async () => {
|
||||||
|
await timeTable.delete();
|
||||||
|
const result = await testClickhouseDb.clickhouseHttpClient.queryPromise(`
|
||||||
|
SHOW TABLES FROM ${testClickhouseDb.options.database} LIKE '${timeTable.options.tableName}'
|
||||||
|
`);
|
||||||
|
expect(result.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// New typed ClickhouseTable API
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
interface ILogEntry {
|
||||||
|
timestamp: number;
|
||||||
|
level: string;
|
||||||
|
message: string;
|
||||||
|
service: string;
|
||||||
|
duration: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
let logTable: smartclickhouse.ClickhouseTable<ILogEntry>;
|
||||||
|
|
||||||
|
tap.test('should create a typed table via createTable()', async () => {
|
||||||
|
logTable = await testClickhouseDb.createTable<ILogEntry>({
|
||||||
|
tableName: 'logs',
|
||||||
|
orderBy: 'timestamp',
|
||||||
|
columns: [
|
||||||
|
{ name: 'timestamp', type: "DateTime64(3, 'Europe/Berlin')" },
|
||||||
|
{ name: 'level', type: 'String' },
|
||||||
|
{ name: 'message', type: 'String' },
|
||||||
|
{ name: 'service', type: 'String' },
|
||||||
|
{ name: 'duration', type: 'Float64' },
|
||||||
|
],
|
||||||
|
ttl: { column: 'timestamp', interval: '30 DAY' },
|
||||||
|
});
|
||||||
|
expect(logTable).toBeInstanceOf(smartclickhouse.ClickhouseTable);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should insert a single row', async () => {
|
||||||
|
await logTable.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
level: 'info',
|
||||||
|
message: 'Server started',
|
||||||
|
service: 'api',
|
||||||
|
duration: 0,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should insert many rows', async () => {
|
||||||
|
const rows: ILogEntry[] = [];
|
||||||
|
for (let i = 0; i < 100; i++) {
|
||||||
|
rows.push({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
level: i % 10 === 0 ? 'error' : 'info',
|
||||||
|
message: `Log message ${i}`,
|
||||||
|
service: i % 2 === 0 ? 'api' : 'worker',
|
||||||
|
duration: Math.random() * 2000,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
await logTable.insertMany(rows);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query with builder - basic where', async () => {
|
||||||
|
const errors = await logTable.query()
|
||||||
|
.where('level', '=', 'error')
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.toArray();
|
||||||
|
expect(errors.length).toBeGreaterThan(0);
|
||||||
|
for (const entry of errors) {
|
||||||
|
expect(entry.level).toEqual('error');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query with builder - limit and offset', async () => {
|
||||||
|
const result = await logTable.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(5)
|
||||||
|
.toArray();
|
||||||
|
expect(result.length).toEqual(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query with builder - multiple conditions', async () => {
|
||||||
|
const result = await logTable.query()
|
||||||
|
.where('service', '=', 'api')
|
||||||
|
.and('level', '=', 'info')
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(10)
|
||||||
|
.toArray();
|
||||||
|
for (const entry of result) {
|
||||||
|
expect(entry.service).toEqual('api');
|
||||||
|
expect(entry.level).toEqual('info');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query with builder - IN operator', async () => {
|
||||||
|
const result = await logTable.query()
|
||||||
|
.where('level', 'IN', ['error', 'info'])
|
||||||
|
.limit(10)
|
||||||
|
.toArray();
|
||||||
|
expect(result.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query first()', async () => {
|
||||||
|
const entry = await logTable.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.first();
|
||||||
|
expect(entry).toBeTruthy();
|
||||||
|
expect(entry.level).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should query count()', async () => {
|
||||||
|
const count = await logTable.query().count();
|
||||||
|
expect(count).toBeGreaterThan(100);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should get row count', async () => {
|
||||||
|
const count = await logTable.getRowCount();
|
||||||
|
expect(count).toBeGreaterThan(100);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should generate SQL with toSQL()', async () => {
|
||||||
|
const sql = logTable.query()
|
||||||
|
.where('level', '=', 'error')
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(10)
|
||||||
|
.toSQL();
|
||||||
|
expect(sql).toInclude('WHERE');
|
||||||
|
expect(sql).toInclude('level');
|
||||||
|
expect(sql).toInclude('ORDER BY');
|
||||||
|
expect(sql).toInclude('LIMIT');
|
||||||
|
console.log('Generated SQL:', sql);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should execute result set operations', async () => {
|
||||||
|
const resultSet = await logTable.query()
|
||||||
|
.orderBy('timestamp', 'DESC')
|
||||||
|
.limit(20)
|
||||||
|
.execute();
|
||||||
|
|
||||||
|
expect(resultSet.isEmpty()).toBeFalse();
|
||||||
|
expect(resultSet.rowCount).toEqual(20);
|
||||||
|
expect(resultSet.first()).toBeTruthy();
|
||||||
|
expect(resultSet.last()).toBeTruthy();
|
||||||
|
|
||||||
|
const filtered = resultSet.filter((row) => row.level === 'error');
|
||||||
|
expect(filtered.rows.length).toBeLessThanOrEqual(20);
|
||||||
|
|
||||||
|
const services = resultSet.map((row) => row.service);
|
||||||
|
expect(services.length).toEqual(20);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// UPDATE (mutation)
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should update rows via mutation', async () => {
|
||||||
|
// First, insert a specific row to update
|
||||||
|
await logTable.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
level: 'warning',
|
||||||
|
message: 'Deprecated API call',
|
||||||
|
service: 'api',
|
||||||
|
duration: 50,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update it
|
||||||
|
await logTable.update(
|
||||||
|
{ level: 'warn' },
|
||||||
|
(q) => q.where('level', '=', 'warning'),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Verify: no more 'warning' level entries
|
||||||
|
const warnings = await logTable.query()
|
||||||
|
.where('level', '=', 'warning')
|
||||||
|
.toArray();
|
||||||
|
expect(warnings.length).toEqual(0);
|
||||||
|
|
||||||
|
// Verify: 'warn' entries exist
|
||||||
|
const warns = await logTable.query()
|
||||||
|
.where('level', '=', 'warn')
|
||||||
|
.toArray();
|
||||||
|
expect(warns.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// DELETE (targeted)
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should delete rows with targeted where clause', async () => {
|
||||||
|
const countBefore = await logTable.query()
|
||||||
|
.where('level', '=', 'warn')
|
||||||
|
.count();
|
||||||
|
expect(countBefore).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
await logTable.deleteWhere(
|
||||||
|
(q) => q.where('level', '=', 'warn'),
|
||||||
|
);
|
||||||
|
|
||||||
|
const countAfter = await logTable.query()
|
||||||
|
.where('level', '=', 'warn')
|
||||||
|
.count();
|
||||||
|
expect(countAfter).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Auto-schema evolution on typed table
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should auto-evolve schema when inserting new fields', async () => {
|
||||||
|
const flexTable = await testClickhouseDb.createTable<any>({
|
||||||
|
tableName: 'flex_data',
|
||||||
|
orderBy: 'timestamp' as any,
|
||||||
|
autoSchemaEvolution: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
await flexTable.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
message: 'first insert',
|
||||||
|
});
|
||||||
|
|
||||||
|
// Insert with a new field — should trigger schema evolution
|
||||||
|
await flexTable.insert({
|
||||||
|
timestamp: Date.now(),
|
||||||
|
message: 'second insert',
|
||||||
|
newField: 'surprise!',
|
||||||
|
count: 42,
|
||||||
|
});
|
||||||
|
|
||||||
|
const columns = await flexTable.updateColumns();
|
||||||
|
const columnNames = columns.map((c) => c.name);
|
||||||
|
expect(columnNames).toContain('newField');
|
||||||
|
expect(columnNames).toContain('count');
|
||||||
|
|
||||||
|
await flexTable.drop();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Raw query on db
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should execute raw query via db.query()', async () => {
|
||||||
|
const result = await testClickhouseDb.query<{ cnt: string }>(
|
||||||
|
`SELECT count() as cnt FROM ${testClickhouseDb.options.database}.logs FORMAT JSONEachRow`
|
||||||
|
);
|
||||||
|
expect(result.length).toEqual(1);
|
||||||
|
expect(parseInt(result[0].cnt, 10)).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Cleanup
|
||||||
|
// ============================================================
|
||||||
|
|
||||||
|
tap.test('should drop the logs table', async () => {
|
||||||
|
await logTable.drop();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
45
test/test.ts
45
test/test.ts
@@ -1,45 +0,0 @@
|
|||||||
import { expect, expectAsync, tap } from '@pushrocks/tapbundle';
|
|
||||||
import * as smartclickhouse from '../ts/index';
|
|
||||||
|
|
||||||
let testClickhouseDb: smartclickhouse.ClickhouseDb;
|
|
||||||
|
|
||||||
tap.test('first test', async () => {
|
|
||||||
testClickhouseDb = new smartclickhouse.ClickhouseDb({
|
|
||||||
url: 'http://localhost',
|
|
||||||
port: 8123,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('should start the clickhouse db', async () => {
|
|
||||||
await testClickhouseDb.start();
|
|
||||||
})
|
|
||||||
|
|
||||||
tap.test('should write something to the clickhouse db', async () => {
|
|
||||||
const result = await testClickhouseDb.clickhouseClient.query(`CREATE DATABASE IF NOT EXISTS lossless`).toPromise();
|
|
||||||
console.log(result);
|
|
||||||
const result2 = await testClickhouseDb.clickhouseClient.query(`CREATE TABLE IF NOT EXISTS lossless.visits (
|
|
||||||
timestamp UInt64,
|
|
||||||
ip String,
|
|
||||||
os String,
|
|
||||||
userAgent String,
|
|
||||||
version String
|
|
||||||
) ENGINE=MergeTree() ORDER BY timestamp`).toPromise();
|
|
||||||
console.log(result2);
|
|
||||||
const ws = testClickhouseDb.clickhouseClient.insert('INSERT INTO lossless.visits FORMAT JSONEachRow').stream();
|
|
||||||
for(let i = 0; i <= 1000; i++) {
|
|
||||||
await ws.writeRow(
|
|
||||||
JSON.stringify({
|
|
||||||
timestamp: Date.now(),
|
|
||||||
ip: '127.0.01',
|
|
||||||
os: 'Mac OS X',
|
|
||||||
userAgent: 'some',
|
|
||||||
version: 'someversion'
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
//wait stream finish
|
|
||||||
const result3 = await ws.exec();
|
|
||||||
})
|
|
||||||
|
|
||||||
tap.start();
|
|
||||||
8
ts/00_commitinfo_data.ts
Normal file
8
ts/00_commitinfo_data.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/**
|
||||||
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
|
*/
|
||||||
|
export const commitinfo = {
|
||||||
|
name: '@push.rocks/smartclickhouse',
|
||||||
|
version: '2.2.0',
|
||||||
|
description: 'A TypeScript-based ODM for ClickHouse databases with full CRUD support, fluent query builder, configurable engines, and automatic schema evolution.'
|
||||||
|
}
|
||||||
34
ts/index.ts
34
ts/index.ts
@@ -1,26 +1,14 @@
|
|||||||
import * as plugins from './smartclickhouse.plugins';
|
// Core
|
||||||
|
export * from './smartclickhouse.classes.smartclickhouse.js';
|
||||||
|
export * from './smartclickhouse.classes.clickhousetable.js';
|
||||||
|
export * from './smartclickhouse.classes.httpclient.js';
|
||||||
|
|
||||||
export interface IClickhouseConstructorOptions {
|
// Query & Results
|
||||||
url: string;
|
export * from './smartclickhouse.classes.querybuilder.js';
|
||||||
port?: number;
|
export * from './smartclickhouse.classes.resultset.js';
|
||||||
}
|
|
||||||
|
|
||||||
export class ClickhouseDb {
|
// Time Data Table (backward compat)
|
||||||
public options: IClickhouseConstructorOptions;
|
export * from './smartclickhouse.classes.timedatatable.js';
|
||||||
public clickhouseClient: plugins.clickhouse.ClickHouse;
|
|
||||||
|
|
||||||
constructor(optionsArg: IClickhouseConstructorOptions) {
|
// Types
|
||||||
this.options = optionsArg;
|
export * from './smartclickhouse.types.js';
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* starts the connection to the Clickhouse db
|
|
||||||
*/
|
|
||||||
public start() {
|
|
||||||
this.clickhouseClient = new plugins.clickhouse.ClickHouse({
|
|
||||||
...this.options,
|
|
||||||
basicAuth: null,
|
|
||||||
format: 'json'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
498
ts/smartclickhouse.classes.clickhousetable.ts
Normal file
498
ts/smartclickhouse.classes.clickhousetable.ts
Normal file
@@ -0,0 +1,498 @@
|
|||||||
|
import * as plugins from './smartclickhouse.plugins.js';
|
||||||
|
import type { SmartClickHouseDb } from './smartclickhouse.classes.smartclickhouse.js';
|
||||||
|
import { ClickhouseQueryBuilder } from './smartclickhouse.classes.querybuilder.js';
|
||||||
|
import type {
|
||||||
|
IClickhouseTableOptions,
|
||||||
|
IColumnInfo,
|
||||||
|
TClickhouseColumnType,
|
||||||
|
} from './smartclickhouse.types.js';
|
||||||
|
import { detectClickhouseType, escapeClickhouseValue } from './smartclickhouse.types.js';
|
||||||
|
|
||||||
|
export class ClickhouseTable<T extends Record<string, any>> {
|
||||||
|
// ---- STATIC FACTORY ----
|
||||||
|
|
||||||
|
public static async create<T extends Record<string, any>>(
|
||||||
|
db: SmartClickHouseDb,
|
||||||
|
options: IClickhouseTableOptions<T>,
|
||||||
|
): Promise<ClickhouseTable<T>> {
|
||||||
|
const table = new ClickhouseTable<T>(db, options);
|
||||||
|
await table.setup();
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- INSTANCE ----
|
||||||
|
|
||||||
|
public db: SmartClickHouseDb;
|
||||||
|
public options: IClickhouseTableOptions<T>;
|
||||||
|
public columns: IColumnInfo[] = [];
|
||||||
|
private healingDeferred: plugins.smartpromise.Deferred<any> | null = null;
|
||||||
|
|
||||||
|
constructor(db: SmartClickHouseDb, options: IClickhouseTableOptions<T>) {
|
||||||
|
this.db = db;
|
||||||
|
this.options = {
|
||||||
|
autoSchemaEvolution: true,
|
||||||
|
...options,
|
||||||
|
database: options.database || db.options.database,
|
||||||
|
engine: options.engine || { engine: 'MergeTree' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- SCHEMA MANAGEMENT ----
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates the table if it doesn't exist and refreshes column metadata
|
||||||
|
*/
|
||||||
|
public async setup(): Promise<void> {
|
||||||
|
const { database, tableName, engine, orderBy, partitionBy, primaryKey, ttl, retainDataForDays, columns } = this.options;
|
||||||
|
|
||||||
|
// Build column definitions
|
||||||
|
let columnDefs: string;
|
||||||
|
if (columns && columns.length > 0) {
|
||||||
|
columnDefs = columns.map((col) => {
|
||||||
|
let def = `${col.name} ${col.type}`;
|
||||||
|
if (col.defaultExpression) def += ` DEFAULT ${col.defaultExpression}`;
|
||||||
|
if (col.codec) def += ` CODEC(${col.codec})`;
|
||||||
|
return def;
|
||||||
|
}).join(',\n ');
|
||||||
|
} else {
|
||||||
|
// Default minimal schema — downstream code can add columns via auto-schema evolution
|
||||||
|
columnDefs = `timestamp DateTime64(3, 'Europe/Berlin'),\n message String`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build engine clause
|
||||||
|
let engineClause: string = engine.engine;
|
||||||
|
if (engine.engine === 'ReplacingMergeTree' && engine.versionColumn) {
|
||||||
|
engineClause = `ReplacingMergeTree(${engine.versionColumn})`;
|
||||||
|
} else if (engine.engine === 'CollapsingMergeTree' && engine.signColumn) {
|
||||||
|
engineClause = `CollapsingMergeTree(${engine.signColumn})`;
|
||||||
|
} else if (engine.engine === 'VersionedCollapsingMergeTree' && engine.signColumn && engine.versionColumn) {
|
||||||
|
engineClause = `VersionedCollapsingMergeTree(${engine.signColumn}, ${engine.versionColumn})`;
|
||||||
|
} else {
|
||||||
|
engineClause = `${engine.engine}()`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build ORDER BY
|
||||||
|
const orderByStr = Array.isArray(orderBy) ? orderBy.join(', ') : orderBy;
|
||||||
|
|
||||||
|
let createSQL = `
|
||||||
|
CREATE TABLE IF NOT EXISTS ${database}.${tableName} (
|
||||||
|
${columnDefs}
|
||||||
|
) ENGINE = ${engineClause}`;
|
||||||
|
|
||||||
|
if (partitionBy) {
|
||||||
|
createSQL += `\n PARTITION BY ${partitionBy}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
createSQL += `\n ORDER BY (${orderByStr})`;
|
||||||
|
|
||||||
|
if (primaryKey) {
|
||||||
|
const primaryKeyStr = Array.isArray(primaryKey) ? primaryKey.join(', ') : primaryKey;
|
||||||
|
createSQL += `\n PRIMARY KEY (${primaryKeyStr})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.db.clickhouseHttpClient.queryPromise(createSQL);
|
||||||
|
|
||||||
|
// Apply TTL if configured
|
||||||
|
if (ttl) {
|
||||||
|
await this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
ALTER TABLE ${database}.${tableName}
|
||||||
|
MODIFY TTL toDateTime(${String(ttl.column)}) + INTERVAL ${ttl.interval}
|
||||||
|
`);
|
||||||
|
} else if (retainDataForDays && retainDataForDays > 0) {
|
||||||
|
// Legacy shorthand
|
||||||
|
await this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
ALTER TABLE ${database}.${tableName}
|
||||||
|
MODIFY TTL toDateTime(timestamp) + INTERVAL ${retainDataForDays} DAY
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.updateColumns();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Refresh column metadata from system.columns
|
||||||
|
*/
|
||||||
|
public async updateColumns(): Promise<IColumnInfo[]> {
|
||||||
|
this.columns = await this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
SELECT * FROM system.columns
|
||||||
|
WHERE database = '${this.options.database}'
|
||||||
|
AND table = '${this.options.tableName}' FORMAT JSONEachRow
|
||||||
|
`);
|
||||||
|
return this.columns;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Auto-schema evolution: detect new columns from data and add them
|
||||||
|
*/
|
||||||
|
public async syncSchema(data: Record<string, any>): Promise<void> {
|
||||||
|
const flatData = plugins.smartobject.toFlatObject(data);
|
||||||
|
for (const key of Object.keys(flatData)) {
|
||||||
|
if (key === 'timestamp') continue;
|
||||||
|
const value = flatData[key];
|
||||||
|
const clickhouseType = detectClickhouseType(value);
|
||||||
|
if (!clickhouseType) continue;
|
||||||
|
await this.ensureColumn(key, clickhouseType);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- INSERT ----
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert a single row
|
||||||
|
*/
|
||||||
|
public async insert(data: Partial<T>): Promise<void> {
|
||||||
|
if (this.healingDeferred) return;
|
||||||
|
|
||||||
|
const storageDoc = await this.prepareDocument(data);
|
||||||
|
await this.executeInsert([storageDoc]);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert multiple rows
|
||||||
|
*/
|
||||||
|
public async insertMany(data: Partial<T>[]): Promise<void> {
|
||||||
|
if (this.healingDeferred) return;
|
||||||
|
if (data.length === 0) return;
|
||||||
|
|
||||||
|
// Schema sync across all documents
|
||||||
|
if (this.options.autoSchemaEvolution) {
|
||||||
|
const allKeys = new Set<string>();
|
||||||
|
const sampleValues: Record<string, any> = {};
|
||||||
|
for (const doc of data) {
|
||||||
|
const flat = plugins.smartobject.toFlatObject(doc);
|
||||||
|
for (const [key, value] of Object.entries(flat)) {
|
||||||
|
if (!allKeys.has(key)) {
|
||||||
|
allKeys.add(key);
|
||||||
|
sampleValues[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await this.syncSchema(sampleValues);
|
||||||
|
}
|
||||||
|
|
||||||
|
const storageDocs = data.map((doc) => this.flattenDocument(doc));
|
||||||
|
await this.executeInsert(storageDocs);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert in batches of configurable size
|
||||||
|
*/
|
||||||
|
public async insertBatch(data: Partial<T>[], options?: { batchSize?: number }): Promise<void> {
|
||||||
|
const batchSize = options?.batchSize || 10000;
|
||||||
|
if (this.healingDeferred) return;
|
||||||
|
if (data.length === 0) return;
|
||||||
|
|
||||||
|
// Schema sync across all documents first
|
||||||
|
if (this.options.autoSchemaEvolution) {
|
||||||
|
const sampleValues: Record<string, any> = {};
|
||||||
|
for (const doc of data) {
|
||||||
|
const flat = plugins.smartobject.toFlatObject(doc);
|
||||||
|
for (const [key, value] of Object.entries(flat)) {
|
||||||
|
if (!(key in sampleValues)) {
|
||||||
|
sampleValues[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await this.syncSchema(sampleValues);
|
||||||
|
}
|
||||||
|
|
||||||
|
const storageDocs = data.map((doc) => this.flattenDocument(doc));
|
||||||
|
await this.db.clickhouseHttpClient.insertBatch(
|
||||||
|
this.options.database,
|
||||||
|
this.options.tableName,
|
||||||
|
storageDocs,
|
||||||
|
batchSize,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Create a push-based insert stream using ObservableIntake.
 *
 * Pushed documents are buffered and flushed to ClickHouse either when the
 * buffer reaches `batchSize` or after `flushIntervalMs` of inactivity,
 * whichever comes first. Completing the intake flushes the remainder.
 *
 * NOTE(review): a timer-triggered flush runs inside setTimeout, so a
 * rejected insertMany there surfaces as an unhandled rejection — confirm
 * callers expect that.
 */
public createInsertStream(options?: { batchSize?: number; flushIntervalMs?: number }): plugins.smartrx.ObservableIntake<Partial<T>> {
  const batchSize = options?.batchSize || 100;
  const flushIntervalMs = options?.flushIntervalMs || 1000;
  const intake = new plugins.smartrx.ObservableIntake<Partial<T>>();
  let buffer: Partial<T>[] = [];
  let flushTimer: ReturnType<typeof setTimeout> | null = null;

  // Swaps the buffer out before awaiting, so documents pushed while the
  // insert is in flight land in a fresh buffer and are not lost or resent.
  const flush = async () => {
    if (buffer.length === 0) return;
    const toInsert = buffer;
    buffer = [];
    await this.insertMany(toInsert);
  };

  // Debounced time-based flush: each call resets the inactivity timer.
  const scheduleFlush = () => {
    if (flushTimer) clearTimeout(flushTimer);
    flushTimer = setTimeout(async () => {
      await flush();
    }, flushIntervalMs);
  };

  intake.subscribe(
    // next: buffer the document; flush immediately on a full batch,
    // otherwise (re)arm the inactivity timer.
    async (doc) => {
      buffer.push(doc);
      if (buffer.length >= batchSize) {
        if (flushTimer) clearTimeout(flushTimer);
        await flush();
      } else {
        scheduleFlush();
      }
    },
    undefined,
    // complete: cancel any pending timer and flush what is left.
    async () => {
      if (flushTimer) clearTimeout(flushTimer);
      await flush();
    },
  );

  return intake;
}
|
||||||
|
|
||||||
|
// ---- QUERY ----
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a fluent query builder for this table
|
||||||
|
*/
|
||||||
|
public query(): ClickhouseQueryBuilder<T> {
|
||||||
|
return new ClickhouseQueryBuilder<T>(
|
||||||
|
this.options.tableName,
|
||||||
|
this.options.database,
|
||||||
|
this.db.clickhouseHttpClient,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- UPDATE ----
|
||||||
|
|
||||||
|
/**
 * Update rows matching a where condition (ClickHouse mutation - use sparingly)
 *
 * Issues ALTER TABLE ... UPDATE, an asynchronous mutation in ClickHouse;
 * waitForMutations() is awaited so the change is visible on return.
 *
 * @param set column/value pairs to assign (values are escaped; column names
 *        are interpolated as-is)
 * @param whereFn builds the WHERE clause on a fresh query builder
 * @throws Error when the resulting WHERE clause is empty, preventing an
 *         accidental full-table update
 */
public async update(
  set: Partial<T>,
  whereFn: (q: ClickhouseQueryBuilder<T>) => ClickhouseQueryBuilder<T>,
): Promise<void> {
  const qb = whereFn(new ClickhouseQueryBuilder<T>(this.options.tableName, this.options.database, this.db.clickhouseHttpClient));
  const whereClause = qb.buildWhereClause();
  if (!whereClause) {
    throw new Error('UPDATE requires a WHERE clause. Use a where condition to target specific rows.');
  }

  // Build "col = value, ..." with escaped values only.
  const setClauses = Object.entries(set)
    .map(([key, value]) => `${key} = ${escapeClickhouseValue(value)}`)
    .join(', ');

  await this.db.clickhouseHttpClient.mutatePromise(
    `ALTER TABLE ${this.options.database}.${this.options.tableName} UPDATE ${setClauses} WHERE ${whereClause}`
  );
  await this.waitForMutations();
}
|
||||||
|
|
||||||
|
// ---- DELETE ----
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete rows matching a where condition (ClickHouse mutation)
|
||||||
|
*/
|
||||||
|
public async deleteWhere(
|
||||||
|
whereFn: (q: ClickhouseQueryBuilder<T>) => ClickhouseQueryBuilder<T>,
|
||||||
|
): Promise<void> {
|
||||||
|
const qb = whereFn(new ClickhouseQueryBuilder<T>(this.options.tableName, this.options.database, this.db.clickhouseHttpClient));
|
||||||
|
const whereClause = qb.buildWhereClause();
|
||||||
|
if (!whereClause) {
|
||||||
|
throw new Error('DELETE requires a WHERE clause.');
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.db.clickhouseHttpClient.mutatePromise(
|
||||||
|
`ALTER TABLE ${this.options.database}.${this.options.tableName} DELETE WHERE ${whereClause}`
|
||||||
|
);
|
||||||
|
await this.waitForMutations();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Delete entries older than a given interval on a column (e.g. '30 DAY',
 * in ClickHouse INTERVAL syntax).
 *
 * NOTE(review): `interval` and the column name are interpolated into the
 * statement unescaped — only pass trusted, programmatic values here.
 */
public async deleteOlderThan(column: keyof T & string, interval: string): Promise<void> {
  await this.db.clickhouseHttpClient.mutatePromise(
    `ALTER TABLE ${this.options.database}.${this.options.tableName} DELETE WHERE ${String(column)} < now() - INTERVAL ${interval}`
  );
  // Mutations are asynchronous in ClickHouse; wait until this one is done.
  await this.waitForMutations();
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop the entire table
|
||||||
|
*/
|
||||||
|
public async drop(): Promise<void> {
|
||||||
|
await this.db.clickhouseHttpClient.queryPromise(
|
||||||
|
`DROP TABLE IF EXISTS ${this.options.database}.${this.options.tableName}`
|
||||||
|
);
|
||||||
|
this.columns = [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- UTILITIES ----
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for all pending mutations on this table to complete
|
||||||
|
*/
|
||||||
|
public async waitForMutations(): Promise<void> {
|
||||||
|
let pending = true;
|
||||||
|
while (pending) {
|
||||||
|
const mutations = await this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
SELECT count() AS cnt FROM system.mutations
|
||||||
|
WHERE is_done = 0 AND database = '${this.options.database}' AND table = '${this.options.tableName}' FORMAT JSONEachRow
|
||||||
|
`);
|
||||||
|
const count = mutations[0] ? parseInt(mutations[0].cnt, 10) : 0;
|
||||||
|
if (count === 0) {
|
||||||
|
pending = false;
|
||||||
|
} else {
|
||||||
|
await plugins.smartdelay.delayFor(1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the total row count
|
||||||
|
*/
|
||||||
|
public async getRowCount(): Promise<number> {
|
||||||
|
const result = await this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
SELECT count() AS cnt FROM ${this.options.database}.${this.options.tableName} FORMAT JSONEachRow
|
||||||
|
`);
|
||||||
|
return result[0] ? parseInt(result[0].cnt, 10) : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optimize table (useful for ReplacingMergeTree deduplication)
|
||||||
|
*/
|
||||||
|
public async optimize(final: boolean = false): Promise<void> {
|
||||||
|
const finalClause = final ? ' FINAL' : '';
|
||||||
|
await this.db.clickhouseHttpClient.queryPromise(
|
||||||
|
`OPTIMIZE TABLE ${this.options.database}.${this.options.tableName}${finalClause}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- OBSERVATION ----
|
||||||
|
|
||||||
|
/**
 * Watch for new entries via polling. Returns an RxJS Observable.
 *
 * Determines the current max(timestamp) once, then polls every
 * `pollInterval` ms for rows with a newer timestamp, emitting them in
 * ascending order. Unsubscribing stops the polling.
 */
public watch(options?: { pollInterval?: number }): plugins.smartrx.rxjs.Observable<T> {
  const pollInterval = options?.pollInterval || 1000;

  return new plugins.smartrx.rxjs.Observable<T>((observer) => {
    // Watermark in milliseconds since epoch; only rows newer than this emit.
    let lastTimestamp: number;
    let intervalId: ReturnType<typeof setInterval>;
    let stopped = false;

    // Seed the watermark with the newest existing timestamp, or "now"
    // for an empty table, so only genuinely new rows are emitted.
    const fetchInitialTimestamp = async () => {
      const result = await this.db.clickhouseHttpClient.queryPromise(`
        SELECT max(timestamp) as lastTimestamp
        FROM ${this.options.database}.${this.options.tableName} FORMAT JSONEachRow
      `);
      lastTimestamp = result.length && result[0].lastTimestamp
        ? new Date(result[0].lastTimestamp).getTime()
        : Date.now();
    };

    const fetchNewEntries = async () => {
      if (stopped) return;
      try {
        // toDateTime takes seconds; lastTimestamp is in milliseconds.
        const entries = await this.db.clickhouseHttpClient.queryPromise(`
          SELECT * FROM ${this.options.database}.${this.options.tableName}
          WHERE timestamp > toDateTime(${lastTimestamp / 1000})
          ORDER BY timestamp ASC FORMAT JSONEachRow
        `);
        for (const entry of entries) {
          observer.next(entry);
        }
        // Advance the watermark to the newest emitted row.
        if (entries.length > 0) {
          lastTimestamp = new Date(entries[entries.length - 1].timestamp).getTime();
        }
      } catch (err) {
        observer.error(err);
      }
    };

    const start = async () => {
      await fetchInitialTimestamp();
      intervalId = setInterval(fetchNewEntries, pollInterval);
    };

    start().catch((err) => observer.error(err));

    // Teardown: stop polling when the subscriber unsubscribes.
    return () => {
      stopped = true;
      clearInterval(intervalId);
    };
  });
}
|
||||||
|
|
||||||
|
// ---- PRIVATE HELPERS ----
|
||||||
|
|
||||||
|
/**
 * Make sure a column exists, adding it via ALTER TABLE when missing.
 * Checks the local column cache first, then a refreshed copy; a concurrent
 * "already exists" failure is treated as success.
 */
private async ensureColumn(name: string, type: TClickhouseColumnType): Promise<void> {
  // Check cached columns first
  const exists = this.columns.some((col) => col.name === name);
  if (exists) return;

  // Refresh and check again
  await this.updateColumns();
  if (this.columns.some((col) => col.name === name)) return;

  // Add column
  try {
    await this.db.clickhouseHttpClient.queryPromise(
      `ALTER TABLE ${this.options.database}.${this.options.tableName} ADD COLUMN \`${name}\` ${type}`
    );
  } catch (err) {
    // Column might have been added concurrently — ignore "already exists" errors
    if (!String(err).includes('already exists')) {
      throw err;
    }
  }
  // Refresh the cache so the new column is visible to subsequent checks.
  await this.updateColumns();
}
|
||||||
|
|
||||||
|
private flattenDocument(data: Partial<T>): Record<string, any> {
|
||||||
|
const flat = plugins.smartobject.toFlatObject(data);
|
||||||
|
const storageDoc: Record<string, any> = {};
|
||||||
|
for (const [key, value] of Object.entries(flat)) {
|
||||||
|
const type = detectClickhouseType(value);
|
||||||
|
if (type || key === 'timestamp') {
|
||||||
|
storageDoc[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return storageDoc;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async prepareDocument(data: Partial<T>): Promise<Record<string, any>> {
|
||||||
|
if (this.options.autoSchemaEvolution) {
|
||||||
|
await this.syncSchema(data as Record<string, any>);
|
||||||
|
}
|
||||||
|
return this.flattenDocument(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeInsert(docs: Record<string, any>[]): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.db.clickhouseHttpClient.insertPromise(
|
||||||
|
this.options.database,
|
||||||
|
this.options.tableName,
|
||||||
|
docs,
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
await this.handleInsertError();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Self-healing path for failed inserts: waits for the database to respond,
 * recreates the database and re-runs table setup.
 *
 * While healing is in flight `healingDeferred` is set; the insert entry
 * points check it and silently drop incoming writes until it clears.
 * NOTE(review): writes arriving during healing are lost — confirm this
 * best-effort behavior is intended.
 */
private async handleInsertError(): Promise<void> {
  // Another healing run is already in flight — don't start a second one.
  if (this.healingDeferred) return;

  this.healingDeferred = plugins.smartpromise.defer();
  console.log('ClickhouseTable: Insert error. Attempting self-healing...');

  try {
    await this.db.pingDatabaseUntilAvailable();
    await this.db.createDatabase();
    await this.setup();
  } finally {
    // Always release waiters and clear the flag, even if healing failed.
    this.healingDeferred.resolve();
    this.healingDeferred = null;
  }
}
|
||||||
|
}
|
||||||
166
ts/smartclickhouse.classes.httpclient.ts
Normal file
166
ts/smartclickhouse.classes.httpclient.ts
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
import * as plugins from './smartclickhouse.plugins.js';
|
||||||
|
|
||||||
|
/** Connection settings for the raw ClickHouse HTTP interface. */
export interface IClickhouseHttpClientOptions {
  /** ClickHouse user; sent as the X-ClickHouse-User header when set. */
  username?: string;
  /** ClickHouse password; sent as the X-ClickHouse-Key header when set. */
  password?: string;
  /** Base HTTP(S) URL of the ClickHouse server. */
  url: string;
}
|
||||||
|
|
||||||
|
export class ClickhouseHttpClient {
|
||||||
|
// STATIC
|
||||||
|
public static async createAndStart(optionsArg: IClickhouseHttpClientOptions) {
|
||||||
|
const clickhouseHttpInstance = new ClickhouseHttpClient(optionsArg);
|
||||||
|
await clickhouseHttpInstance.start();
|
||||||
|
return clickhouseHttpInstance;
|
||||||
|
}
|
||||||
|
|
||||||
|
// INSTANCE
|
||||||
|
public options: IClickhouseHttpClientOptions;
|
||||||
|
public webrequestInstance = new plugins.webrequest.WebrequestClient({
|
||||||
|
logging: false,
|
||||||
|
});
|
||||||
|
public computedProperties: {
|
||||||
|
connectionUrl: string;
|
||||||
|
parsedUrl: plugins.smarturl.Smarturl;
|
||||||
|
} = {
|
||||||
|
connectionUrl: null,
|
||||||
|
parsedUrl: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
constructor(optionsArg: IClickhouseHttpClientOptions) {
|
||||||
|
this.options = optionsArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async start() {
|
||||||
|
this.computedProperties.parsedUrl = plugins.smarturl.Smarturl.createFromUrl(this.options.url);
|
||||||
|
console.log(this.computedProperties.parsedUrl);
|
||||||
|
this.computedProperties.connectionUrl = this.computedProperties.parsedUrl.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async ping() {
|
||||||
|
const ping = await this.webrequestInstance.request(
|
||||||
|
this.computedProperties.connectionUrl.toString(),
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
timeout: 1000,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
return ping.status === 200 ? true : false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a query and return parsed JSONEachRow results
|
||||||
|
*/
|
||||||
|
public async queryPromise(queryArg: string): Promise<any[]> {
|
||||||
|
const returnArray: any[] = [];
|
||||||
|
const response = await this.webrequestInstance.request(
|
||||||
|
`${this.computedProperties.connectionUrl}?query=${encodeURIComponent(queryArg)}`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.getHeaders(),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
const responseText = await response.text();
|
||||||
|
|
||||||
|
// Check for errors (ClickHouse returns non-200 for errors)
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`ClickHouse query error: ${responseText.trim()}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.headers.get('X-ClickHouse-Format') === 'JSONEachRow') {
|
||||||
|
const jsonArray = responseText.split('\n');
|
||||||
|
for (const jsonArg of jsonArray) {
|
||||||
|
if (!jsonArg) continue;
|
||||||
|
returnArray.push(JSON.parse(jsonArg));
|
||||||
|
}
|
||||||
|
} else if (responseText.trim()) {
|
||||||
|
// Try to parse as JSONEachRow even without header (e.g. when FORMAT is in query)
|
||||||
|
const lines = responseText.trim().split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
if (!line) continue;
|
||||||
|
try {
|
||||||
|
returnArray.push(JSON.parse(line));
|
||||||
|
} catch {
|
||||||
|
// Not JSON — return raw text as single-element array
|
||||||
|
return [{ raw: responseText.trim() }];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return returnArray;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a typed query returning T[]
|
||||||
|
*/
|
||||||
|
public async queryTyped<T>(queryArg: string): Promise<T[]> {
|
||||||
|
return this.queryPromise(queryArg) as Promise<T[]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert documents as JSONEachRow
|
||||||
|
*/
|
||||||
|
public async insertPromise(databaseArg: string, tableArg: string, documents: any[]) {
|
||||||
|
const queryArg = `INSERT INTO ${databaseArg}.${tableArg} FORMAT JSONEachRow`;
|
||||||
|
const response = await this.webrequestInstance.request(
|
||||||
|
`${this.computedProperties.connectionUrl}?query=${encodeURIComponent(queryArg)}`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
body: documents.map((docArg) => JSON.stringify(docArg)).join('\n'),
|
||||||
|
headers: this.getHeaders(),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text();
|
||||||
|
throw new Error(`ClickHouse insert error: ${errorText.trim()}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert documents in batches of configurable size
|
||||||
|
*/
|
||||||
|
public async insertBatch(
|
||||||
|
databaseArg: string,
|
||||||
|
tableArg: string,
|
||||||
|
documents: any[],
|
||||||
|
batchSize: number = 10000,
|
||||||
|
) {
|
||||||
|
for (let i = 0; i < documents.length; i += batchSize) {
|
||||||
|
const batch = documents.slice(i, i + batchSize);
|
||||||
|
await this.insertPromise(databaseArg, tableArg, batch);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a mutation (ALTER TABLE UPDATE/DELETE) and optionally wait for completion
|
||||||
|
*/
|
||||||
|
public async mutatePromise(queryArg: string): Promise<void> {
|
||||||
|
const response = await this.webrequestInstance.request(
|
||||||
|
`${this.computedProperties.connectionUrl}?query=${encodeURIComponent(queryArg)}`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: this.getHeaders(),
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text();
|
||||||
|
throw new Error(`ClickHouse mutation error: ${errorText.trim()}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private getHeaders() {
|
||||||
|
const headers: { [key: string]: string } = {};
|
||||||
|
if (this.options.username) {
|
||||||
|
headers['X-ClickHouse-User'] = this.options.username;
|
||||||
|
}
|
||||||
|
if (this.options.password) {
|
||||||
|
headers['X-ClickHouse-Key'] = this.options.password;
|
||||||
|
}
|
||||||
|
return headers;
|
||||||
|
}
|
||||||
|
}
|
||||||
214
ts/smartclickhouse.classes.querybuilder.ts
Normal file
214
ts/smartclickhouse.classes.querybuilder.ts
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
import type { ClickhouseHttpClient } from './smartclickhouse.classes.httpclient.js';
|
||||||
|
import { ClickhouseResultSet } from './smartclickhouse.classes.resultset.js';
|
||||||
|
import { escapeClickhouseValue } from './smartclickhouse.types.js';
|
||||||
|
import type { TComparisonOperator } from './smartclickhouse.types.js';
|
||||||
|
|
||||||
|
/** One WHERE fragment: an expression plus how it joins the previous one. */
interface IWhereClause {
  /** '' for the first clause, otherwise 'AND' / 'OR'. */
  connector: 'AND' | 'OR' | '';
  expression: string;
}
|
||||||
|
|
||||||
|
export class ClickhouseQueryBuilder<T extends Record<string, any>> {
|
||||||
|
private selectColumns: string[] = ['*'];
|
||||||
|
private whereClauses: IWhereClause[] = [];
|
||||||
|
private orderByClauses: string[] = [];
|
||||||
|
private groupByClauses: string[] = [];
|
||||||
|
private havingClauses: string[] = [];
|
||||||
|
private limitValue: number | null = null;
|
||||||
|
private offsetValue: number | null = null;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private tableName: string,
|
||||||
|
private database: string,
|
||||||
|
private httpClient: ClickhouseHttpClient,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
// ---- SELECT ----
|
||||||
|
|
||||||
|
public select<K extends keyof T & string>(...columns: K[]): this {
|
||||||
|
this.selectColumns = columns;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public selectRaw(...expressions: string[]): this {
|
||||||
|
this.selectColumns = expressions;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- WHERE ----
|
||||||
|
|
||||||
|
public where<K extends keyof T & string>(
|
||||||
|
column: K,
|
||||||
|
operator: TComparisonOperator,
|
||||||
|
value: any,
|
||||||
|
): this {
|
||||||
|
this.whereClauses.push({
|
||||||
|
connector: '',
|
||||||
|
expression: this.buildCondition(column, operator, value),
|
||||||
|
});
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public and<K extends keyof T & string>(
|
||||||
|
column: K,
|
||||||
|
operator: TComparisonOperator,
|
||||||
|
value: any,
|
||||||
|
): this {
|
||||||
|
this.whereClauses.push({
|
||||||
|
connector: 'AND',
|
||||||
|
expression: this.buildCondition(column, operator, value),
|
||||||
|
});
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public or<K extends keyof T & string>(
|
||||||
|
column: K,
|
||||||
|
operator: TComparisonOperator,
|
||||||
|
value: any,
|
||||||
|
): this {
|
||||||
|
this.whereClauses.push({
|
||||||
|
connector: 'OR',
|
||||||
|
expression: this.buildCondition(column, operator, value),
|
||||||
|
});
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public whereRaw(expression: string): this {
|
||||||
|
this.whereClauses.push({
|
||||||
|
connector: this.whereClauses.length > 0 ? 'AND' : '',
|
||||||
|
expression,
|
||||||
|
});
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- ORDER BY ----
|
||||||
|
|
||||||
|
public orderBy(column: (keyof T & string) | string, direction: 'ASC' | 'DESC' = 'ASC'): this {
|
||||||
|
this.orderByClauses.push(`${column} ${direction}`);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- GROUP BY ----
|
||||||
|
|
||||||
|
public groupBy<K extends keyof T & string>(...columns: K[]): this {
|
||||||
|
this.groupByClauses.push(...columns);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public having(expression: string): this {
|
||||||
|
this.havingClauses.push(expression);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- LIMIT / OFFSET ----
|
||||||
|
|
||||||
|
public limit(count: number): this {
|
||||||
|
this.limitValue = count;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
public offset(count: number): this {
|
||||||
|
this.offsetValue = count;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- EXECUTION ----
|
||||||
|
|
||||||
|
public async execute(): Promise<ClickhouseResultSet<T>> {
|
||||||
|
const sql = this.toSQL();
|
||||||
|
const rows = await this.httpClient.queryTyped<T>(sql);
|
||||||
|
return new ClickhouseResultSet<T>(rows);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async first(): Promise<T | null> {
|
||||||
|
this.limitValue = 1;
|
||||||
|
const result = await this.execute();
|
||||||
|
return result.first();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async count(): Promise<number> {
|
||||||
|
const savedSelect = this.selectColumns;
|
||||||
|
this.selectColumns = ['count() as _count'];
|
||||||
|
const sql = this.toSQL();
|
||||||
|
this.selectColumns = savedSelect;
|
||||||
|
const rows = await this.httpClient.queryTyped<{ _count: string }>(sql);
|
||||||
|
return rows.length > 0 ? parseInt(rows[0]._count, 10) : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async toArray(): Promise<T[]> {
|
||||||
|
const result = await this.execute();
|
||||||
|
return result.toArray();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- SQL GENERATION ----
|
||||||
|
|
||||||
|
public toSQL(): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
|
||||||
|
parts.push(`SELECT ${this.selectColumns.join(', ')}`);
|
||||||
|
parts.push(`FROM ${this.database}.${this.tableName}`);
|
||||||
|
|
||||||
|
const whereClause = this.buildWhereClause();
|
||||||
|
if (whereClause) {
|
||||||
|
parts.push(`WHERE ${whereClause}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.groupByClauses.length > 0) {
|
||||||
|
parts.push(`GROUP BY ${this.groupByClauses.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.havingClauses.length > 0) {
|
||||||
|
parts.push(`HAVING ${this.havingClauses.join(' AND ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.orderByClauses.length > 0) {
|
||||||
|
parts.push(`ORDER BY ${this.orderByClauses.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.limitValue !== null) {
|
||||||
|
parts.push(`LIMIT ${this.limitValue}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.offsetValue !== null) {
|
||||||
|
parts.push(`OFFSET ${this.offsetValue}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
parts.push('FORMAT JSONEachRow');
|
||||||
|
|
||||||
|
return parts.join(' ');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build the WHERE clause string. Reused by ClickhouseTable for UPDATE/DELETE.
|
||||||
|
*/
|
||||||
|
public buildWhereClause(): string {
|
||||||
|
if (this.whereClauses.length === 0) return '';
|
||||||
|
|
||||||
|
return this.whereClauses
|
||||||
|
.map((clause, index) => {
|
||||||
|
if (index === 0) return clause.expression;
|
||||||
|
return `${clause.connector} ${clause.expression}`;
|
||||||
|
})
|
||||||
|
.join(' ');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---- PRIVATE ----
|
||||||
|
|
||||||
|
private buildCondition(column: string, operator: TComparisonOperator, value: any): string {
|
||||||
|
if (operator === 'IN' || operator === 'NOT IN') {
|
||||||
|
const escapedValues = Array.isArray(value)
|
||||||
|
? `(${value.map(escapeClickhouseValue).join(', ')})`
|
||||||
|
: escapeClickhouseValue(value);
|
||||||
|
return `${column} ${operator} ${escapedValues}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (operator === 'BETWEEN') {
|
||||||
|
if (Array.isArray(value) && value.length === 2) {
|
||||||
|
return `${column} BETWEEN ${escapeClickhouseValue(value[0])} AND ${escapeClickhouseValue(value[1])}`;
|
||||||
|
}
|
||||||
|
throw new Error('BETWEEN operator requires a two-element array value');
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${column} ${operator} ${escapeClickhouseValue(value)}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
44
ts/smartclickhouse.classes.resultset.ts
Normal file
44
ts/smartclickhouse.classes.resultset.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import * as plugins from './smartclickhouse.plugins.js';
|
||||||
|
|
||||||
|
export class ClickhouseResultSet<T> {
|
||||||
|
public rows: T[];
|
||||||
|
public rowCount: number;
|
||||||
|
|
||||||
|
constructor(rows: T[]) {
|
||||||
|
this.rows = rows;
|
||||||
|
this.rowCount = rows.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
public first(): T | null {
|
||||||
|
return this.rows.length > 0 ? this.rows[0] : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public last(): T | null {
|
||||||
|
return this.rows.length > 0 ? this.rows[this.rows.length - 1] : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public isEmpty(): boolean {
|
||||||
|
return this.rows.length === 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public toArray(): T[] {
|
||||||
|
return this.rows;
|
||||||
|
}
|
||||||
|
|
||||||
|
public map<U>(fn: (row: T) => U): U[] {
|
||||||
|
return this.rows.map(fn);
|
||||||
|
}
|
||||||
|
|
||||||
|
public filter(fn: (row: T) => boolean): ClickhouseResultSet<T> {
|
||||||
|
return new ClickhouseResultSet<T>(this.rows.filter(fn));
|
||||||
|
}
|
||||||
|
|
||||||
|
public toObservable(): plugins.smartrx.rxjs.Observable<T> {
|
||||||
|
return new plugins.smartrx.rxjs.Observable<T>((observer) => {
|
||||||
|
for (const row of this.rows) {
|
||||||
|
observer.next(row);
|
||||||
|
}
|
||||||
|
observer.complete();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
90
ts/smartclickhouse.classes.smartclickhouse.ts
Normal file
90
ts/smartclickhouse.classes.smartclickhouse.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import * as plugins from './smartclickhouse.plugins.js';
|
||||||
|
import { ClickhouseTable } from './smartclickhouse.classes.clickhousetable.js';
|
||||||
|
import { TimeDataTable } from './smartclickhouse.classes.timedatatable.js';
|
||||||
|
import { ClickhouseHttpClient } from './smartclickhouse.classes.httpclient.js';
|
||||||
|
import type { IClickhouseTableOptions } from './smartclickhouse.types.js';
|
||||||
|
|
||||||
|
/** Constructor options for SmartClickHouseDb. */
export interface IClickhouseConstructorOptions {
  /** Base HTTP(S) URL of the ClickHouse server. */
  url: string;
  /** Database to create (if missing) and use for tables. */
  database: string;
  username?: string;
  password?: string;
  /**
   * Allow services to exit when waiting for clickhouse startup.
   * This allows to leave the lifecycle flow to other processes
   * like a listening server.
   */
  unref?: boolean;
}
|
||||||
|
|
||||||
|
/**
 * Entry point for talking to a ClickHouse server over HTTP.
 * Owns the low-level ClickhouseHttpClient and hands out table objects.
 */
export class SmartClickHouseDb {
  public options: IClickhouseConstructorOptions;
  // Set by start(); all table/query helpers go through this client.
  public clickhouseHttpClient: ClickhouseHttpClient;

  constructor(optionsArg: IClickhouseConstructorOptions) {
    this.options = optionsArg;
  }

  /**
   * Starts the connection to the Clickhouse db
   * @param dropOld when true, drops the configured database before recreating it
   */
  public async start(dropOld = false) {
    console.log(`Connecting to default database first.`);
    this.clickhouseHttpClient = await ClickhouseHttpClient.createAndStart(this.options);
    // Block until the server answers pings before touching the schema.
    await this.pingDatabaseUntilAvailable();
    console.log(`Create database ${this.options.database}, if it does not exist...`);
    await this.createDatabase(dropOld);
  }

  // Create the configured database, optionally dropping an existing one first.
  public async createDatabase(dropOld: boolean = false) {
    if (dropOld) {
      await this.clickhouseHttpClient.queryPromise(`DROP DATABASE IF EXISTS ${this.options.database}`);
    }
    await this.clickhouseHttpClient.queryPromise(
      `CREATE DATABASE IF NOT EXISTS ${this.options.database}`
    );
  }

  // Ping every 5 seconds until the server answers; never gives up on its own.
  public async pingDatabaseUntilAvailable() {
    let available = false;
    while (!available) {
      available = await this.clickhouseHttpClient.ping().catch(() => false);
      if (!available) {
        console.log(`NOT OK: tried pinging ${this.options.url}... Trying again in 5 seconds.`);
        // unref (when set) lets the process exit while this delay is pending.
        await plugins.smartdelay.delayFor(5000, null, this.options.unref);
      }
    }
  }

  // ---- NEW: Generic typed table factory ----

  /**
   * Create a typed ClickHouse table with full configuration
   */
  public async createTable<T extends Record<string, any>>(
    options: IClickhouseTableOptions<T>,
  ): Promise<ClickhouseTable<T>> {
    // Fall back to this instance's database when none is given per-table.
    return ClickhouseTable.create<T>(this, {
      ...options,
      database: options.database || this.options.database,
    });
  }

  // ---- BACKWARD COMPAT: TimeDataTable factory ----

  /**
   * Get a TimeDataTable (backward compatible)
   */
  public async getTable(tableName: string): Promise<TimeDataTable> {
    return TimeDataTable.getTable(this, tableName);
  }

  // ---- RAW QUERY ----

  /**
   * Execute a raw SQL query and return typed results
   */
  public async query<T = any>(sql: string): Promise<T[]> {
    return this.clickhouseHttpClient.queryTyped<T>(sql);
  }
}
|
||||||
113
ts/smartclickhouse.classes.timedatatable.ts
Normal file
113
ts/smartclickhouse.classes.timedatatable.ts
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import * as plugins from './smartclickhouse.plugins.js';
|
||||||
|
import type { SmartClickHouseDb } from './smartclickhouse.classes.smartclickhouse.js';
|
||||||
|
import { ClickhouseTable } from './smartclickhouse.classes.clickhousetable.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a pre-configured ClickhouseTable for time-series data.
|
||||||
|
* This is the backward-compatible equivalent of the old TimeDataTable class.
|
||||||
|
*
|
||||||
|
* The table uses MergeTree engine, orders by timestamp, and has auto-schema evolution enabled.
|
||||||
|
*/
|
||||||
|
export async function createTimeDataTable(
|
||||||
|
db: SmartClickHouseDb,
|
||||||
|
tableName: string,
|
||||||
|
retainDataForDays: number = 30,
|
||||||
|
): Promise<TimeDataTable> {
|
||||||
|
const table = new TimeDataTable(db, tableName, retainDataForDays);
|
||||||
|
await table.setup();
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* TimeDataTable — a ClickhouseTable pre-configured for time-series data.
|
||||||
|
* Provides backward-compatible convenience methods (addData, getLastEntries, etc.).
|
||||||
|
*/
|
||||||
|
export class TimeDataTable extends ClickhouseTable<any> {
|
||||||
|
/**
|
||||||
|
* Static factory for backward compatibility
|
||||||
|
*/
|
||||||
|
public static async getTable(
|
||||||
|
smartClickHouseDbRefArg: SmartClickHouseDb,
|
||||||
|
tableNameArg: string,
|
||||||
|
retainDataForDays: number = 30,
|
||||||
|
): Promise<TimeDataTable> {
|
||||||
|
return createTimeDataTable(smartClickHouseDbRefArg, tableNameArg, retainDataForDays);
|
||||||
|
}
|
||||||
|
|
||||||
|
constructor(db: SmartClickHouseDb, tableName: string, retainDataForDays: number = 30) {
|
||||||
|
super(db, {
|
||||||
|
tableName,
|
||||||
|
engine: { engine: 'MergeTree' },
|
||||||
|
orderBy: 'timestamp' as any,
|
||||||
|
retainDataForDays,
|
||||||
|
autoSchemaEvolution: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Insert data with auto-schema evolution and object flattening.
|
||||||
|
* Backward-compatible: accepts arbitrary JSON objects with a timestamp field.
|
||||||
|
*/
|
||||||
|
public async addData(dataArg: any): Promise<any> {
|
||||||
|
if (!dataArg.timestamp || typeof dataArg.timestamp !== 'number') {
|
||||||
|
throw new Error('timestamp must be of type number');
|
||||||
|
}
|
||||||
|
return this.insert(dataArg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the last N entries ordered by timestamp DESC
|
||||||
|
*/
|
||||||
|
public async getLastEntries(count: number): Promise<any[]> {
|
||||||
|
return this.query()
|
||||||
|
.orderBy('timestamp' as any, 'DESC')
|
||||||
|
.limit(count)
|
||||||
|
.toArray();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries newer than a unix timestamp (in milliseconds)
|
||||||
|
*/
|
||||||
|
public async getEntriesNewerThan(unixTimestamp: number): Promise<any[]> {
|
||||||
|
return this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
SELECT * FROM ${this.options.database}.${this.options.tableName}
|
||||||
|
WHERE timestamp > toDateTime(${unixTimestamp / 1000}) FORMAT JSONEachRow
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get entries between two unix timestamps (in milliseconds)
|
||||||
|
*/
|
||||||
|
public async getEntriesBetween(unixTimestampStart: number, unixTimestampEnd: number): Promise<any[]> {
|
||||||
|
return this.db.clickhouseHttpClient.queryPromise(`
|
||||||
|
SELECT * FROM ${this.options.database}.${this.options.tableName}
|
||||||
|
WHERE timestamp > toDateTime(${unixTimestampStart / 1000})
|
||||||
|
AND timestamp < toDateTime(${unixTimestampEnd / 1000}) FORMAT JSONEachRow
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete entries older than N days
|
||||||
|
*/
|
||||||
|
public async deleteOldEntries(days: number): Promise<void> {
|
||||||
|
await this.db.clickhouseHttpClient.mutatePromise(`
|
||||||
|
ALTER TABLE ${this.options.database}.${this.options.tableName}
|
||||||
|
DELETE WHERE timestamp < now() - INTERVAL ${days} DAY
|
||||||
|
`);
|
||||||
|
await this.waitForMutations();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Drop the table (backward-compatible alias for drop())
|
||||||
|
*/
|
||||||
|
public async delete(): Promise<void> {
|
||||||
|
return this.drop();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Watch for new entries via polling (backward-compatible wrapper)
|
||||||
|
*/
|
||||||
|
public watchNewEntries(): plugins.smartrx.rxjs.Observable<any> {
|
||||||
|
return this.watch();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,5 +1,9 @@
|
|||||||
import * as clickhouse from 'clickhouse';
|
// @pushrocks scope
|
||||||
|
import * as smartdelay from '@push.rocks/smartdelay';
|
||||||
|
import * as smartobject from '@push.rocks/smartobject';
|
||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as smartrx from '@push.rocks/smartrx';
|
||||||
|
import * as smarturl from '@push.rocks/smarturl';
|
||||||
|
import * as webrequest from '@push.rocks/webrequest';
|
||||||
|
|
||||||
export {
|
export { smartdelay, smartobject, smartpromise, smartrx, smarturl, webrequest };
|
||||||
clickhouse
|
|
||||||
}
|
|
||||||
|
|||||||
134
ts/smartclickhouse.types.ts
Normal file
134
ts/smartclickhouse.types.ts
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
// ============================================================
// Column Data Types
// ============================================================

/**
 * ClickHouse column data type. The listed literals get editor autocomplete;
 * any other valid ClickHouse type string is also accepted via the
 * `(string & {})` escape hatch at the end of the union.
 */
export type TClickhouseColumnType =
  | 'String'
  | 'UInt8' | 'UInt16' | 'UInt32' | 'UInt64'
  | 'Int8' | 'Int16' | 'Int32' | 'Int64'
  | 'Float32' | 'Float64'
  | 'Bool'
  | 'Date' | 'Date32'
  | 'DateTime' | 'DateTime64'
  | 'UUID'
  | 'IPv4' | 'IPv6'
  | (string & {}); // allow arbitrary ClickHouse types like "DateTime64(3, 'Europe/Berlin')"
|
||||||
|
|
||||||
|
// ============================================================
// Engine Configuration
// ============================================================

/** Supported MergeTree-family table engines. */
export type TClickhouseEngine =
  | 'MergeTree'
  | 'ReplacingMergeTree'
  | 'SummingMergeTree'
  | 'AggregatingMergeTree'
  | 'CollapsingMergeTree'
  | 'VersionedCollapsingMergeTree';

/** Engine selection plus the extra columns some engines require. */
export interface IEngineConfig {
  engine: TClickhouseEngine;
  /** For ReplacingMergeTree: the version column name */
  versionColumn?: string;
  /** For CollapsingMergeTree: the sign column name */
  signColumn?: string;
}
|
||||||
|
|
||||||
|
// ============================================================
// Column Definition
// ============================================================

/** Explicit column declaration used when creating a table. */
export interface IColumnDefinition {
  /** Column name as it appears in the table */
  name: string;
  /** ClickHouse data type, e.g. 'String' or 'DateTime64(3)' */
  type: TClickhouseColumnType;
  /** Optional default expression for the column */
  defaultExpression?: string;
  /** Optional compression codec expression */
  codec?: string;
}
|
||||||
|
|
||||||
|
// ============================================================
// Table Options
// ============================================================

/** Full configuration for creating a typed ClickHouse table. */
export interface IClickhouseTableOptions<T = any> {
  /** Table name (unqualified; the database is configured separately) */
  tableName: string;
  /** Target database; when omitted, the client-level default is used */
  database?: string;
  /** Table engine configuration */
  engine?: IEngineConfig;
  /** ORDER BY key column(s), constrained to string keys of T */
  orderBy: (keyof T & string) | (keyof T & string)[];
  /** Raw PARTITION BY expression */
  partitionBy?: string;
  /** PRIMARY KEY column(s), constrained to string keys of T */
  primaryKey?: (keyof T & string) | (keyof T & string)[];
  /** Row TTL based on a column plus an interval expression */
  ttl?: {
    column: keyof T & string;
    interval: string; // e.g., '30 DAY', '1 MONTH'
  };
  /** Explicit column definitions */
  columns?: IColumnDefinition[];
  /** Enable auto-schema evolution (add columns from data). Default: true */
  autoSchemaEvolution?: boolean;
  /** Data retention in days (shorthand for ttl). If ttl is set, this is ignored. */
  retainDataForDays?: number;
}
|
||||||
|
|
||||||
|
// ============================================================
// Column Info from system.columns
// ============================================================

/**
 * Row shape returned when querying ClickHouse's system.columns table.
 * NOTE(review): the byte counts and `position` are typed string — presumably
 * UInt64 values serialized as strings by the JSON output format; confirm
 * against the HTTP client's result handling.
 */
export interface IColumnInfo {
  database: string;
  table: string;
  name: string;
  type: string;
  position: string;
  default_kind: string;
  default_expression: string;
  data_compressed_bytes: string;
  data_uncompressed_bytes: string;
  marks_bytes: string;
  comment: string;
  is_in_partition_key: 0 | 1;
  is_in_sorting_key: 0 | 1;
  is_in_primary_key: 0 | 1;
  is_in_sampling_key: 0 | 1;
  compression_codec: string;
}
|
||||||
|
|
||||||
|
// ============================================================
// Comparison Operators for Query Builder
// ============================================================

/** SQL comparison operators accepted by the query builder. */
export type TComparisonOperator =
  | '='
  | '!='
  | '>'
  | '>='
  | '<'
  | '<='
  | 'LIKE'
  | 'NOT LIKE'
  | 'IN'
  | 'NOT IN'
  | 'BETWEEN';
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// Value Escaping (SQL Injection Prevention)
|
||||||
|
// ============================================================
|
||||||
|
export function escapeClickhouseValue(value: any): string {
|
||||||
|
if (value === null || value === undefined) return 'NULL';
|
||||||
|
if (typeof value === 'number') return String(value);
|
||||||
|
if (typeof value === 'boolean') return value ? '1' : '0';
|
||||||
|
if (value instanceof Date) return `'${value.toISOString().replace('T', ' ').replace('Z', '')}'`;
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return `(${value.map(escapeClickhouseValue).join(', ')})`;
|
||||||
|
}
|
||||||
|
// String: escape single quotes and backslashes
|
||||||
|
return `'${String(value).replace(/\\/g, '\\\\').replace(/'/g, "\\'")}'`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================================
|
||||||
|
// ClickHouse Type Detection from JS Values
|
||||||
|
// ============================================================
|
||||||
|
export function detectClickhouseType(value: any): TClickhouseColumnType | null {
|
||||||
|
if (value === null || value === undefined) return null;
|
||||||
|
if (typeof value === 'string') return 'String';
|
||||||
|
if (typeof value === 'number') return 'Float64';
|
||||||
|
if (typeof value === 'boolean') return 'UInt8';
|
||||||
|
if (value instanceof Array) {
|
||||||
|
if (value.length === 0) return null;
|
||||||
|
const elementType = detectClickhouseType(value[0]);
|
||||||
|
if (!elementType) return null;
|
||||||
|
return `Array(${elementType})` as TClickhouseColumnType;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
14
tsconfig.json
Normal file
14
tsconfig.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"experimentalDecorators": true,
|
||||||
|
"useDefineForClassFields": false,
|
||||||
|
"target": "ES2022",
|
||||||
|
"module": "NodeNext",
|
||||||
|
"moduleResolution": "NodeNext",
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"verbatimModuleSyntax": true
|
||||||
|
},
|
||||||
|
"exclude": [
|
||||||
|
"dist_*/**/*.d.ts"
|
||||||
|
]
|
||||||
|
}
|
||||||
17
tslint.json
17
tslint.json
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"extends": ["tslint:latest", "tslint-config-prettier"],
|
|
||||||
"rules": {
|
|
||||||
"semicolon": [true, "always"],
|
|
||||||
"no-console": false,
|
|
||||||
"ordered-imports": false,
|
|
||||||
"object-literal-sort-keys": false,
|
|
||||||
"member-ordering": {
|
|
||||||
"options":{
|
|
||||||
"order": [
|
|
||||||
"static-method"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"defaultSeverity": "warning"
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user