Compare commits

...

33 Commits

SHA1 Message Date
7ceaf694fe 1.0.33 2023-07-04 09:33:19 +02:00
391c6bd45d fix(core): update 2023-07-04 09:33:18 +02:00
1a702071c6 1.0.32 2023-07-04 09:13:45 +02:00
0fe2f6a4ae fix(core): update 2023-07-04 09:13:44 +02:00
20d04413c9 1.0.31 2023-07-04 09:13:15 +02:00
e56439e9f4 fix(core): update 2023-07-04 09:13:14 +02:00
c9a9434cd9 1.0.30 2019-11-03 00:51:18 +01:00
5d98dd9089 fix(core): update 2019-11-03 00:51:18 +01:00
2d635fdf7c 1.0.29 2019-11-03 00:49:35 +01:00
1dbf3724d0 fix(core): update 2019-11-03 00:49:35 +01:00
cc7eb8c139 1.0.28 2019-11-03 00:47:19 +01:00
0e01ecbd1a fix(core): update 2019-11-03 00:47:18 +01:00
2d21b40a76 1.0.27 2019-11-02 01:42:04 +01:00
2d1a5cdc50 fix(core): update 2019-11-02 01:42:03 +01:00
20a41d3381 1.0.26 2018-12-13 00:19:50 +01:00
b2019b33f8 fix(core): update 2018-12-13 00:19:50 +01:00
1ab582db51 1.0.25 2018-11-26 12:24:38 +01:00
9c87f5ee5e fix(core): update 2018-11-26 12:24:37 +01:00
ef9cb193d5 1.0.24 2018-11-25 22:06:25 +01:00
9f706e0a70 fix(core): update 2018-11-25 22:06:25 +01:00
b2d4b82532 1.0.23 2018-11-25 22:02:57 +01:00
63713f4cd2 fix(core): update 2018-11-25 22:02:57 +01:00
5d3bc13126 1.0.22 2018-11-11 02:00:41 +01:00
4b30234cb6 fix(core): update 2018-11-11 02:00:40 +01:00
271971a373 1.0.21 2018-11-10 01:48:44 +01:00
d636dab664 fix(clean up old indices): update 2018-11-10 01:48:44 +01:00
5f94db8d5d 1.0.20 2018-11-09 23:15:11 +01:00
f9866076ca fix(core): update 2018-11-09 23:15:11 +01:00
aa43a221a0 1.0.19 2018-11-07 12:44:49 +01:00
cb63b305ad fix(core): update 2018-11-07 12:44:49 +01:00
6fde0544f5 1.0.18 2018-11-07 11:53:36 +01:00
6ed2b6e993 1.0.17 2018-11-07 11:38:53 +01:00
4f1df305ed fix(core): update 2018-11-07 11:38:53 +01:00
33 changed files with 5288 additions and 1572 deletions

View File

@@ -0,0 +1,66 @@
name: Default (not tags)
on:
  push:
    tags-ignore:
      - '**'
env:
  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
      - name: Run npm prepare
        run: npmci npm prepare
      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true
      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true
  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test
      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build

View File

@@ -0,0 +1,124 @@
name: Default (tags)
on:
  push:
    tags:
      - '*'
env:
  IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare
      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true
      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true
  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare
      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test
      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build
  release:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare
      - name: Release
        run: |
          npmci node install stable
          npmci npm publish
  metadata:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    continue-on-error: true
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @shipzone/npmci
          npmci npm prepare
      - name: Code quality
        run: |
          npmci command npm install -g typescript
          npmci npm install
      - name: Trigger
        run: npmci trigger
      - name: Build docs and upload artifacts
        run: |
          npmci node install stable
          npmci npm install
          pnpm install -g @gitzone/tsdoc
          npmci command tsdoc
        continue-on-error: true

17 .gitignore vendored
View File

@@ -1,5 +1,20 @@
.nogit/
node_modules/
# artifacts
coverage/
public/
pages/
# installs
node_modules/
# caches
.yarn/
.cache/
.rpt2_cache
# builds
dist/
dist_*/
# custom

View File

@@ -1,131 +1,122 @@
# gitzone standard
image: hosttoday/ht-docker-node:npmci
# gitzone ci_default
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
cache:
paths:
- .npmci_cache/
key: "$CI_BUILD_STAGE"
- .npmci_cache/
key: '$CI_BUILD_STAGE'
stages:
- security
- test
- release
- metadata
- security
- test
- release
- metadata
before_script:
- pnpm install -g pnpm
- pnpm install -g @shipzone/npmci
- npmci npm prepare
# ====================
# security stage
# ====================
mirror:
# ====================
# security stage
# ====================
auditProductionDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci git mirror
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
tags:
- docker
- notpriv
- lossless
- docker
allow_failure: true
snyk:
auditDevDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci npm prepare
- npmci command npm install -g snyk
- npmci command npm install --ignore-scripts
- npmci command snyk test
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --dev
tags:
- docker
- notpriv
- lossless
- docker
allow_failure: true
# ====================
# test stage
# ====================
testLEGACY:
stage: test
script:
- npmci npm prepare
- npmci node install legacy
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
- notpriv
allow_failure: true
testLTS:
testStable:
stage: test
script:
- npmci npm prepare
- npmci node install lts
- npmci npm install
- npmci npm test
- npmci node install stable
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
- notpriv
testSTABLE:
- docker
testBuild:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci npm test
- npmci node install stable
- npmci npm install
- npmci npm build
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
- notpriv
- docker
release:
stage: release
script:
- npmci node install stable
- npmci npm publish
- npmci node install stable
- npmci npm publish
only:
- tags
- tags
tags:
- docker
- notpriv
- lossless
- docker
- notpriv
# ====================
# metadata stage
# ====================
codequality:
stage: metadata
image: docker:stable
allow_failure: true
services:
- docker:stable-dind
only:
- tags
script:
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
- docker run
--env SOURCE_CODE="$PWD"
--volume "$PWD":/code
--volume /var/run/docker.sock:/var/run/docker.sock
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
artifacts:
paths: [codeclimate.json]
- npmci command npm install -g typescript
- npmci npm prepare
- npmci npm install
tags:
- docker
- priv
- lossless
- docker
- priv
trigger:
stage: metadata
script:
- npmci trigger
- npmci trigger
only:
- tags
- tags
tags:
- docker
- notpriv
- lossless
- docker
- notpriv
pages:
image: hosttoday/ht-docker-node:npmci
stage: metadata
script:
- npmci command npm install -g typedoc typescript
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci command typedoc --module "commonjs" --target "ES2016" --out public/ ts/
- npmci command npm run buildDocs
tags:
- lossless
- docker
- notpriv
only:
@@ -133,15 +124,5 @@ pages:
artifacts:
expire_in: 1 week
paths:
- public
allow_failure: true
windowsCompatibility:
image: stefanscherer/node-windows:10-build-tools
stage: metadata
script:
- npm install & npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- windows
- public
allow_failure: true

4 .snyk
View File

@@ -1,4 +0,0 @@
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
version: v1.12.0
ignore: {}
patch: {}

11 .vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,11 @@
{
"version": "0.2.0",
"configurations": [
{
"command": "npm test",
"name": "Run npm test",
"request": "launch",
"type": "node-terminal"
}
]
}

26 .vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,26 @@
{
"json.schemas": [
{
"fileMatch": ["/npmextra.json"],
"schema": {
"type": "object",
"properties": {
"npmci": {
"type": "object",
"description": "settings for npmci"
},
"gitzone": {
"type": "object",
"description": "settings for gitzone",
"properties": {
"projectType": {
"type": "string",
"enum": ["website", "element", "service", "npm", "wcc"]
}
}
}
}
}
}
]
}

View File

@@ -1,31 +0,0 @@
import { Client as ElasticClient } from 'elasticsearch';
import { ILogContext } from 'smartlog-interfaces';
import { LogScheduler } from './elasticlog.classes.logscheduler';
export interface IStandardLogParams {
message: string;
severity: string;
}
export interface IElasticLogConstructorOptions {
port: number;
domain: string;
ssl: boolean;
user?: string;
pass?: string;
logContext: ILogContext;
}
export declare class ElasticLog<T> {
client: ElasticClient;
logContext: ILogContext;
logScheduler: LogScheduler;
/**
* sets up an instance of Elastic log
* @param optionsArg
*/
constructor(optionsArg: IElasticLogConstructorOptions);
/**
* computes the host string from the constructor options
* @param optionsArg
*/
private computeHostString(optionsArg);
log(logObject: IStandardLogParams, scheduleOverwrite?: boolean): Promise<void>;
}

View File

@@ -1,77 +0,0 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
// interfaces
const elasticsearch_1 = require("elasticsearch");
// other classes
const elasticlog_classes_logscheduler_1 = require("./elasticlog.classes.logscheduler");
class ElasticLog {
/**
* sets up an instance of Elastic log
* @param optionsArg
*/
constructor(optionsArg) {
this.logScheduler = new elasticlog_classes_logscheduler_1.LogScheduler(this);
this.logContext = optionsArg.logContext;
this.client = new elasticsearch_1.Client({
host: this.computeHostString(optionsArg),
log: 'trace'
});
}
/**
* computes the host string from the constructor options
* @param optionsArg
*/
computeHostString(optionsArg) {
let hostString = `${optionsArg.domain}:${optionsArg.port}`;
if (optionsArg.user && optionsArg.pass) {
hostString = `${optionsArg.user}:${optionsArg.pass}@${hostString}`;
}
if (optionsArg.ssl) {
hostString = `https://${hostString}`;
}
else {
hostString = `http://${hostString}`;
}
return hostString;
}
log(logObject, scheduleOverwrite = false) {
return __awaiter(this, void 0, void 0, function* () {
const now = new Date();
if (this.logScheduler.logsScheduled && !scheduleOverwrite) {
this.logScheduler.scheduleLog(logObject);
return;
}
this.client.index({
index: `logs-${now.getFullYear()}.${('0' + (now.getMonth() + 1)).slice(-2)}.${now.getDate()}`,
type: 'log',
body: {
'@timestamp': now.toISOString(),
zone: this.logContext.zone,
container: this.logContext.containerName,
environment: this.logContext.environment,
severity: logObject.severity,
message: logObject.message
}
}, (error, response) => {
if (error) {
console.log('ElasticLog encountered an error:');
console.log(error);
this.logScheduler.addFailedLog(logObject);
}
else {
console.log(`ElasticLog: ${logObject.message}`);
}
});
});
}
}
exports.ElasticLog = ElasticLog;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZWxhc3RpY2xvZy5jbGFzc2VzLmVsYXN0aWNsb2cuanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9lbGFzdGljbG9nLmNsYXNzZXMuZWxhc3RpY2xvZy50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7O0FBQUEsYUFBYTtBQUNiLGlEQUF3RDtBQUd4RCxnQkFBZ0I7QUFDaEIsdUZBQWlFO0FBZ0JqRTtJQUtFOzs7T0FHRztJQUNILFlBQVksVUFBeUM7UUFOckQsaUJBQVksR0FBRyxJQUFJLDhDQUFZLENBQUMsSUFBSSxDQUFDLENBQUM7UUFPcEMsSUFBSSxDQUFDLFVBQVUsR0FBRyxVQUFVLENBQUMsVUFBVSxDQUFDO1FBQ3hDLElBQUksQ0FBQyxNQUFNLEdBQUcsSUFBSSxzQkFBYSxDQUFDO1lBQzlCLElBQUksRUFBRSxJQUFJLENBQUMsaUJBQWlCLENBQUMsVUFBVSxDQUFDO1lBQ3hDLEdBQUcsRUFBRSxPQUFPO1NBQ2IsQ0FBQyxDQUFDO0lBQ0wsQ0FBQztJQUVEOzs7T0FHRztJQUNLLGlCQUFpQixDQUFDLFVBQXlDO1FBQ2pFLElBQUksVUFBVSxHQUFHLEdBQUcsVUFBVSxDQUFDLE1BQU0sSUFBSSxVQUFVLENBQUMsSUFBSSxFQUFFLENBQUM7UUFDM0QsRUFBRSxDQUFDLENBQUMsVUFBVSxDQUFDLElBQUksSUFBSSxVQUFVLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQztZQUN2QyxVQUFVLEdBQUcsR0FBRyxVQUFVLENBQUMsSUFBSSxJQUFJLFVBQVUsQ0FBQyxJQUFJLElBQUksVUFBVSxFQUFFLENBQUM7UUFDckUsQ0FBQztRQUNELEVBQUUsQ0FBQyxDQUFDLFVBQVUsQ0FBQyxHQUFHLENBQUMsQ0FBQyxDQUFDO1lBQ25CLFVBQVUsR0FBRyxXQUFXLFVBQVUsRUFBRSxDQUFDO1FBQ3ZDLENBQUM7UUFBQyxJQUFJLENBQUMsQ0FBQztZQUNOLFVBQVUsR0FBRyxVQUFVLFVBQVUsRUFBRSxDQUFDO1FBQ3RDLENBQUM7UUFDRCxNQUFNLENBQUMsVUFBVSxDQUFDO0lBQ3BCLENBQUM7SUFFSyxHQUFHLENBQUMsU0FBNkIsRUFBRSxpQkFBaUIsR0FBRyxLQUFLOztZQUNoRSxNQUFNLEdBQUcsR0FBRyxJQUFJLElBQUksRUFBRSxDQUFDO1lBQ3ZCLEVBQUUsQ0FBQyxDQUFDLElBQUksQ0FBQyxZQUFZLENBQUMsYUFBYSxJQUFJLENBQUMsaUJBQWlCLENBQUMsQ0FBQyxDQUFDO2dCQUMxRCxJQUFJLENBQUMsWUFBWSxDQUFDLFdBQVcsQ0FBQyxTQUFTLENBQUMsQ0FBQztnQkFDekMsTUFBTSxDQUFDO1lBQ1QsQ0FBQztZQUNELElBQUksQ0FBQyxNQUFNLENBQUMsS0FBSyxDQUNmO2dCQUNFLEtBQUssRUFBRSxRQUFRLEdBQUcsQ0FBQyxXQUFXLEVBQUUsSUFBSSxDQUFDLEdBQUcsR0FBRyxDQUFDLEdBQUcsQ0FBQyxRQUFRLEVBQUUsR0FBRyxDQUFDLENBQUMsQ0FBQyxDQUFDLEtBQUssQ0FDcEUsQ0FBQyxDQUFDLENBQ0gsSUFBSSxHQUFHLENBQUMsT0FBTyxFQUFFLEVBQUU7Z0JBQ3BCLElBQUksRUFBRSxLQUFLO2dCQUNYLElBQUksRUFBRTtvQkFDSixZQUFZLEVBQUUsR0FBRyxDQUFDLFdBQVcsRUFBRTtvQkFDL0IsSUFBSSxFQUFFLElBQUksQ0FBQyxVQUFVLENBQUMsSUFBSTtvQkFDMUIsU0FBUyxFQUFFLElBQUksQ0FBQyxVQUFVLENBQUMsYUFBYTtvQkFDeEMsV0FBVyxFQUFFLElBQUksQ0FBQyxVQUFVLENBQUMsV0FBVztvQkFDeEMsUUFBUSxFQUFFLFNBQVMsQ0FBQyxRQUFRO29CQUM1QixPQUFPLEVBQUUsU0FBUyxDQUFDLE9BQU87aUJBQzNCO2FBQ0YsRUFDRCxDQUFDLEtBQUssRUFBRSxRQUFRLEVBQUUsRUFBRTtnQkFDbEIsRUFBRSxDQUFDLENBQUMsS0FBSyxDQUFDLENBQUMsQ0FBQztvQkFDVixPQUFPLENBQUMsR0FBRyxDQUFDLGtDQUFrQyxDQUFDLENBQUM7b0JBQ2hELE9BQU8sQ0FBQyxHQUFHLENBQUMsS0FBSyxDQUFDLENBQUM7b0JBQ25CLElBQUksQ0FBQyxZQUFZLENBQUMsWUFBWSxDQUFDLFNBQVMsQ0FBQyxDQUFDO2dCQUM1QyxDQUFDO2dCQUFDLElBQUksQ0FBQyxDQUFDO29CQUNOLE9BQU8sQ0FBQyxHQUFHLENBQUMsZUFBZSxTQUFTLENBQUMsT0FBTyxFQUFFLENBQUMsQ0FBQztnQkFDbEQsQ0FBQztZQUNILENBQUMsQ0FDRixDQUFDO1FBQ0osQ0FBQztLQUFBO0NBQ0Y7QUFsRUQsZ0NBa0VDIn0=

View File

@@ -1,11 +0,0 @@
import { ElasticLog, IStandardLogParams } from './elasticlog.classes.elasticlog';
export declare class LogScheduler {
elasticLogRef: ElasticLog<any>;
logsScheduled: boolean;
logStorage: any[];
constructor(elasticLogRefArg: ElasticLog<any>);
addFailedLog(objectArg: any | IStandardLogParams): void;
scheduleLog(logObject: any): void;
setRetry(): void;
deferSend(): void;
}

View File

@@ -1,42 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class LogScheduler {
constructor(elasticLogRefArg) {
this.logsScheduled = false;
this.logStorage = [];
this.elasticLogRef = elasticLogRefArg;
}
addFailedLog(objectArg) {
this.logStorage.push(objectArg);
this.setRetry();
}
scheduleLog(logObject) {
this.logStorage.push(logObject);
}
setRetry() {
setTimeout(() => {
const oldStorage = this.logStorage;
this.logStorage = [];
for (let logObject of oldStorage) {
this.elasticLogRef.log(logObject, true);
}
if (this.logStorage.length === 0) {
console.log('ElasticLog retry success!!!');
this.logsScheduled = false;
}
else {
console.log('ElasticLog retry failed');
this.setRetry();
}
}, 5000);
}
deferSend() {
if (!this.logsScheduled) {
console.log('Retry ElasticLog in 5 seconds!');
this.logsScheduled = true;
this.setRetry();
}
}
}
exports.LogScheduler = LogScheduler;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZWxhc3RpY2xvZy5jbGFzc2VzLmxvZ3NjaGVkdWxlci5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3RzL2VsYXN0aWNsb2cuY2xhc3Nlcy5sb2dzY2hlZHVsZXIudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7QUFFQTtJQUtFLFlBQVksZ0JBQWlDO1FBSDdDLGtCQUFhLEdBQUcsS0FBSyxDQUFDO1FBQ3RCLGVBQVUsR0FBVSxFQUFFLENBQUM7UUFHckIsSUFBSSxDQUFDLGFBQWEsR0FBRyxnQkFBZ0IsQ0FBQztJQUN4QyxDQUFDO0lBRUQsWUFBWSxDQUFDLFNBQW1DO1FBQzlDLElBQUksQ0FBQyxVQUFVLENBQUMsSUFBSSxDQUFDLFNBQVMsQ0FBQyxDQUFDO1FBQ2hDLElBQUksQ0FBQyxRQUFRLEVBQUUsQ0FBQztJQUNsQixDQUFDO0lBQ0QsV0FBVyxDQUFDLFNBQWM7UUFDeEIsSUFBSSxDQUFDLFVBQVUsQ0FBQyxJQUFJLENBQUMsU0FBUyxDQUFDLENBQUM7SUFDbEMsQ0FBQztJQUVELFFBQVE7UUFDTixVQUFVLENBQUMsR0FBRyxFQUFFO1lBQ2QsTUFBTSxVQUFVLEdBQUcsSUFBSSxDQUFDLFVBQVUsQ0FBQztZQUNuQyxJQUFJLENBQUMsVUFBVSxHQUFHLEVBQUUsQ0FBQztZQUNyQixHQUFHLENBQUMsQ0FBQyxJQUFJLFNBQVMsSUFBSSxVQUFVLENBQUMsQ0FBQyxDQUFDO2dCQUNqQyxJQUFJLENBQUMsYUFBYSxDQUFDLEdBQUcsQ0FBQyxTQUFTLEVBQUUsSUFBSSxDQUFDLENBQUM7WUFDMUMsQ0FBQztZQUNELEVBQUUsQ0FBQyxDQUFDLElBQUksQ0FBQyxVQUFVLENBQUMsTUFBTSxLQUFLLENBQUMsQ0FBQyxDQUFDLENBQUM7Z0JBQ2pDLE9BQU8sQ0FBQyxHQUFHLENBQUMsNkJBQTZCLENBQUMsQ0FBQztnQkFDM0MsSUFBSSxDQUFDLGFBQWEsR0FBRyxLQUFLLENBQUM7WUFDN0IsQ0FBQztZQUFDLElBQUksQ0FBQyxDQUFDO2dCQUNOLE9BQU8sQ0FBQyxHQUFHLENBQUMseUJBQXlCLENBQUMsQ0FBQztnQkFDdkMsSUFBSSxDQUFDLFFBQVEsRUFBRSxDQUFDO1lBQ2xCLENBQUM7UUFDSCxDQUFDLEVBQUUsSUFBSSxDQUFDLENBQUM7SUFDWCxDQUFDO0lBRUQsU0FBUztRQUNQLEVBQUUsQ0FBQyxDQUFDLENBQUMsSUFBSSxDQUFDLGFBQWEsQ0FBQyxDQUFDLENBQUM7WUFDeEIsT0FBTyxDQUFDLEdBQUcsQ0FBQyxnQ0FBZ0MsQ0FBQyxDQUFDO1lBQzlDLElBQUksQ0FBQyxhQUFhLEdBQUcsSUFBSSxDQUFDO1lBQzFCLElBQUksQ0FBQyxRQUFRLEVBQUUsQ0FBQztRQUNsQixDQUFDO0lBQ0gsQ0FBQztDQUNGO0FBekNELG9DQXlDQyJ9

View File

@@ -1,4 +0,0 @@
import * as elasticsearch from 'elasticsearch';
import * as smartdelay from 'smartdelay';
import * as smartlogInterfaces from 'smartlog-interfaces';
export { elasticsearch, smartdelay, smartlogInterfaces };

View File

@@ -1,9 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const elasticsearch = require("elasticsearch");
exports.elasticsearch = elasticsearch;
const smartdelay = require("smartdelay");
exports.smartdelay = smartdelay;
const smartlogInterfaces = require("smartlog-interfaces");
exports.smartlogInterfaces = smartlogInterfaces;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZWxhc3RpY2xvZy5wbHVnaW5zLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvZWxhc3RpY2xvZy5wbHVnaW5zLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7O0FBQUEsK0NBQStDO0FBR3RDLHNDQUFhO0FBRnRCLHlDQUF5QztBQUVqQixnQ0FBVTtBQURsQywwREFBMEQ7QUFDdEIsZ0RBQWtCIn0=

1 dist/index.d.ts vendored
View File

@@ -1 +0,0 @@
export * from './elasticlog.classes.elasticlog';

7 dist/index.js vendored
View File

@@ -1,7 +0,0 @@
"use strict";
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
Object.defineProperty(exports, "__esModule", { value: true });
__export(require("./elasticlog.classes.elasticlog"));
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi90cy9pbmRleC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7OztBQUFBLHFEQUFnRCJ9

View File

@@ -2,5 +2,16 @@
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"gitscope": "mojoio",
"gitrepo": "elasticsearch",
"description": "log to elasticsearch in a kibana compatible format",
"npmPackagename": "@mojoio/elasticsearch",
"license": "MIT"
}
}
}
}

1104 package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,31 +1,49 @@
{
"name": "@pushrocks/elasticlog",
"version": "1.0.16",
"name": "@mojoio/elasticsearch",
"version": "1.0.33",
"private": false,
"description": "log to elasticsearch in a kibana compatible format",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"author": "Lossless GmbH",
"license": "MIT",
"scripts": {
"test": "(tstest test/)",
"format": "(gitzone format)",
"build": "echo \"Not needed for now\""
"build": "(tsbuild)",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@gitzone/tsbuild": "^2.0.22",
"@gitzone/tsrun": "^1.1.13",
"@gitzone/tstest": "^1.0.15",
"@pushrocks/qenv": "^2.0.2",
"@pushrocks/tapbundle": "^3.0.7",
"@types/node": "^10.12.2",
"tslint": "^5.11.0",
"tslint-config-prettier": "^1.15.0"
"@gitzone/tsbuild": "^2.1.66",
"@gitzone/tsrun": "^1.2.42",
"@gitzone/tstest": "^1.0.74",
"@pushrocks/qenv": "^5.0.2",
"@pushrocks/tapbundle": "^5.0.8",
"@types/node": "^20.3.3"
},
"dependencies": {
"@pushrocks/smartdelay": "^2.0.2",
"@pushrocks/smartlog-interfaces": "^2.0.1",
"@types/elasticsearch": "^5.0.28",
"elasticsearch": "^15.2.0"
}
"@pushrocks/lik": "^6.0.2",
"@pushrocks/smartdelay": "^3.0.1",
"@pushrocks/smartlog-interfaces": "^3.0.0",
"@pushrocks/smartpromise": "^4.0.2",
"@pushrocks/smarttime": "^4.0.1",
"@types/elasticsearch": "^5.0.40",
"elasticsearch": "^16.7.3"
},
"files": [
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
"readme.md"
],
"type": "module",
"browserslist": [
"last 1 chrome versions"
]
}

4609 pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
vars:
required:
- ELK_DOMAIN
- ELK_PORT
- ELK_USER

View File

@@ -1,26 +1,26 @@
# elasticlog
# @mojoio/elasticsearch
log to elasticsearch in a kibana compatible format
## Availabililty
[![npm](https://pushrocks.gitlab.io/assets/repo-button-npm.svg)](https://www.npmjs.com/package/elasticlog)
[![git](https://pushrocks.gitlab.io/assets/repo-button-git.svg)](https://GitLab.com/pushrocks/elasticlog)
[![git](https://pushrocks.gitlab.io/assets/repo-button-mirror.svg)](https://github.com/pushrocks/elasticlog)
[![docs](https://pushrocks.gitlab.io/assets/repo-button-docs.svg)](https://pushrocks.gitlab.io/elasticlog/)
## Availabililty and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@mojoio/elasticsearch)
* [gitlab.com (source)](https://gitlab.com/mojoio/elasticsearch)
* [github.com (source mirror)](https://github.com/mojoio/elasticsearch)
* [docs (typedoc)](https://mojoio.gitlab.io/elasticsearch/)
## Status for master
[![build status](https://GitLab.com/pushrocks/elasticlog/badges/master/build.svg)](https://GitLab.com/pushrocks/elasticlog/commits/master)
[![coverage report](https://GitLab.com/pushrocks/elasticlog/badges/master/coverage.svg)](https://GitLab.com/pushrocks/elasticlog/commits/master)
[![npm downloads per month](https://img.shields.io/npm/dm/elasticlog.svg)](https://www.npmjs.com/package/elasticlog)
[![Dependency Status](https://david-dm.org/pushrocks/elasticlog.svg)](https://david-dm.org/pushrocks/elasticlog)
[![bitHound Dependencies](https://www.bithound.io/github/pushrocks/elasticlog/badges/dependencies.svg)](https://www.bithound.io/github/pushrocks/elasticlog/master/dependencies/npm)
[![bitHound Code](https://www.bithound.io/github/pushrocks/elasticlog/badges/code.svg)](https://www.bithound.io/github/pushrocks/elasticlog)
[![Known Vulnerabilities](https://snyk.io/test/npm/elasticlog/badge.svg)](https://snyk.io/test/npm/elasticlog)
[![TypeScript](https://img.shields.io/badge/TypeScript-2.x-blue.svg)](https://nodejs.org/dist/latest-v6.x/docs/api/)
[![node](https://img.shields.io/badge/node->=%206.x.x-blue.svg)](https://nodejs.org/dist/latest-v6.x/docs/api/)
[![JavaScript Style Guide](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/mojoio/elasticsearch/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipline Test Coverage | [![coverage report](https://gitlab.com/mojoio/elasticsearch/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@mojoio/elasticsearch)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/mojoio/elasticsearch)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@mojoio/elasticsearch)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@mojoio/elasticsearch)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@mojoio/elasticsearch)](https://lossless.cloud)
## Usage
@@ -32,3 +32,17 @@ For further information read the linked docs at the top of this README.
> | By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
[![repo-footer](https://pushrocks.gitlab.io/assets/repo-footer.svg)](https://push.rocks)
## Contribute
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: [Contribute monthly :)](https://lossless.link/contribute)
## Contribution
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
For further information read the linked docs at the top of this readme.
## Legal
> MIT licensed | **&copy;** [Task Venture Capital GmbH](https://task.vc)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)

View File

@@ -1,24 +1,25 @@
import { expect, tap } from '@pushrocks/tapbundle';
import { Qenv } from '@pushrocks/qenv';
import * as elasticlog from '../ts/index';
import * as elasticsearch from '../ts/index.js';
const testQenv = new Qenv('./', './.nogit/');
let testElasticLog: elasticlog.ElasticLog<any>;
let testElasticLog: elasticsearch.ElasticSearch<any>;
tap.test('first test', async () => {
testElasticLog = new elasticlog.ElasticLog({
domain: process.env.ELK_DOMAIN,
port: parseInt(process.env.ELK_PORT, 10),
testElasticLog = new elasticsearch.ElasticSearch({
indexPrefix: 'smartlog',
indexRetention: 7,
domain: testQenv.getEnvVarOnDemand('ELK_DOMAIN'),
port: parseInt(testQenv.getEnvVarOnDemand('ELK_PORT'), 10),
ssl: true,
user: process.env.ELK_USER,
pass: process.env.ELK_PASS
});
expect(testElasticLog).to.be.instanceOf(elasticlog.ElasticLog);
expect(testElasticLog).toBeInstanceOf(elasticsearch.ElasticSearch);
});
tap.test('should send a message to Elasticsearch', async () => {
testElasticLog.log({
timestamp: Date.now(),
type: 'increment',
level: 'info',
context: {
@@ -29,7 +30,8 @@ tap.test('should send a message to Elasticsearch', async () => {
runtime: 'node',
zone: 'ship.zone',
},
message: 'hi, this is a testMessage'
message: 'GET https://myroute.to.a.cool.destination/sorare?hello=there',
correlation: null,
});
});

8 ts/00_commitinfo_data.ts Normal file
View File

@@ -0,0 +1,8 @@
/**
* autocreated commitinfo by @pushrocks/commitinfo
*/
export const commitinfo = {
name: '@mojoio/elasticsearch',
version: '1.0.33',
description: 'log to elasticsearch in a kibana compatible format'
}

View File

@@ -1,85 +0,0 @@
// interfaces
import { Client as ElasticClient } from 'elasticsearch';
import { ILogContext, ILogPackage } from '@pushrocks/smartlog-interfaces';
// other classes
import { LogScheduler } from './elasticlog.classes.logscheduler';
export interface IStandardLogParams {
message: string;
severity: string;
}
export interface IElasticLogConstructorOptions {
port: number;
domain: string;
ssl: boolean;
user?: string;
pass?: string;
}
export class ElasticLog<T> {
client: ElasticClient;
logScheduler = new LogScheduler(this);
/**
* sets up an instance of Elastic log
* @param optionsArg
*/
constructor(optionsArg: IElasticLogConstructorOptions) {
this.client = new ElasticClient({
host: this.computeHostString(optionsArg),
log: 'trace'
});
}
/**
* computes the host string from the constructor options
* @param optionsArg
*/
private computeHostString(optionsArg: IElasticLogConstructorOptions): string {
let hostString = `${optionsArg.domain}:${optionsArg.port}`;
if (optionsArg.user && optionsArg.pass) {
hostString = `${optionsArg.user}:${optionsArg.pass}@${hostString}`;
}
if (optionsArg.ssl) {
hostString = `https://${hostString}`;
} else {
hostString = `http://${hostString}`;
}
return hostString;
}
public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
const now = new Date();
if (this.logScheduler.logsScheduled && !scheduleOverwrite) {
this.logScheduler.scheduleLog(logPackageArg);
return;
}
this.client.index(
{
index: `logstash-${now.getFullYear()}.${('0' + (now.getMonth() + 1)).slice(-2)}.${(
'0' + now.getDate()
).slice(-2)}`,
type: 'log',
body: {
'@timestamp': now.toISOString(),
zone: logPackageArg.context.zone,
container: logPackageArg.context.containerName,
environment: logPackageArg.context.environment,
severity: logPackageArg.level,
message: logPackageArg.message
}
},
(error, response) => {
if (error) {
console.log('ElasticLog encountered an error:');
console.log(error);
this.logScheduler.addFailedLog(logPackageArg);
} else {
console.log(`ElasticLog: ${logPackageArg.message}`);
}
}
);
}
}

View File

@@ -1,44 +0,0 @@
import { ElasticLog, IStandardLogParams } from './elasticlog.classes.elasticlog';
export class LogScheduler {
elasticLogRef: ElasticLog<any>;
logsScheduled = false;
logStorage: any[] = [];
constructor(elasticLogRefArg: ElasticLog<any>) {
this.elasticLogRef = elasticLogRefArg;
}
addFailedLog(objectArg: any | IStandardLogParams) {
this.logStorage.push(objectArg);
this.setRetry();
}
scheduleLog(logObject: any) {
this.logStorage.push(logObject);
}
setRetry() {
setTimeout(() => {
const oldStorage = this.logStorage;
this.logStorage = [];
for (let logObject of oldStorage) {
this.elasticLogRef.log(logObject, true);
}
if (this.logStorage.length === 0) {
console.log('ElasticLog retry success!!!');
this.logsScheduled = false;
} else {
console.log('ElasticLog retry failed');
this.setRetry();
}
}, 5000);
}
deferSend() {
if (!this.logsScheduled) {
console.log('Retry ElasticLog in 5 seconds!');
this.logsScheduled = true;
this.setRetry();
}
}
}

View File

@@ -1,4 +0,0 @@
import * as elasticsearch from 'elasticsearch';
import * as smartdelay from '@pushrocks/smartdelay';
import * as smartlogInterfaces from '@pushrocks/smartlog-interfaces';
export { elasticsearch, smartdelay, smartlogInterfaces };

View File

@@ -0,0 +1,103 @@
import * as plugins from './elasticsearch.plugins';
import { ElasticSearch } from './elasticsearch.classes.elasticsearch';
import { ILogPackage } from '@pushrocks/smartlog-interfaces';
import { Stringmap } from '@pushrocks/lik';
export class ElasticIndex {
private stringmap = new Stringmap();
private elasticSearchRef: ElasticSearch<any>;
constructor(elasticSearchInstanceArg: ElasticSearch<ILogPackage>) {
this.elasticSearchRef = elasticSearchInstanceArg;
}
public async ensureIndex(indexArg: string) {
const done = plugins.smartpromise.defer();
if (this.stringmap.checkString(indexArg)) {
done.resolve();
return;
}
this.elasticSearchRef.client.cat.indices(
{
format: 'json',
bytes: 'm',
},
async (err, responseArg: any[]) => {
if (err) {
console.log(err);
return;
}
// lets delete indexes that violate the retention
if (Array.isArray(responseArg)) {
const filteredIndices = responseArg.filter((indexObjectArg) => {
return indexObjectArg.index.startsWith('smartlog');
});
const filteredIndexNames = filteredIndices.map((indexObjectArg) => {
return indexObjectArg.index;
});
this.deleteOldIndices(filteredIndexNames);
}
let index = null;
if (Array.isArray(responseArg)) {
index = responseArg.find((indexObject) => {
return indexObject.index === indexArg;
});
}
if (!index) {
const done2 = plugins.smartpromise.defer();
this.elasticSearchRef.client.indices.create(
{
waitForActiveShards: '1',
index: indexArg,
},
(error, response) => {
// console.lof(response)
done2.resolve();
}
);
await done2.promise;
}
this.stringmap.addString(indexArg);
done.resolve();
}
);
await done.promise;
}
public createNewIndex(indexNameArg: string) {}
public async deleteOldIndices(indicesArray: string[]) {
const todayAsUnix: number = Date.now();
const rententionPeriodAsUnix: number = plugins.smarttime.units.days(
this.elasticSearchRef.indexRetention
);
for (const indexName of indicesArray) {
const regexResult = /^smartlog-([0-9]*)\.([0-9]*)\.([0-9]*)$/.exec(indexName);
const dateAsUnix: number = new Date(
`${regexResult[1]}-${regexResult[2]}-${regexResult[3]}`
).getTime();
if (todayAsUnix - rententionPeriodAsUnix > dateAsUnix) {
console.log(`found old index ${indexName}`);
const done2 = plugins.smartpromise.defer();
this.elasticSearchRef.client.indices.delete(
{
index: indexName,
},
(err2, response2) => {
if (err2) {
console.log(err2);
}
console.log(`deleted ${indexName}`);
done2.resolve();
}
);
await done2.promise;
}
}
}
}
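
To make the retention logic above concrete: `deleteOldIndices` parses the date out of each `smartlog-YYYY.MM.DD` index name and drops the index once it is older than `indexRetention` days. Below is a minimal standalone sketch of that check; the `isIndexExpired` helper is hypothetical, and the plain millisecond arithmetic stands in for `plugins.smarttime.units.days`.

```typescript
// Mirrors the regex and date comparison used by ElasticIndex.deleteOldIndices.
const isIndexExpired = (indexName: string, indexRetentionDays: number): boolean => {
  const match = /^smartlog-([0-9]*)\.([0-9]*)\.([0-9]*)$/.exec(indexName);
  if (!match) {
    return false; // not a smartlog index; leave it alone
  }
  const indexDateMs = new Date(`${match[1]}-${match[2]}-${match[3]}`).getTime();
  const retentionMs = indexRetentionDays * 24 * 60 * 60 * 1000; // days -> ms
  return Date.now() - retentionMs > indexDateMs;
};

// with indexRetention = 7, an index dated more than seven days ago is deleted
console.log(isIndexExpired('smartlog-2023.06.24', 7));
```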

View File

@@ -0,0 +1,44 @@
import { ElasticSearch, type IStandardLogParams } from './elasticsearch.classes.elasticsearch.js';
export class ElasticScheduler {
elasticSearchRef: ElasticSearch<any>;
docsScheduled = false;
docsStorage: any[] = [];
constructor(elasticLogRefArg: ElasticSearch<any>) {
this.elasticSearchRef = elasticLogRefArg;
}
public addFailedDoc(objectArg: any | IStandardLogParams) {
this.docsStorage.push(objectArg);
this.setRetry();
}
public scheduleDoc(logObject: any) {
this.docsStorage.push(logObject);
}
public setRetry() {
setTimeout(() => {
const oldStorage = this.docsStorage;
this.docsStorage = [];
for (let logObject of oldStorage) {
this.elasticSearchRef.log(logObject, true);
}
if (this.docsStorage.length === 0) {
console.log('ElasticLog retry success!!!');
this.docsScheduled = false;
} else {
console.log('ElasticLog retry failed');
this.setRetry();
}
}, 5000);
}
public deferSend() {
if (!this.docsScheduled) {
console.log('Retry ElasticLog in 5 seconds!');
this.docsScheduled = true;
this.setRetry();
}
}
}

View File

@@ -0,0 +1,105 @@
// interfaces
import { Client as ElasticClient } from 'elasticsearch';
import type { ILogContext, ILogPackage, ILogDestination } from '@pushrocks/smartlog-interfaces';
// other classes
import { ElasticScheduler } from './elasticsearch.classes.elasticscheduler.js';
import { ElasticIndex } from './elasticsearch.classes.elasticindex.js';
export interface IStandardLogParams {
message: string;
severity: string;
}
export interface IElasticSearchConstructorOptions {
indexPrefix: string;
indexRetention: number;
port: number;
domain: string;
ssl: boolean;
user?: string;
pass?: string;
}
export class ElasticSearch<T> {
public client: ElasticClient;
public elasticScheduler = new ElasticScheduler(this);
public elasticIndex: ElasticIndex = new ElasticIndex(this);
public indexPrefix: string;
public indexRetention: number;
/**
* sets up an instance of Elastic log
* @param optionsArg
*/
constructor(optionsArg: IElasticSearchConstructorOptions) {
this.client = new ElasticClient({
host: this.computeHostString(optionsArg),
// log: 'trace'
});
this.indexPrefix = optionsArg.indexPrefix;
this.indexRetention = optionsArg.indexRetention;
}
/**
* computes the host string from the constructor options
* @param optionsArg
*/
private computeHostString(optionsArg: IElasticSearchConstructorOptions): string {
let hostString = `${optionsArg.domain}:${optionsArg.port}`;
if (optionsArg.user && optionsArg.pass) {
hostString = `${optionsArg.user}:${optionsArg.pass}@${hostString}`;
}
if (optionsArg.ssl) {
hostString = `https://${hostString}`;
} else {
hostString = `http://${hostString}`;
}
console.log(hostString);
return hostString;
}
public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
const now = new Date();
const indexToUse = `${this.indexPrefix}-${now.getFullYear()}.${(
'0' +
(now.getMonth() + 1)
).slice(-2)}.${('0' + now.getDate()).slice(-2)}`;
if (this.elasticScheduler.docsScheduled && !scheduleOverwrite) {
this.elasticScheduler.scheduleDoc(logPackageArg);
return;
}
await this.elasticIndex.ensureIndex(indexToUse);
this.client.index(
{
index: indexToUse,
type: 'log',
body: {
'@timestamp': new Date(logPackageArg.timestamp).toISOString(),
...logPackageArg,
},
},
(error, response) => {
if (error) {
console.log('ElasticLog encountered an error:');
console.log(error);
this.elasticScheduler.addFailedDoc(logPackageArg);
} else {
// console.log(`ElasticLog: ${logPackageArg.message}`);
}
}
);
}
get logDestination(): ILogDestination {
return {
handleLog: async (smartlogPackageArg: ILogPackage) => {
this.log(smartlogPackageArg);
},
};
}
}
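
Putting the pieces together, usage of the new `ElasticSearch` class looks roughly like the sketch below. The import path comes from the package name in package.json, the credentials are placeholders, and the log package mirrors the shape used in the updated test file (context fields outside the visible hunk are assumed from `@pushrocks/smartlog-interfaces`).

```typescript
import { ElasticSearch } from '@mojoio/elasticsearch';

const elasticLogger = new ElasticSearch({
  indexPrefix: 'smartlog', // daily indices are named smartlog-YYYY.MM.DD
  indexRetention: 7,       // indices older than 7 days are cleaned up
  domain: 'elastic.example.com',
  port: 9200,
  ssl: true,
  user: 'elastic',
  pass: 'secret',
});

// index one document; queuing and retries are handled by ElasticScheduler
await elasticLogger.log({
  timestamp: Date.now(),
  type: 'increment', // as in test/test.ts
  level: 'info',
  context: {
    company: 'Lossless GmbH',
    companyunit: 'lossless.cloud',
    containerName: 'testcontainer',
    environment: 'test',
    runtime: 'node',
    zone: 'ship.zone',
  },
  message: 'GET https://myroute.to.a.cool.destination/sorare?hello=there',
  correlation: null,
});

// the logDestination getter plugs into @pushrocks/smartlog
// (method name assumed): logger.addLogDestination(elasticLogger.logDestination);
```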

View File

@@ -0,0 +1,8 @@
import * as elasticsearch from 'elasticsearch';
import * as lik from '@pushrocks/lik';
import * as smartdelay from '@pushrocks/smartdelay';
import * as smartlogInterfaces from '@pushrocks/smartlog-interfaces';
import * as smartpromise from '@pushrocks/smartpromise';
import * as smarttime from '@pushrocks/smarttime';
export { elasticsearch, lik, smartdelay, smartlogInterfaces, smartpromise, smarttime };

View File

@@ -1 +1 @@
export * from './elasticlog.classes.elasticlog';
export * from './elasticsearch.classes.elasticsearch.js';

11 tsconfig.json Normal file
View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"experimentalDecorators": true,
"useDefineForClassFields": false,
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "nodenext",
"esModuleInterop": true,
"verbatimModuleSyntax": true,
}
}
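
One consequence of this config worth noting: with `"module": "ES2022"`, `"moduleResolution": "nodenext"` and the `"type": "module"` added to package.json, relative imports have to name the compiled `.js` file explicitly, which is why the imports in this diff changed from extension-less paths to `.js` paths; the `verbatimModuleSyntax` option likewise explains the `type` qualifier in `import { ElasticSearch, type IStandardLogParams }`. A small illustration, with paths taken from the files above:

```typescript
// Under nodenext ESM resolution the emitted .js extension is required,
// even inside .ts sources:
import { ElasticSearch } from './elasticsearch.classes.elasticsearch.js'; // ok
// import { ElasticSearch } from './elasticsearch.classes.elasticsearch';
// ^ rejected: relative ESM imports need explicit file extensions under nodenext
```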

View File

@@ -1,17 +0,0 @@
{
"extends": ["tslint:latest", "tslint-config-prettier"],
"rules": {
"semicolon": [true, "always"],
"no-console": false,
"ordered-imports": false,
"object-literal-sort-keys": false,
"member-ordering": {
"options":{
"order": [
"static-method"
]
}
}
},
"defaultSeverity": "warning"
}