Compare commits
34 Commits
| SHA1 |
|---|
| 36d7cb69a3 |
| 4924e0a151 |
| cd98529541 |
| e6a9282987 |
| 9dcadcd611 |
| 4b045ff988 |
| 023dd1b519 |
| 4971385eae |
| e209839962 |
| e44365b674 |
| bd154089c3 |
| 0be693da60 |
| 040c93dec3 |
| 21e55bd341 |
| f1d04fe63c |
| 49c4660131 |
| e5fd0361fc |
| d6a291d8d4 |
| fc87fd7ab7 |
| 203444d1a6 |
| cdbf1fd316 |
| 10108d8338 |
| 36abb2c7c0 |
| 8f00d90bb1 |
| 41f1758d46 |
| 2b7cd33996 |
| 80a04ca893 |
| 93f739c79e |
| a1b5bf5c0c |
| 084c5d137c |
| 4aa0592bd5 |
| 0313e5045a |
| 7442d93f58 |
| 35e70aae62 |
package.json (26 lines changed)
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@apiclient.xyz/elasticsearch",
-  "version": "1.0.56",
+  "version": "2.0.16",
   "private": false,
   "description": "log to elasticsearch in a kibana compatible format",
   "main": "dist_ts/index.js",
@@ -14,20 +14,20 @@
     "buildDocs": "tsdoc"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.66",
-    "@gitzone/tsrun": "^1.2.42",
-    "@gitzone/tstest": "^1.0.74",
-    "@pushrocks/qenv": "^5.0.2",
-    "@pushrocks/tapbundle": "^5.0.8",
-    "@types/node": "^20.3.3"
+    "@git.zone/tsbuild": "^2.1.70",
+    "@git.zone/tsrun": "^1.2.46",
+    "@git.zone/tstest": "^1.0.80",
+    "@push.rocks/qenv": "^6.0.2",
+    "@push.rocks/tapbundle": "^5.0.15",
+    "@types/node": "^20.5.7"
   },
   "dependencies": {
-    "@elastic/elasticsearch": "7.17.11-patch.1",
-    "@pushrocks/lik": "^6.0.2",
-    "@pushrocks/smartdelay": "^3.0.1",
-    "@pushrocks/smartlog-interfaces": "^3.0.0",
-    "@pushrocks/smartpromise": "^4.0.2",
-    "@pushrocks/smarttime": "^4.0.1"
+    "@elastic/elasticsearch": "8.9.0",
+    "@push.rocks/lik": "^6.0.5",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartlog-interfaces": "^3.0.0",
+    "@push.rocks/smartpromise": "^4.0.2",
+    "@push.rocks/smarttime": "^4.0.5"
   },
   "files": [
     "ts/**/*",
```
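The headline dependency change is the jump from the patched 7.17 Elasticsearch client to `@elastic/elasticsearch` 8.9.0, alongside the `@gitzone` and `@pushrocks` scopes being renamed to `@git.zone` and `@push.rocks`. As rough orientation only, constructing the 8.x client that the updated code expects looks like the sketch below; the node URL and credentials are placeholder values, not taken from this repository:

```typescript
import { Client } from '@elastic/elasticsearch';

// Hypothetical connection values for illustration only.
const client = new Client({
  node: 'https://localhost:9200',
  auth: { username: 'elastic', password: 'changeme' },
});
```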
pnpm-lock.yaml (2203 lines changed, generated): file diff suppressed because it is too large.
Test file:

```diff
@@ -1,5 +1,5 @@
-import { expect, tap } from '@pushrocks/tapbundle';
-import { Qenv } from '@pushrocks/qenv';
+import { expect, tap } from '@push.rocks/tapbundle';
+import { Qenv } from '@push.rocks/qenv';
 import * as elasticsearch from '../ts/index.js';
 
 let testElasticLog: elasticsearch.ElsSmartlogDestination<any>;
@@ -49,7 +49,7 @@ tap.test('should create an ElasticDoc instance', async () => {
 });
 
 tap.test('should add and update documents in a piping session', async () => {
-  await testElasticDoc.startPipingSession();
+  await testElasticDoc.startPipingSession({});
   await testElasticDoc.pipeDocument({
     docId: '1',
     timestamp: new Date().toISOString(),
```
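The test change reflects that `startPipingSession` now takes an options object rather than being called with no arguments. Passing an empty object keeps the old behaviour; passing `onlyNew: true` enables the incremental mode introduced in the ElasticDoc diff further down. A minimal sketch of the two call forms, using the `testElasticDoc` instance from this test file:

```typescript
// Process every piped document, as before.
await testElasticDoc.startPipingSession({});

// Only index documents newer than the latest '@timestamp' already stored.
await testElasticDoc.startPipingSession({ onlyNew: true });
```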
Commit info module:

```diff
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@apiclient.xyz/elasticsearch',
-  version: '1.0.56',
+  version: '2.0.16',
   description: 'log to elasticsearch in a kibana compatible format'
 }
```
ElasticDoc class:

```diff
@@ -14,13 +14,19 @@ export interface ISnapshot {
   aggregationData: any;
 }
 
-export type SnapshotProcessor = (iterator: AsyncIterable<any>, prevSnapshot: ISnapshot | null) => Promise<ISnapshot>;
+export type SnapshotProcessor = (
+  iterator: AsyncIterable<any>,
+  prevSnapshot: ISnapshot | null
+) => Promise<ISnapshot>;
 
 export class ElasticDoc {
   public client: ElasticClient;
   public index: string;
   private sessionDocs: Set<string> = new Set();
   private indexInitialized: boolean = false;
+  private latestTimestamp: string | null = null; // Store the latest timestamp
+  private onlyNew: boolean = false; // Whether to only pipe new docs
+  public fastForward: boolean = false; // Whether to fast forward to the latest timestamp
 
   private BATCH_SIZE = 1000;
 
```
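The `SnapshotProcessor` type is only reformatted here, but it is the extension point for `takeSnapshot` shown later in this file: it receives an async iterator over all documents plus the previous snapshot and returns the next snapshot. A minimal sketch of a processor that just counts documents, assuming `ISnapshot` carries `date` and `aggregationData` as used elsewhere in this diff; the counting logic and import path are illustrative:

```typescript
// Import path assumed from the els.classes.* naming used in this repository.
import type { SnapshotProcessor, ISnapshot } from './els.classes.elasticdoc.js';

// Hypothetical processor: counts all documents and stores the total.
const countProcessor: SnapshotProcessor = async (
  iterator: AsyncIterable<any>,
  prevSnapshot: ISnapshot | null
): Promise<ISnapshot> => {
  let count = 0;
  for await (const _doc of iterator) {
    count++;
  }
  return {
    date: new Date().toISOString(),
    aggregationData: {
      count,
      previousCount: prevSnapshot?.aggregationData?.count ?? 0,
    },
  };
};
```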
```diff
@@ -34,13 +40,13 @@ export class ElasticDoc {
 
   private async ensureIndexExists(doc: any) {
     if (!this.indexInitialized) {
-      const { body: indexExists } = await this.client.indices.exists({ index: this.index });
+      const indexExists = await this.client.indices.exists({ index: this.index });
       if (!indexExists) {
         const mappings = this.createMappingsFromDoc(doc);
         await this.client.indices.create({
           index: this.index,
           body: {
-            mappings,
+            // mappings,
             settings: {
               // You can define the settings according to your requirements here
             },
```
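This hunk shows the pattern that repeats throughout the rest of the compare: the 7.x client returned `{ body, statusCode, ... }` and callers destructured `body`, while the 8.x client resolves directly to the response body (for `indices.exists` that is simply a boolean). A minimal before/after sketch, where `client` is an `@elastic/elasticsearch` 8.x `Client` instance and the index name is a placeholder:

```typescript
// 7.x client: the payload sits under `body`.
// const { body: indexExists } = await client.indices.exists({ index: 'my-index' });

// 8.x client: the call resolves to the payload itself.
const indexExists: boolean = await client.indices.exists({ index: 'my-index' });
```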
```diff
@@ -63,15 +69,34 @@ export class ElasticDoc {
     return { properties };
   }
 
-  async startPipingSession() {
+  async startPipingSession(options: { onlyNew?: boolean }) {
     this.sessionDocs.clear();
-  }
-
-  async pipeDocument(optionsArg: {
-    docId: string;
-    timestamp: string | number;
-    doc: any;
-  }) {
+    this.onlyNew = options.onlyNew;
+    const indexExists = await this.client.indices.exists({ index: this.index });
+    if (this.onlyNew && indexExists) {
+      const response = await this.client.search({
+        index: this.index,
+        sort: '@timestamp:desc',
+        size: 1,
+      });
+
+      // If the search query succeeded, the index exists.
+      const hit = response.hits.hits[0];
+      this.latestTimestamp = hit?._source?.['@timestamp'] || null;
+
+      if (this.latestTimestamp) {
+        console.log(`Working in "onlyNew" mode. Hence we are omitting documents prior to ${this.latestTimestamp}`);
+      } else {
+        console.log(`Working in "onlyNew" mode, but no documents found in index ${this.index}. Hence processing all documents now.`);
+      }
+    } else if (this.onlyNew && !indexExists) {
+      console.log(`Working in "onlyNew" mode, but index ${this.index} does not exist. Hence processing all documents now.`);
+    }
+  }
+
+  async pipeDocument(optionsArg: { docId: string; timestamp?: string | number; doc: any }) {
     await this.ensureIndexExists(optionsArg.doc);
 
     const documentBody = {
```
```diff
@@ -79,24 +104,43 @@ export class ElasticDoc {
       ...(optionsArg.timestamp && { '@timestamp': optionsArg.timestamp }),
     };
 
-    await this.client.index({
-      index: this.index,
-      id: optionsArg.docId,
-      body: documentBody,
-    });
+    // If 'onlyNew' is true, compare the document timestamp with the latest timestamp
+    if (this.onlyNew) {
+      if (this.latestTimestamp && optionsArg.timestamp <= this.latestTimestamp) {
+        this.fastForward = true;
+      } else {
+        this.fastForward = false;
+        await this.client.index({
+          index: this.index,
+          id: optionsArg.docId,
+          body: documentBody,
+        });
+      }
+    } else {
+      this.fastForward = false;
+      await this.client.index({
+        index: this.index,
+        id: optionsArg.docId,
+        body: documentBody,
+      });
+    }
     this.sessionDocs.add(optionsArg.docId);
   }
 
   async endPipingSession() {
     const allDocIds: string[] = [];
     const responseQueue = [];
-    let response = await this.client.search({ index: this.index, scroll: '1m', size: this.BATCH_SIZE });
+    let response = await this.client.search({
+      index: this.index,
+      scroll: '1m',
+      size: this.BATCH_SIZE,
+    });
     while (true) {
-      response.body.hits.hits.forEach((hit: any) => allDocIds.push(hit._id));
-      if (!response.body.hits.hits.length) {
+      response.hits.hits.forEach((hit: any) => allDocIds.push(hit._id));
+      if (!response.hits.hits.length) {
         break;
       }
-      response = await this.client.scroll({ scroll_id: response.body._scroll_id, scroll: '1m' });
+      response = await this.client.scroll({ scroll_id: response._scroll_id, scroll: '1m' });
     }
 
     for (const docId of allDocIds) {
```
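With `onlyNew` enabled, `pipeDocument` skips anything at or before the stored latest `@timestamp` and signals that through the public `fastForward` flag instead of throwing. A minimal sketch of how a caller might drive a session and react to the flag; `elasticDoc` is an existing `ElasticDoc` instance (its construction is not part of this hunk), and `sourceDocs`, `doc.id`, and `doc.createdAt` are placeholders for whatever produces the documents:

```typescript
await elasticDoc.startPipingSession({ onlyNew: true });

for (const doc of sourceDocs) {
  await elasticDoc.pipeDocument({
    docId: doc.id,
    timestamp: doc.createdAt, // ISO string or epoch millis
    doc,
  });
  if (elasticDoc.fastForward) {
    // The document was at or before the latest stored '@timestamp', so it was not indexed.
    console.log(`skipped ${doc.id}`);
  }
}

await elasticDoc.endPipingSession();
```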
```diff
@@ -124,37 +168,37 @@ export class ElasticDoc {
 
   async takeSnapshot(processIterator: SnapshotProcessor) {
     const snapshotIndex = `${this.index}_snapshots`;
-
-    const { body: indexExists } = await this.client.indices.exists({ index: snapshotIndex });
-
+    const indexExists = await this.client.indices.exists({ index: snapshotIndex });
     if (!indexExists) {
-    await this.client.indices.create({
+      await this.client.indices.create({
         index: snapshotIndex,
         body: {
           mappings: {
             properties: {
               date: {
-                type: 'date'
+                type: 'date',
               },
               aggregationData: {
                 type: 'object',
-                enabled: true
-              }
-            }
-          }
-        }
+                enabled: true,
+              },
+            },
+          },
+        },
       });
     }
 
     const documentIterator = this.getDocumentIterator();
 
     const newSnapshot = await processIterator(documentIterator, await this.getLastSnapshot());
 
     await this.storeSnapshot(newSnapshot);
   }
 
-private async getLastSnapshot(): Promise<ISnapshot | null> {
+  private async getLastSnapshot(): Promise<ISnapshot | null> {
     const snapshotIndex = `${this.index}_snapshots`;
-    const { body: indexExists } = await this.client.indices.exists({ index: snapshotIndex });
+    const indexExists = await this.client.indices.exists({ index: snapshotIndex });
 
     if (!indexExists) {
       return null;
```
```diff
@@ -163,33 +207,36 @@ private async getLastSnapshot(): Promise<ISnapshot | null> {
     const response = await this.client.search({
       index: snapshotIndex,
       sort: 'date:desc',
-      size: 1
+      size: 1,
     });
 
-    if (response.body.hits.hits.length > 0) {
-      const hit = response.body.hits.hits[0];
+    if (response.hits.hits.length > 0) {
+      const hit = response.hits.hits[0];
       return {
-        date: hit._source.date,
-        aggregationData: hit._source.aggregationData,
+        date: hit._source['date'],
+        aggregationData: hit._source['aggregationData'],
       };
     } else {
       return null;
     }
   }
 
   private async *getDocumentIterator() {
-    let response = await this.client.search({ index: this.index, scroll: '1m', size: this.BATCH_SIZE });
+    let response = await this.client.search({
+      index: this.index,
+      scroll: '1m',
+      size: this.BATCH_SIZE,
+    });
     while (true) {
-      for (const hit of response.body.hits.hits) {
+      for (const hit of response.hits.hits) {
         yield hit._source;
       }
 
-      if (!response.body.hits.hits.length) {
+      if (!response.hits.hits.length) {
         break;
       }
 
-      response = await this.client.scroll({ scroll_id: response.body._scroll_id, scroll: '1m' });
+      response = await this.client.scroll({ scroll_id: response._scroll_id, scroll: '1m' });
     }
   }
 
```
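`getDocumentIterator` wraps the classic scroll loop in an async generator, and on the 8.x client the hits are read straight off the response instead of `response.body`. A standalone sketch of the same pattern outside the class, assuming an already configured 8.x client and a placeholder index name:

```typescript
import { Client } from '@elastic/elasticsearch';

// Streams every document from an index using the scroll API (8.x response shape).
async function* scrollDocuments(client: Client, index: string) {
  let response: any = await client.search({ index, scroll: '1m', size: 1000 });
  while (response.hits.hits.length) {
    for (const hit of response.hits.hits) {
      yield hit._source;
    }
    response = await client.scroll({ scroll_id: response._scroll_id, scroll: '1m' });
  }
}

// Usage: for await (const doc of scrollDocuments(client, 'my-index')) { ... }
```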
ElasticIndex class:

```diff
@@ -1,7 +1,7 @@
 import * as plugins from './els.plugins.js';
 import { ElsSmartlogDestination } from './els.classes.smartlogdestination.js';
-import { type ILogPackage } from '@pushrocks/smartlog-interfaces';
-import { Stringmap } from '@pushrocks/lik';
+import { type ILogPackage } from '@push.rocks/smartlog-interfaces';
+import { Stringmap } from '@push.rocks/lik';
 
 export class ElasticIndex {
   private stringmap = new Stringmap();
@@ -18,7 +18,7 @@ export class ElasticIndex {
 
     const responseArg = await this.elasticSearchRef.client.cat.indices({
       format: 'json',
-      bytes: 'm',
+      bytes: 'mb',
     }).catch(err => {
       console.log(err);
     });
@@ -27,8 +27,8 @@ export class ElasticIndex {
       throw new Error('Could not get valid response from elastic search');
     }
 
-    if (Array.isArray(responseArg.body)) {
-      const filteredIndices = responseArg.body.filter((indexObjectArg) => {
+    if (Array.isArray(responseArg)) {
+      const filteredIndices = responseArg.filter((indexObjectArg) => {
         return indexObjectArg.index.startsWith(prefixArg);
       });
       const filteredIndexNames = filteredIndices.map((indexObjectArg) => {
@@ -39,8 +39,8 @@ export class ElasticIndex {
 
     let index = null;
 
-    if (Array.isArray(responseArg.body)) {
-      index = responseArg.body.find((indexItemArg) => {
+    if (Array.isArray(responseArg)) {
+      index = responseArg.find((indexItemArg) => {
         return indexItemArg.index === indexNameArg;
       });
     }
@@ -55,7 +55,7 @@ export class ElasticIndex {
 
   public async createNewIndex(indexNameArg: string) {
     const response = await this.elasticSearchRef.client.indices.create({
-      wait_for_active_shards: '1',
+      wait_for_active_shards: 1,
       index: indexNameArg,
       body: {
         mappings: {
```
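On the 8.x client, `cat.indices({ format: 'json' })` resolves directly to an array of index records, which is why the `responseArg.body` checks become checks on `responseArg` itself. A minimal standalone sketch of listing indices by prefix with the new response shape; the prefix and client are assumptions, not code from this repository:

```typescript
import { Client } from '@elastic/elasticsearch';

// Assumes an already configured 8.x client.
async function listIndicesByPrefix(client: Client, prefix: string): Promise<string[]> {
  const records = await client.cat.indices({ format: 'json', bytes: 'mb' });
  return records
    .filter((record) => record.index?.startsWith(prefix))
    .map((record) => record.index as string);
}
```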
FastPush class:

```diff
@@ -18,7 +18,7 @@ export class FastPush {
   async pushToIndex(indexName: string, docArray: any[], options?: FastPushOptions) {
     if (docArray.length === 0) return;
 
-    const { body: indexExists } = await this.client.indices.exists({ index: indexName });
+    const indexExists = await this.client.indices.exists({ index: indexName });
 
     if (indexExists) {
       if (options?.deleteIndex) {
```
ElasticKVStore class:

```diff
@@ -27,7 +27,7 @@ export class ElasticKVStore {
 
   private async setupIndex() {
     try {
-      const { body: indexExists } = await this.client.indices.exists({ index: this.index });
+      const indexExists = await this.client.indices.exists({ index: this.index });
 
       if (!indexExists) {
         await this.client.indices.create({
@@ -72,7 +72,7 @@ export class ElasticKVStore {
         index: this.index,
         id: key
       });
-      return response.body._source.value;
+      return response._source['value'];
     } catch (error) {
       if (error.meta && error.meta.statusCode === 404) {
         return null;
```
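The same unwrapping applies to `client.get`: the stored document is read from `response._source` directly, and a missing key still surfaces as an error carrying `meta.statusCode === 404`. A minimal standalone sketch of that read path, assuming a configured 8.x client and placeholder index and key names:

```typescript
import { Client } from '@elastic/elasticsearch';

// Returns the stored value for a key, or null if the document does not exist.
async function getValue(client: Client, index: string, key: string): Promise<string | null> {
  try {
    const response = await client.get<{ value: string }>({ index, id: key });
    return response._source?.value ?? null;
  } catch (error: any) {
    if (error.meta && error.meta.statusCode === 404) {
      return null;
    }
    throw error;
  }
}
```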
ElsSmartlogDestination class:

```diff
@@ -1,8 +1,5 @@
 // interfaces
 import { Client as ElasticClient } from '@elastic/elasticsearch';
-import type { ILogContext, ILogPackage, ILogDestination } from '@pushrocks/smartlog-interfaces';
-
-// other classes
+import type { ILogContext, ILogPackage, ILogDestination } from '@push.rocks/smartlog-interfaces';
 import { ElasticScheduler } from './els.classes.elasticscheduler.js';
 import { ElasticIndex } from './els.classes.elasticindex.js';
 
@@ -29,10 +26,6 @@ export class ElsSmartlogDestination<T> {
   public indexPrefix: string;
   public indexRetention: number;
 
-  /**
-   * sets up an instance of Elastic log
-   * @param optionsArg
-   */
   constructor(optionsArg: IElasticSearchConstructorOptions) {
     this.client = new ElasticClient({
       node: optionsArg.node,
```
```diff
@@ -40,58 +33,34 @@ export class ElsSmartlogDestination<T> {
     });
     this.indexPrefix = `${optionsArg.indexPrefix}`;
     this.indexRetention = optionsArg.indexRetention;
+    this.setupDataStream();
   }
 
+  private async setupDataStream() {
+    // Define an index template
+    await this.client.indices.putIndexTemplate({
+      name: `${this.indexPrefix}_template`,
+      index_patterns: [`${this.indexPrefix}-*`],
+      data_stream: {},
+    });
+  }
+
   public async log(logPackageArg: ILogPackage, scheduleOverwrite = false) {
     const now = new Date();
-    const indexToUse = `${this.indexPrefix}-${now.toISOString().split('T')[0]}`;
+    const indexToUse = `${this.indexPrefix}-data-stream`; // Use data stream name
 
     if (this.elasticScheduler.docsScheduled && !scheduleOverwrite) {
       this.elasticScheduler.scheduleDoc(logPackageArg);
       return;
     }
 
-    // Make sure the index is created with a mapping for dynamic JSON
-    const indexExists = await this.client.indices.exists({ index: indexToUse });
-    if (!indexExists.body) {
-      await this.client.indices.create({
-        index: indexToUse,
-        body: {
-          mappings: {
-            properties: {
-              '@timestamp': {
-                type: 'date',
-              },
-              logPackageArg: {
-                properties: {
-                  payload: {
-                    type: 'object',
-                    dynamic: true
-                  }
-                }
-              },
-            },
-          },
-        },
-      });
-    }
-
-    this.client.index(
+    await this.client.index(
       {
         index: indexToUse,
         body: {
           '@timestamp': new Date(logPackageArg.timestamp).toISOString(),
           ...logPackageArg,
         },
       },
-      (error, response) => {
-        if (error) {
-          console.log('ElasticLog encountered an error:');
-          console.log(error);
-          this.elasticScheduler.addFailedDoc(logPackageArg);
-        } else {
-          // console.log(`ElasticLog: ${logPackageArg.message}`);
-        }
-      }
     );
   }
```
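The per-day index creation with an explicit mapping is replaced by an index template carrying `data_stream: {}`, so Elasticsearch manages the backing indices behind the stream name. As a rough sketch only: the template call mirrors the one above, and the stream can also be created up front with `indices.createDataStream`, which this diff does not use; the prefix and client are placeholders:

```typescript
import { Client } from '@elastic/elasticsearch';

// Assumes a configured 8.x client and a placeholder prefix.
async function setupLogStream(client: Client, prefix: string) {
  // Any index or stream matching `${prefix}-*` is treated as a data stream.
  await client.indices.putIndexTemplate({
    name: `${prefix}_template`,
    index_patterns: [`${prefix}-*`],
    data_stream: {},
  });

  // Optional: create the stream explicitly instead of on first write.
  await client.indices.createDataStream({ name: `${prefix}-data-stream` });
}
```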
```diff
@@ -99,7 +68,7 @@ export class ElsSmartlogDestination<T> {
   get logDestination(): ILogDestination {
     return {
       handleLog: async (smartlogPackageArg: ILogPackage) => {
-        this.log(smartlogPackageArg);
+        await this.log(smartlogPackageArg);
       },
     };
   }
```
Plugins module (els.plugins):

```diff
@@ -1,8 +1,8 @@
 import * as elasticsearch from '@elastic/elasticsearch';
-import * as lik from '@pushrocks/lik';
-import * as smartdelay from '@pushrocks/smartdelay';
-import * as smartlogInterfaces from '@pushrocks/smartlog-interfaces';
-import * as smartpromise from '@pushrocks/smartpromise';
-import * as smarttime from '@pushrocks/smarttime';
+import * as lik from '@push.rocks/lik';
+import * as smartdelay from '@push.rocks/smartdelay';
+import * as smartlogInterfaces from '@push.rocks/smartlog-interfaces';
+import * as smartpromise from '@push.rocks/smartpromise';
+import * as smarttime from '@push.rocks/smarttime';
 
 export { elasticsearch, lik, smartdelay, smartlogInterfaces, smartpromise, smarttime };
```