fix(core): update

This commit is contained in:
Philipp Kunz 2024-05-08 20:49:10 +02:00
commit 1d6d056942
26 changed files with 6396 additions and 0 deletions

20
.gitignore vendored Normal file
View File

@ -0,0 +1,20 @@
.nogit/
# artifacts
coverage/
public/
pages/
# installs
node_modules/
# caches
.yarn/
.cache/
.rpt2_cache
# builds
dist/
dist_*/
# custom

132
.gitlab-ci.yml Normal file
View File

@ -0,0 +1,132 @@
# gitzone ci_default_private
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
cache:
paths:
- .npmci_cache/
key: '$CI_BUILD_STAGE'
stages:
- security
- test
- release
- metadata
before_script:
- pnpm install -g pnpm
- pnpm install -g @shipzone/npmci
- npmci npm prepare
# ====================
# security stage
# ====================
auditProductionDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
tags:
- lossless
- docker
auditDevDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --dev
tags:
- lossless
- docker
allow_failure: true
# ====================
# test stage
# ====================
testStable:
stage: test
script:
- npmci node install stable
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- lossless
- docker
- notpriv
testBuild:
stage: test
script:
- npmci node install stable
- npmci npm install
- npmci command npm run build
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- lossless
- docker
- notpriv
release:
stage: release
script:
- npmci node install stable
- npmci npm publish
only:
- tags
tags:
- lossless
- docker
- notpriv
# ====================
# metadata stage
# ====================
codequality:
stage: metadata
allow_failure: true
only:
- tags
script:
- npmci command npm install -g typescript
- npmci npm prepare
- npmci npm install
tags:
- lossless
- docker
- priv
trigger:
stage: metadata
script:
- npmci trigger
only:
- tags
tags:
- lossless
- docker
- notpriv
pages:
stage: metadata
script:
- npmci node install lts
- npmci command npm install -g @git.zone/tsdoc
- npmci npm install
- npmci command tsdoc
tags:
- lossless
- docker
- notpriv
only:
- tags
artifacts:
expire_in: 1 week
paths:
- public
allow_failure: true

11
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,11 @@
{
"version": "0.2.0",
"configurations": [
{
"command": "npm test",
"name": "Run npm test",
"request": "launch",
"type": "node-terminal"
}
]
}

26
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,26 @@
{
"json.schemas": [
{
"fileMatch": ["/npmextra.json"],
"schema": {
"type": "object",
"properties": {
"npmci": {
"type": "object",
"description": "settings for npmci"
},
"gitzone": {
"type": "object",
"description": "settings for gitzone",
"properties": {
"projectType": {
"type": "string",
"enum": ["website", "element", "service", "npm", "wcc"]
}
}
}
}
}
}
]
}

4
cli.child.ts Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env node
process.env.CLI_CALL = 'true';
import * as cliTool from './ts/index.js';
cliTool.runCli();

4
cli.js Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env node
// Signal to the library that it was invoked via the CLI wrapper.
process.env.CLI_CALL = 'true';
// Dynamic import guarantees CLI_CALL is set before the module body executes.
const cliTool = await import('./dist_ts/index.js');
cliTool.runCli();

5
cli.ts.js Normal file
View File

@ -0,0 +1,5 @@
#!/usr/bin/env node
// Dev entry point: runs the TypeScript CLI directly via tsrun (no build step).
process.env.CLI_CALL = 'true';
import * as tsrun from '@git.zone/tsrun';
tsrun.runPath('./cli.child.js', import.meta.url);

19
license Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2019 Lossless GmbH (hello@lossless.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

19
npmextra.json Normal file
View File

@ -0,0 +1,19 @@
{
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"gitscope": "losslessone/services/initzone",
"gitrepo": "spark",
"description": "sparks the servezone services",
"npmPackagename": "@losslessone_private/spark",
"license": "MIT",
"projectDomain": "https://lossless.one"
}
},
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "private",
"npmRegistryUrl": "verdaccio.lossless.one"
}
}

58
package.json Normal file
View File

@ -0,0 +1,58 @@
{
"name": "@serve.zone/spark",
"version": "1.0.82",
"private": false,
"description": "sparks the servezone services",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"author": "Task Venture Capital GmbH",
"license": "MIT",
"scripts": {
"test": "(tstest test/ --web)",
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc"
},
"bin": {
"spark": "./cli.js"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.1.29",
"@git.zone/tsrun": "^1.2.39",
"@git.zone/tstest": "^1.0.60",
"@push.rocks/tapbundle": "^5.0.4",
"@types/node": "20.10.0"
},
"dependencies": {
"@apiclient.xyz/docker": "^1.0.103",
"@push.rocks/npmextra": "^5.0.13",
"@push.rocks/projectinfo": "^5.0.1",
"@push.rocks/qenv": "^6.0.5",
"@push.rocks/smartcli": "^4.0.6",
"@push.rocks/smartdaemon": "^2.0.3",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.0.14",
"@push.rocks/smartjson": "^5.0.5",
"@push.rocks/smartlog": "^3.0.3",
"@push.rocks/smartlog-destination-local": "^9.0.0",
"@push.rocks/smartpath": "^5.0.5",
"@push.rocks/smartshell": "^3.0.5",
"@push.rocks/smartupdate": "^2.0.4",
"@push.rocks/taskbuffer": "^3.0.10"
},
"files": [
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
"readme.md"
],
"browserslist": [
"last 1 chrome versions"
],
"type": "module"
}

5605
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

40
readme.md Normal file
View File

@ -0,0 +1,40 @@
# @losslessone/services/initzone/spark
sparks the servezone services
## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@losslessone_private/spark)
* [gitlab.com (source)](https://gitlab.com/losslessone/services/initzone/spark)
* [github.com (source mirror)](https://github.com/losslessone/services/initzone/spark)
* [docs (typedoc)](https://losslessone/services/initzone.gitlab.io/spark/)
## Status for master
Status Category | Status Badge
-- | --
GitLab Pipelines | [![pipeline status](https://gitlab.com/losslessone/services/initzone/spark/badges/master/pipeline.svg)](https://lossless.cloud)
GitLab Pipeline Test Coverage | [![coverage report](https://gitlab.com/losslessone/services/initzone/spark/badges/master/coverage.svg)](https://lossless.cloud)
npm | [![npm downloads per month](https://badgen.net/npm/dy/@losslessone_private/spark)](https://lossless.cloud)
Snyk | [![Known Vulnerabilities](https://badgen.net/snyk/losslessone/services/initzone/spark)](https://lossless.cloud)
TypeScript Support | [![TypeScript](https://badgen.net/badge/TypeScript/>=%203.x/blue?icon=typescript)](https://lossless.cloud)
node Support | [![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [![Code Style](https://badgen.net/badge/style/prettier/purple)](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [![PackagePhobia](https://badgen.net/packagephobia/install/@losslessone_private/spark)](https://lossless.cloud)
PackagePhobia (package size on registry) | [![PackagePhobia](https://badgen.net/packagephobia/publish/@losslessone_private/spark)](https://lossless.cloud)
BundlePhobia (total size when bundled) | [![BundlePhobia](https://badgen.net/bundlephobia/minzip/@losslessone_private/spark)](https://lossless.cloud)
## Usage
## Contribution
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
For further information read the linked docs at the top of this readme.
## Legal
> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)

11
test/test.nonci.ts Normal file
View File

@ -0,0 +1,11 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as spark from '../ts/index.js';

// Instance under test, shared across tap tests.
let sparkUnderTest: spark.Spark;

tap.test('should create a spark instance', async () => {
  sparkUnderTest = new spark.Spark();
  expect(sparkUnderTest).toBeInstanceOf(spark.Spark);
});

tap.start();

8
ts/00_commitinfo_data.ts Normal file
View File

@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @pushrocks/commitinfo
 */
// NOTE(review): version here (1.0.83) is one ahead of package.json (1.0.82);
// presumably written by the release tooling during the version bump — confirm
// the generator keeps these in sync rather than editing by hand.
export const commitinfo = {
  name: '@serve.zone/spark',
  version: '1.0.83',
  description: 'sparks the servezone services'
}

7
ts/index.ts Normal file
View File

@ -0,0 +1,7 @@
export * from './spark.classes.spark.js';

import * as cli from './spark.cli.js';

/**
 * CLI entry point consumed by cli.js / cli.child.ts.
 * Awaits the underlying CLI runner so that callers can observe completion
 * and so rejections propagate instead of becoming unhandled (the original
 * fired cli.runCli() without awaiting inside an async function).
 */
export const runCli = async () => {
  await cli.runCli();
};

View File

@ -0,0 +1,9 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';
/**
 * Configuration holder that keeps a back-reference to its owning Spark
 * instance. (No configuration fields yet.)
 */
export class SparkConfig {
  // Parameter property: declares and assigns sparkRef in one step,
  // equivalent to the explicit field + constructor assignment.
  constructor(public sparkRef: Spark) {}
}

12
ts/spark.classes.info.ts Normal file
View File

@ -0,0 +1,12 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
/**
 * Exposes npm project metadata (name/version from package.json in
 * paths.packageDir) to the rest of spark.
 */
export class SparkInfo {
  // Loaded eagerly at construction from the package directory.
  public projectInfo = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);

  // Parameter property replaces the explicit field + assignment.
  constructor(public sparkRef: Spark) {}
}

View File

@ -0,0 +1,12 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';
/**
 * Local key/value configuration store for spark, persisted via npmextra's
 * KeyValueStore under the 'custom'/'spark' namespace.
 */
export class SparkLocalConfig {
  // Field initializer runs at construction, same effect as assigning
  // inside the constructor body.
  private kvStore: plugins.npmextra.KeyValueStore = new plugins.npmextra.KeyValueStore('custom', 'spark');

  constructor(public sparkRef: Spark) {}
}

27
ts/spark.classes.spark.ts Normal file
View File

@ -0,0 +1,27 @@
import * as plugins from './spark.plugins.js';
import { SparkTaskManager } from './spark.classes.taskmanager.js';
import { SparkInfo } from './spark.classes.info.js';
import { SparkUpdateManager } from './spark.classes.updatemanager.js';
import { logger } from './spark.logging.js';
import { SparkLocalConfig } from './spark.classes.localconfig.js';
/**
 * Root object wiring together the spark daemon's subsystems:
 * local config, project info, scheduled tasks, and the docker-based
 * service update manager.
 */
export class Spark {
  public smartdaemon: plugins.smartdaemon.SmartDaemon;
  public sparkLocalConfig: SparkLocalConfig;
  public sparkTaskManager: SparkTaskManager;
  public sparkInfo: SparkInfo;
  public sparkUpdateManager: SparkUpdateManager;

  constructor() {
    this.smartdaemon = new plugins.smartdaemon.SmartDaemon();
    this.sparkLocalConfig = new SparkLocalConfig(this);
    this.sparkInfo = new SparkInfo(this);
    this.sparkTaskManager = new SparkTaskManager(this);
    this.sparkUpdateManager = new SparkUpdateManager(this);
  }

  /**
   * Starts daemon mode: activates the update manager (docker swarm) first,
   * then schedules the recurring tasks.
   */
  public async daemonStart() {
    await this.sparkUpdateManager.start();
    // fix: await so scheduling errors propagate to the caller instead of
    // becoming an unhandled rejection (start() is async).
    await this.sparkTaskManager.start();
  }
}

View File

@ -0,0 +1,100 @@
import * as plugins from './spark.plugins.js';
import { Spark } from './index.js';
import * as paths from './spark.paths.js';
import { logger } from './spark.logging.js';
/**
 * Owns the recurring maintenance tasks of the spark daemon and their
 * cron schedules (via taskbuffer).
 */
export class SparkTaskManager {
  public sparkRef: Spark;
  public taskmanager: plugins.taskbuffer.TaskManager;

  // tasks
  public checkinSlackTask: plugins.taskbuffer.Task;
  public updateSpark: plugins.taskbuffer.Task;
  public updateHost: plugins.taskbuffer.Task;
  public updateCloudly: plugins.taskbuffer.Task;

  constructor(sparkRefArg: Spark) {
    this.sparkRef = sparkRefArg;
    this.taskmanager = new plugins.taskbuffer.TaskManager();

    // checkinOnSlack: hourly heartbeat (currently only logs)
    this.checkinSlackTask = new plugins.taskbuffer.Task({
      name: 'checkinSlack',
      taskFunction: async () => {
        logger.log('ok', 'running hourly checkin now');
        logger.log('info', 'completed hourly checkin');
      },
    });

    // updateSpark: checks the public npm registry for a newer spark version;
    // if found, stops scheduling, upgrades globally, refreshes the daemon
    // definition and exits so systemd restarts the new version.
    this.updateSpark = new plugins.taskbuffer.Task({
      name: 'updateSpark',
      taskFunction: async () => {
        const smartupdateInstance = new plugins.smartupdate.SmartUpdate({
          npmRegistryUrl: 'https://registry.npmjs.org',
        });
        const shouldUpdate = await smartupdateInstance.check(
          this.sparkRef.sparkInfo.projectInfo.name,
          this.sparkRef.sparkInfo.projectInfo.version
        );
        if (shouldUpdate) {
          await this.stop();
          const smartshellInstance = new plugins.smartshell.Smartshell({
            executor: 'bash',
          });
          await smartshellInstance.exec(`cd / && npm upgrade -g && spark updatedaemon`);
          logger.log('info', 'Cooling off before restart...');
          await plugins.smartdelay.delayFor(5000);
          logger.log('ok', '######## Trying to exit / Restart expected... ########');
          // rely on the process supervisor (smartdaemon/systemd) to restart us
          process.exit(0);
        }
      },
    });

    // updateHost: OS package upgrade; scheduling is paused while apt runs
    // and resumed afterwards.
    this.updateHost = new plugins.taskbuffer.Task({
      name: 'updateHost',
      taskFunction: async () => {
        await this.stop();
        const smartshellInstance = new plugins.smartshell.Smartshell({
          executor: 'bash',
        });
        await smartshellInstance.exec(
          `apt-get update && apt-get upgrade -y --force-yes && apt-get autoremove -y --force-yes && apt-get autoclean -y --force-yes`
        );
        await this.start();
      },
    });

    // updateCloudly: redeploys docker services from spark.json.
    this.updateCloudly = new plugins.taskbuffer.Task({
      name: 'updateCloudly',
      taskFunction: async () => {
        logger.log('info', 'now running updateCloudly task');
        // fix: await the update so failures surface through the task instead
        // of leaving a floating promise.
        await this.sparkRef.sparkUpdateManager.updateServices();
      },
    });
  }

  /**
   * start the taskmanager: registers all tasks with their cron schedules
   * and starts scheduling.
   */
  public async start() {
    this.taskmanager.addAndScheduleTask(this.checkinSlackTask, '0 0 * * * *');
    this.taskmanager.addAndScheduleTask(this.updateSpark, '0 * * * * *');
    this.taskmanager.addAndScheduleTask(this.updateHost, '0 0 0 * * *');
    this.taskmanager.addAndScheduleTask(this.updateCloudly, '30 */2 * * * *');
    this.taskmanager.start();
  }

  /**
   * stops the taskmanager: deschedules every task and halts scheduling.
   */
  public async stop() {
    this.taskmanager.descheduleTask(this.checkinSlackTask);
    this.taskmanager.descheduleTask(this.updateSpark);
    this.taskmanager.descheduleTask(this.updateHost);
    this.taskmanager.descheduleTask(this.updateCloudly);
    this.taskmanager.stop();
  }
}

View File

@ -0,0 +1,99 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
import { logger } from './spark.logging.js';
/**
 * Keeps the docker services described in <home>/serve.zone/spark/spark.json
 * deployed and up to date on the local docker swarm.
 */
export class SparkUpdateManager {
  public sparkRef: Spark;
  public dockerHost: plugins.docker.DockerHost;
  public smartupdate: plugins.smartupdate.SmartUpdate;

  constructor(sparkrefArg: Spark) {
    this.sparkRef = sparkrefArg;
    this.dockerHost = new plugins.docker.DockerHost();
    this.smartupdate = new plugins.smartupdate.SmartUpdate();
  }

  /**
   * start the instance: makes sure the local docker host runs in swarm mode.
   */
  public async start() {
    await this.dockerHost.activateSwarm();
  }

  /**
   * Reads spark.json and (re)deploys every service listed in it:
   * pulls the image, recreates the per-service secret, and creates the
   * docker service. Services that are already up to date are skipped.
   */
  public async updateServices() {
    if (
      // NOTE(review): isDirectory is not awaited here while fileExists is —
      // confirm smartfile.fs.isDirectory is synchronous; if it returns a
      // Promise this condition is always truthy.
      plugins.smartfile.fs.isDirectory(plugins.path.join(paths.homeDir, 'serve.zone/spark')) &&
      (await plugins.smartfile.fs.fileExists(
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      ))
    ) {
      const sparkJson = plugins.smartfile.fs.toObjectSync(
        plugins.path.join(paths.homeDir, 'serve.zone/spark/spark.json')
      );
      // flatten the keyed services object into a list
      const services: Array<{
        name: string;
        image: string;
        url: string;
        port: string;
        environment: string;
        secretJson: any;
      }> = [];
      for (const serviceKey of Object.keys(sparkJson.services)) {
        services.push({
          name: serviceKey,
          image: sparkJson.services[serviceKey].image,
          url: sparkJson.services[serviceKey].url,
          environment: sparkJson.services[serviceKey].environment,
          port: sparkJson.services[serviceKey].port,
          secretJson: sparkJson.services[serviceKey].secretJson,
        });
      }
      for (const service of services) {
        const existingService = await plugins.docker.DockerService.getServiceByName(
          this.dockerHost,
          service.name
        );
        const existingServiceSecret = await plugins.docker.DockerSecret.getSecretByName(
          this.dockerHost,
          `${service.name}Secret`
        );
        if (existingService) {
          const needsUpdate: boolean = await existingService.needsUpdate();
          if (!needsUpdate) {
            logger.log('info', `not needing update.`);
            // fix: was `return`, which aborted the whole loop and skipped
            // every remaining service; skip only this one.
            continue;
          }
          logger.log('ok', `${service.name} needs to be updated!`);
          await existingService.remove();
          // fix: guard — the service can exist while its secret does not.
          if (existingServiceSecret) {
            await existingServiceSecret.remove();
          }
        }
        if (!existingService && existingServiceSecret) {
          await existingServiceSecret.remove();
        }
        const newServiceImage = await plugins.docker.DockerImage.createFromRegistry(
          this.dockerHost,
          {
            imageUrl: service.image,
          }
        );
        const newServiceSecret = await plugins.docker.DockerSecret.createSecret(this.dockerHost, {
          name: `${service.name}Secret`,
          contentArg: plugins.smartjson.stringify(service.secretJson),
          version: await newServiceImage.getVersion(),
          labels: {},
        });
        const newService = await plugins.docker.DockerService.createService(this.dockerHost, {
          image: newServiceImage,
          labels: {},
          name: service.name,
          networkAlias: service.name,
          networks: [],
          secrets: [newServiceSecret],
          ports: [`${service.port}:${service.secretJson.SERVEZONE_PORT}`],
        });
      }
      // fix: moved out of the loop — log once after ALL services are handled.
      logger.log('success', 'updated all services!');
    }
  }
}

95
ts/spark.cli.ts Normal file
View File

@ -0,0 +1,95 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
import { Spark } from './spark.classes.spark.js';
import { logger } from './spark.logging.js';
/**
 * Entry point for the spark CLI. Registers the commands
 * installdaemon / updatedaemon / asdaemon / logs / prune with smartcli
 * and starts argument parsing.
 */
export const runCli = async () => {
  // shell used for systemctl / docker / journalctl invocations below
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
  });
  const sparkInstance = new Spark();
  const smartcliInstance = new plugins.smartcli.Smartcli();
  // default action when no command is given: list available commands
  smartcliInstance.standardCommand().subscribe(async () => {
    logger.log('info', 'no action specified! you can type:');
    logger.log('info', '* installdaemon');
  });
  // installdaemon: register spark as a smartdaemon service, enable and start it
  smartcliInstance.addCommand('installdaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently not running as daemon yet');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.save();
    await sparkService.enable();
    await sparkService.start();
  });
  // updatedaemon: re-save the service definition (e.g. new version) and reload it
  smartcliInstance.addCommand('updatedaemon').subscribe(async (argvArg) => {
    logger.log('ok', 'we are apparently trying to update the daemon for spark');
    logger.log('info', 'trying to set this up now');
    const sparkService = await sparkInstance.smartdaemon.addService({
      name: 'spark',
      version: sparkInstance.sparkInfo.projectInfo.version,
      command: 'spark asdaemon',
      description: 'spark daemon service',
      workingDir: paths.packageDir,
    });
    await sparkService.reload();
  });
  // asdaemon: long-running mode invoked by the daemon service itself
  smartcliInstance.addCommand('asdaemon').subscribe(async (argvArg) => {
    logger.log('success', 'looks like we are running as daemon now');
    logger.log('info', 'starting spark in daemon mode');
    await sparkInstance.daemonStart();
  });
  // logs: follow the daemon's journal; intentionally not awaited
  // (journalctl -f never exits)
  smartcliInstance.addCommand('logs').subscribe(async (argvArg) => {
    smartshellInstance.exec(`journalctl -u smartdaemon_spark -f`);
  });
  // prune: destructive reset — tears down all docker stacks/services/secrets,
  // the named networks, prunes the engine, restarts docker and finally hands
  // control back to the spark daemon. The 5s delays let docker settle
  // between destructive steps.
  smartcliInstance.addCommand('prune').subscribe(async (argvArg) => {
    // daemon
    await smartshellInstance.exec(`systemctl stop smartdaemon_spark`);
    logger.log('ok', 'stopped serverconfig daemon');
    await plugins.smartdelay.delayFor(5000);
    // services
    await smartshellInstance.exec(`docker stack rm $(docker stack ls -q)`);
    logger.log('ok', 'removed docker stacks');
    await plugins.smartdelay.delayFor(5000);
    // services
    await smartshellInstance.exec(`docker service rm $(docker service ls -q)`);
    logger.log('ok', 'removed docker services');
    await plugins.smartdelay.delayFor(5000);
    // secrets
    await smartshellInstance.exec(`docker secret rm $(docker secret ls -q)`);
    logger.log('ok', 'removed docker secrets');
    await plugins.smartdelay.delayFor(5000);
    // networks
    await smartshellInstance.exec(`docker network rm szncorechat sznwebgateway`);
    logger.log('ok', 'removed docker networks');
    await plugins.smartdelay.delayFor(5000);
    await smartshellInstance.exec(`docker system prune -af`);
    logger.log('ok', 'pruned docker system');
    await plugins.smartdelay.delayFor(5000);
    // restart docker
    await smartshellInstance.exec(`systemctl restart docker`);
    logger.log('ok', 'restarted the docker service');
    await plugins.smartdelay.delayFor(5000);
    // serverconfig daemon
    await smartshellInstance.exec(`systemctl start smartdaemon_spark`);
    logger.log('ok', 'handed over control back to serverconfig daemon');
  });
  smartcliInstance.startParse();
};

15
ts/spark.logging.ts Normal file
View File

@ -0,0 +1,15 @@
import * as plugins from './spark.plugins.js';
import * as paths from './spark.paths.js';
// Project metadata for the package this daemon runs from.
// NOTE(review): projectInfoNpm is not referenced by the logger below —
// confirm whether it is kept intentionally (e.g. for future log context)
// before removing.
const projectInfoNpm = new plugins.projectinfo.ProjectinfoNpm(paths.packageDir);

/**
 * Shared Smartlog instance for the spark daemon; all modules import this
 * rather than constructing their own logger.
 */
export const logger = new plugins.smartlog.Smartlog({
  logContext: {
    environment: 'production',
    runtime: 'node',
    zone: 'baremetal',
    company: null,
    companyunit: null,
    containerName: 'spark',
  }
});

4
ts/spark.paths.ts Normal file
View File

@ -0,0 +1,4 @@
import * as plugins from './spark.plugins.js';
// Absolute path to this package's root directory (one level above the
// compiled module's own directory).
export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
// Current user's home directory; the spark.json service config is looked up
// under <homeDir>/serve.zone/spark by the update manager.
export const homeDir = plugins.smartpath.get.home();

40
ts/spark.plugins.ts Normal file
View File

@ -0,0 +1,40 @@
// node native scope
import * as path from 'path';
export { path };
// @apiclient.xyz scope
import * as docker from '@apiclient.xyz/docker';
export { docker };
// @pushrocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as projectinfo from '@push.rocks/projectinfo';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdaemon from '@push.rocks/smartdaemon';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartjson from '@push.rocks/smartjson';
import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as smartpath from '@push.rocks/smartpath';
import * as smartshell from '@push.rocks/smartshell';
import * as smartupdate from '@push.rocks/smartupdate';
import * as taskbuffer from '@push.rocks/taskbuffer';
export {
npmextra,
projectinfo,
smartcli,
smartdaemon,
smartdelay,
smartfile,
smartjson,
smartlog,
smartlogDestinationLocal,
smartpath,
smartshell,
smartupdate,
taskbuffer,
};

14
tsconfig.json Normal file
View File

@ -0,0 +1,14 @@
{
"compilerOptions": {
"experimentalDecorators": true,
"useDefineForClassFields": false,
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"verbatimModuleSyntax": true
},
"exclude": [
"dist_*/**/*.d.ts"
]
}