Compare commits
14 Commits
| SHA1 |
| --- |
| 2dcb10d233 |
| d53c46fa82 |
| 25e847a9ea |
| cc0ecb3f16 |
| 2cd0846c74 |
| 49ab40af09 |
| 5ff51ff88d |
| c578a3fdc1 |
| ad0352a712 |
| f921338fd6 |
| 614dae5ade |
| f87359fb97 |
| 21da75c09a |
| fe49d25765 |
package.json (21 lines changed)
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartarchive",
-  "version": "4.0.13",
+  "version": "4.0.20",
   "description": "work with archives",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
@@ -21,25 +21,26 @@
   },
   "homepage": "https://github.com/pushrocks/smartarchive#readme",
   "dependencies": {
-    "@push.rocks/smartfile": "^11.0.0",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile": "^11.0.4",
     "@push.rocks/smartpath": "^5.0.11",
     "@push.rocks/smartpromise": "^4.0.3",
     "@push.rocks/smartrequest": "^2.0.21",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.25",
+    "@push.rocks/smartstream": "^3.0.34",
-    "@push.rocks/smartunique": "^3.0.6",
+    "@push.rocks/smartunique": "^3.0.8",
     "@push.rocks/smarturl": "^3.0.7",
     "@types/tar-stream": "^3.1.3",
-    "fflate": "^0.8.1",
+    "fflate": "^0.8.2",
-    "file-type": "^18.7.0",
+    "file-type": "^19.0.0",
-    "tar-stream": "^3.1.6",
+    "tar-stream": "^3.1.7",
     "through": "^2.3.8"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.66",
+    "@git.zone/tsbuild": "^2.1.72",
     "@git.zone/tsrun": "^1.2.44",
-    "@git.zone/tstest": "^1.0.84",
+    "@git.zone/tstest": "^1.0.88",
-    "@push.rocks/tapbundle": "^5.0.15"
+    "@push.rocks/tapbundle": "^5.0.17"
   },
   "private": false,
   "files": [
```
pnpm-lock.yaml (generated, 2191 lines changed): diff suppressed because it is too large.
```diff
@@ -47,4 +47,4 @@ tap.skip.test('should extract a b2zip', async () => {
   );
 })
 
-tap.start();
+await tap.start();
```
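The only substantive change is awaiting `tap.start()`, which returns a promise; with ESM top-level await the process now stays alive until every queued test has settled. A minimal sketch of the pattern, assuming tapbundle's jest-style `expect` (the test body here is illustrative, not from this repo):

```typescript
import { tap, expect } from '@push.rocks/tapbundle';

tap.test('addition works', async () => {
  // placeholder assertion for illustration
  expect(1 + 1).toEqual(2);
});

// Awaiting the returned promise (valid at the top level of an ES module)
// ensures the process does not exit before all tests have run.
await tap.start();
```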
```diff
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartarchive',
-  version: '4.0.13',
+  version: '4.0.20',
   description: 'work with archives'
 }
```
```diff
@@ -49,13 +49,16 @@ export function unbzip2Stream() {
     } catch (e) {
       console.error(e);
       broken = true;
-      return false;
     }
   };
+  let counter = 0;
   return new plugins.smartstream.SmartDuplex({
+    objectMode: true,
+    name: 'bzip2',
+    debug: false,
+    highWaterMark: 1,
     writeFunction: async function (data, streamTools) {
-      //console.error('received', data.length,'bytes in', typeof data);
+      // console.log(`got chunk ${counter++}`)
       bufferQueue.push(data);
       hasBytes += data.length;
       if (bitReader === null) {
@@ -66,6 +69,10 @@ export function unbzip2Stream() {
       while (!broken && hasBytes - bitReader.bytesRead + 1 >= (25000 + 100000 * blockSize || 4)) {
         //console.error('decompressing with', hasBytes - bitReader.bytesRead + 1, 'bytes in buffer');
         const result = await decompressAndPush();
+        if (!result) {
+          continue;
+        }
+        // console.log(result.toString());
         await streamTools.push(result);
       }
     },
@@ -73,11 +80,13 @@
       //console.error(x,'last compressing with', hasBytes, 'bytes in buffer');
       while (!broken && bitReader && hasBytes > bitReader.bytesRead) {
         const result = await decompressAndPush();
-        streamTools.push(result);
+        if (!result) {
+          continue;
+        }
+        await streamTools.push(result);
       }
       if (!broken) {
         if (streamCRC !== null) this.emit('error', new Error('input stream ended prematurely'));
-        this.queue(null);
       }
     },
   });
```
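With `return false;` gone from the catch block, `decompressAndPush()` can now resolve to `undefined`, so both loops guard before pushing; forwarding a null-ish chunk risks signalling end-of-stream to the consumer. The same guard expressed with plain `node:stream` (a minimal sketch; `maybeDecompress` is a hypothetical stand-in, not the library code):

```typescript
import { Transform } from 'node:stream';

// Hypothetical stand-in for decompressAndPush(): returns null when a chunk
// yields nothing to emit.
const maybeDecompress = (chunk: Buffer): Buffer | null =>
  chunk.length > 0 ? chunk : null;

const skipEmptyResults = new Transform({
  transform(chunk: Buffer, _encoding, callback) {
    const result = maybeDecompress(chunk);
    if (!result) {
      callback(); // consume the input, emit nothing, keep the stream alive
      return;
    }
    callback(null, result); // only forward real payloads
  },
});
```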
ts/classes.archiveanalyzer.ts

```diff
@@ -4,7 +4,7 @@ import * as plugins from './plugins.js';
 export interface IAnalyzedResult {
   fileType: plugins.fileType.FileTypeResult;
   isArchive: boolean;
-  resultStream: plugins.smartstream.PassThrough;
+  resultStream: plugins.smartstream.SmartDuplex;
   decompressionStream: plugins.stream.Transform | plugins.stream.Duplex | plugins.tarStream.Extract;
 }
 
@@ -36,37 +36,41 @@ export class ArchiveAnalyzer {
     switch (mimeTypeArg) {
       case 'application/gzip':
         return this.smartArchiveRef.gzipTools.getDecompressionStream();
+      case 'application/zip':
+        return this.smartArchiveRef.zipTools.getDecompressionStream();
       case 'application/x-bzip2':
         return await this.smartArchiveRef.bzip2Tools.getDecompressionStream(); // replace with your own bzip2 decompression stream
       case 'application/x-tar':
         return this.smartArchiveRef.tarTools.getDecompressionStream(); // replace with your own tar decompression stream
       default:
         // Handle unsupported formats or no decompression needed
-        return new plugins.smartstream.PassThrough();
+        return plugins.smartstream.createPassThrough();
     }
   }
 
   public getAnalyzedStream() {
     let firstRun = true;
-    const resultStream = new plugins.smartstream.PassThrough();
+    const resultStream = plugins.smartstream.createPassThrough();
     const analyzerstream = new plugins.smartstream.SmartDuplex<Buffer, IAnalyzedResult>({
       readableObjectMode: true,
       writeFunction: async (chunkArg: Buffer, streamtools) => {
-        const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
-        const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
-        resultStream.push(chunkArg);
         if (firstRun) {
           firstRun = false;
+          const fileType = await plugins.fileType.fileTypeFromBuffer(chunkArg);
+          const decompressionStream = await this.getDecompressionStream(fileType?.mime as any);
+          /**
+           * analyzed stream emits once with this object
+           */
           const result: IAnalyzedResult = {
             fileType,
             isArchive: await this.mimeTypeIsArchive(fileType?.mime),
             resultStream,
             decompressionStream,
           };
-          streamtools.push(result);
+          await streamtools.push(result);
-          streamtools.push(null);
-          return null;
         }
+        await resultStream.backpressuredPush(chunkArg);
+        return null;
       },
       finalFunction: async (tools) => {
         resultStream.push(null);
```
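The reworked `writeFunction` sniffs the file type only on the first chunk, emits a single `IAnalyzedResult`, and then replays every chunk (the first included) into `resultStream` with backpressure. The idea in isolation, using the real `file-type` API and plain node streams (helper name and return shape are illustrative):

```typescript
import { PassThrough } from 'node:stream';
import { fileTypeFromBuffer } from 'file-type';

// Illustrative helper: detect the mime type from the first chunk, then
// replay the full byte stream so a downstream decompressor misses nothing.
async function sniffAndReplay(
  source: AsyncIterable<Buffer>
): Promise<{ mime: string | undefined; replay: PassThrough }> {
  const replay = new PassThrough();
  const iterator = source[Symbol.asyncIterator]();
  const first = await iterator.next();
  const mime = first.done
    ? undefined
    : (await fileTypeFromBuffer(first.value))?.mime; // e.g. 'application/gzip'
  (async () => {
    if (!first.done) replay.write(first.value); // replay the sniffed chunk too
    for (let r = await iterator.next(); !r.done; r = await iterator.next()) {
      if (!replay.write(r.value)) {
        await new Promise((resolve) => replay.once('drain', resolve)); // backpressure
      }
    }
    replay.end();
  })();
  return { mime, replay };
}
```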
ts/classes.smartarchive.ts

```diff
@@ -1,9 +1,10 @@
 import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 
+import { Bzip2Tools } from './classes.bzip2tools.js';
 import { GzipTools } from './classes.gziptools.js';
 import { TarTools } from './classes.tartools.js';
-import { Bzip2Tools } from './classes.bzip2tools.js';
+import { ZipTools } from './classes.ziptools.js';
 
 import { ArchiveAnalyzer, type IAnalyzedResult } from './classes.archiveanalyzer.js';
 
@@ -32,9 +33,10 @@ export class SmartArchive {
   }
 
   // INSTANCE
-  public tarTools = new TarTools(this);
   public gzipTools = new GzipTools(this);
   public bzip2Tools = new Bzip2Tools(this);
+  public tarTools = new TarTools(this);
+  public zipTools = new ZipTools(this);
   public archiveAnalyzer = new ArchiveAnalyzer(this);
 
   public sourceUrl: string;
@@ -77,7 +79,8 @@ export class SmartArchive {
   public async exportToFs(targetDir: string, fileNameArg?: string): Promise<void> {
     const done = plugins.smartpromise.defer<void>();
     const streamFileStream = await this.exportToStreamOfStreamFiles();
-    streamFileStream.pipe(new plugins.smartstream.SmartDuplex({
+    streamFileStream.pipe(
+      new plugins.smartstream.SmartDuplex({
         objectMode: true,
         writeFunction: async (chunkArg: plugins.smartfile.StreamFile, streamtools) => {
           const done = plugins.smartpromise.defer<void>();
@@ -85,19 +88,23 @@ export class SmartArchive {
           const streamFile = chunkArg;
           const readStream = await streamFile.createReadStream();
           await plugins.smartfile.fs.ensureDir(targetDir);
-          const writePath = plugins.path.join(targetDir, (streamFile.relativeFilePath || fileNameArg));
+          const writePath = plugins.path.join(
+            targetDir,
+            streamFile.relativeFilePath || fileNameArg
+          );
           await plugins.smartfile.fs.ensureDir(plugins.path.dirname(writePath));
           const writeStream = plugins.smartfile.fsStream.createWriteStream(writePath);
           readStream.pipe(writeStream);
           writeStream.on('finish', () => {
             done.resolve();
-          })
+          });
           await done.promise;
         },
         finalFunction: async () => {
           done.resolve();
-        }
+        },
-      }));
+      })
+    );
     return done.promise;
   }
 
@@ -114,21 +121,27 @@ export class SmartArchive {
       async (analyzedResultChunk) => {
         if (analyzedResultChunk.fileType?.mime === 'application/x-tar') {
           const tarStream = analyzedResultChunk.decompressionStream as plugins.tarStream.Extract;
-          tarStream.on(
-            'entry',
-            async (header, stream, next) => {
+          tarStream.on('entry', async (header, stream, next) => {
             const streamfile = plugins.smartfile.StreamFile.fromStream(stream, header.name);
             streamFileIntake.push(streamfile);
             stream.on('end', function () {
               next(); // ready for next entry
             });
-            }
-          );
+          });
           tarStream.on('finish', function () {
             console.log('finished');
             streamFileIntake.signalEnd();
           });
           analyzedResultChunk.resultStream.pipe(analyzedResultChunk.decompressionStream);
+        } else if (analyzedResultChunk.fileType?.mime === 'application/zip') {
+          analyzedResultChunk.resultStream
+            .pipe(analyzedResultChunk.decompressionStream)
+            .pipe(new plugins.smartstream.SmartDuplex({
+              objectMode: true,
+              writeFunction: async (streamFileArg: plugins.smartfile.StreamFile, streamtools) => {
+                streamFileIntake.push(streamFileArg);
+              },
+            }));
         } else if (analyzedResultChunk.isArchive && analyzedResultChunk.decompressionStream) {
           analyzedResultChunk.resultStream
             .pipe(analyzedResultChunk.decompressionStream)
@@ -142,8 +155,9 @@ export class SmartArchive {
           streamFileIntake.push(streamFile);
           streamFileIntake.signalEnd();
         }
-      }, {
-        objectMode: true
+      },
+      {
+        objectMode: true,
       }
     );
```
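Taken together, the analyzer result now drives three paths: tar entries stream out one by one, zip entries arrive as ready-made `StreamFile` objects from the new zip pipeline, and other archives flow through their decompression stream. A consumer-side sketch for orientation; `fromArchiveUrl` does not appear in this diff, so treat it as an assumed entry point rather than a verified API:

```typescript
import { SmartArchive } from '@push.rocks/smartarchive';

// Assumed usage: only exportToFs is visible in the diff above; the static
// constructor here is a guess at the intended public API.
const archive = await SmartArchive.fromArchiveUrl(
  'https://example.com/some-archive.tar.gz'
);
await archive.exportToFs('./unpacked'); // writes each entry under ./unpacked
```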
ts/classes.ziptools.ts (new file, 77 lines)

```diff
@@ -0,0 +1,77 @@
+import type { SmartArchive } from './classes.smartarchive.js';
+import * as plugins from './plugins.js';
+
+class DecompressZipTransform extends plugins.smartstream.SmartDuplex<ArrayBufferLike> {
+  private streamtools: plugins.smartstream.IStreamTools;
+  private unzipper = new plugins.fflate.Unzip(async (fileArg) => {
+    let resultBuffer: Buffer;
+    fileArg.ondata = async (dataArg, dat, final) => {
+      resultBuffer ? resultBuffer = Buffer.concat([resultBuffer, Buffer.from(dat)])
+        : resultBuffer = Buffer.from(dat);
+      if (final) {
+        const streamFile = plugins.smartfile.StreamFile.fromBuffer(resultBuffer);
+        streamFile.relativeFilePath = fileArg.name;
+        this.streamtools.push(fileArg);
+      }
+    }
+    fileArg.start();
+  });
+  constructor() {
+    super({
+      objectMode: true,
+      writeFunction: async (chunkArg: Buffer, streamtoolsArg) => {
+        this.streamtools ? null : this.streamtools = streamtoolsArg;
+        this.unzipper.push(chunkArg, false);
+      },
+      finalFunction: async () => {
+        this.unzipper.push(Buffer.from(''), true);
+        await plugins.smartdelay.delayFor(0);
+        this.streamtools.push(null);
+      }
+    });
+    this.unzipper.register(plugins.fflate.UnzipInflate);
+  }
+}
+
+// This class wraps fflate's zip in a Node.js Transform stream for compression
+export class CompressZipTransform extends plugins.stream.Transform {
+  files: { [fileName: string]: Uint8Array };
+
+  constructor() {
+    super();
+    this.files = {};
+  }
+
+  _transform(chunk: Buffer, encoding: BufferEncoding, callback: plugins.stream.TransformCallback) {
+    // Simple example: storing chunks in memory before finalizing ZIP in _flush
+    this.files['file.txt'] = new Uint8Array(chunk);
+    callback();
+  }
+
+  _flush(callback: plugins.stream.TransformCallback) {
+    plugins.fflate.zip(this.files, (err, zipped) => {
+      if (err) {
+        callback(err);
+      } else {
+        this.push(Buffer.from(zipped));
+        callback();
+      }
+    });
+  }
+}
+
+export class ZipTools {
+  smartArchiveRef: SmartArchive;
+
+  constructor(smartArchiveRefArg: SmartArchive) {
+    this.smartArchiveRef = smartArchiveRefArg;
+  }
+
+  public getCompressionStream() {
+    return new CompressZipTransform();
+  }
+
+  public getDecompressionStream() {
+    return new DecompressZipTransform();
+  }
+}
```
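Two details worth flagging in `DecompressZipTransform`: fflate's `ondata` callback signature is `(err, data, final)`, so the parameter the wrapper names `dataArg` is actually the error slot, and the final push sends `fileArg` (fflate's raw file record) even though the freshly built `streamFile` looks like the intended payload. For reference, a self-contained sketch of fflate's streaming unzip API as wrapped above:

```typescript
import * as fflate from 'fflate';

// Build a small zip in memory so the demo is self-contained.
const zipBytes = fflate.zipSync({ 'hello.txt': fflate.strToU8('hi there') });

const collected: Record<string, Uint8Array[]> = {};
const unzipper = new fflate.Unzip((file) => {
  collected[file.name] = [];
  // fflate's ondata signature is (err, data, final)
  file.ondata = (err, data, final) => {
    if (err) throw err;
    collected[file.name].push(data);
    if (final) console.log(`${file.name} fully decompressed`);
  };
  file.start(); // begin streaming this entry's decompressed bytes
});
unzipper.register(fflate.UnzipInflate); // support DEFLATE-compressed entries
unzipper.push(zipBytes, true); // feed bytes; `true` marks the final chunk
```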
ts/plugins.ts

```diff
@@ -6,6 +6,7 @@ export { path, stream };
 
 // @pushrocks scope
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartdelay from '@push.rocks/smartdelay';
 import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrequest from '@push.rocks/smartrequest';
@@ -14,7 +15,7 @@ import * as smartstream from '@push.rocks/smartstream';
 import * as smartrx from '@push.rocks/smartrx';
 import * as smarturl from '@push.rocks/smarturl';
 
-export { smartfile, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
+export { smartfile, smartdelay, smartpath, smartpromise, smartrequest, smartunique, smartstream, smartrx, smarturl };
 
 // third party scope
 import * as fileType from 'file-type';
```