feat(npm): Add npm package distribution support
Added an npm package wrapper to enable installation via npm while
maintaining the Deno binary distribution model.

New Files:
- package.json: npm package configuration with binary wrapper (sketched below)
- bin/spark-wrapper.js: Detects platform and executes correct binary
- scripts/install-binary.js: Downloads appropriate binary on npm install
- .npmignore: Excludes source files from npm package
- npmextra.json: npm extra configuration
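
The package.json wiring might look roughly like the sketch below.
This is a minimal illustration, not the actual file contents: the
bin and postinstall entries follow from the files listed above,
"type": "module" is implied by the wrapper's ESM imports, and all
other fields (version, description, etc.) are omitted:

  {
    "name": "@serve.zone/spark",
    "type": "module",
    "bin": {
      "spark": "./bin/spark-wrapper.js"
    },
    "scripts": {
      "postinstall": "node ./scripts/install-binary.js"
    }
  }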

Updated:
- readme.md: Added npm installation instructions

How It Works:
1. User runs: npm install -g @serve.zone/spark
2. Postinstall script (install-binary.js) downloads the correct
   pre-compiled binary for the user's platform from Gitea releases
   (see the sketch after this list)
3. Binary is cached in dist/binaries/
4. Wrapper script (spark-wrapper.js) executes the binary when user
   runs 'spark' command
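
The download logic in step 2 might look like this minimal sketch
(requires Node 18+ for global fetch). The release host, repo path,
and tag in BASE_URL are assumptions; only the target directory
(dist/binaries/) and the binary naming scheme come from this change:

  // scripts/install-binary.js -- illustrative sketch only
  import { mkdirSync, chmodSync, createWriteStream } from 'fs';
  import { Readable } from 'stream';
  import { pipeline } from 'stream/promises';
  import { join } from 'path';

  // Assumed URL pattern for Gitea release assets
  const BASE_URL = 'https://git.example.com/serve.zone/spark/releases/download/v1.0.0';

  // Same platform/arch mapping as bin/spark-wrapper.js below
  const plat = { darwin: 'macos', linux: 'linux', win32: 'windows' }[process.platform];
  const ext = process.platform === 'win32' ? '.exe' : '';
  const binaryName = `spark-${plat}-${process.arch}${ext}`;

  const targetDir = join(process.cwd(), 'dist', 'binaries');
  mkdirSync(targetDir, { recursive: true });
  const targetPath = join(targetDir, binaryName);

  const res = await fetch(`${BASE_URL}/${binaryName}`);
  if (!res.ok) throw new Error(`Download failed: ${res.status} ${res.statusText}`);

  // Stream the asset to disk, then mark it executable on POSIX systems
  await pipeline(Readable.fromWeb(res.body), createWriteStream(targetPath));
  if (process.platform !== 'win32') chmodSync(targetPath, 0o755);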

Supported via npm:
- Linux (x64, ARM64)
- macOS (Intel, Apple Silicon)
- Windows (x64)
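
Given the naming scheme in getBinaryName() below, these map to five
release assets:
- spark-linux-x64
- spark-linux-arm64
- spark-macos-x64
- spark-macos-arm64
- spark-windows-x64.exe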

This maintains the benefits of Deno compilation (no runtime deps)
while providing familiar npm-based installation for users who prefer it.
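
Typical flow after this change (install command from step 1 above):

  npm install -g @serve.zone/spark   # postinstall fetches the binary
  spark                              # wrapper execs the platform binary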

spark/bin/spark-wrapper.js:

#!/usr/bin/env node
/**
 * SPARK npm wrapper
 * This script executes the appropriate pre-compiled binary based on the current platform
 */
import { spawn } from 'child_process';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { existsSync } from 'fs';
import { platform, arch } from 'os';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
/**
 * Get the binary name for the current platform
 */
function getBinaryName() {
  const plat = platform();
  const architecture = arch();

  // Map Node's platform/arch identifiers to our binary naming scheme
  const platformMap = {
    'darwin': 'macos',
    'linux': 'linux',
    'win32': 'windows'
  };
  const archMap = {
    'x64': 'x64',
    'arm64': 'arm64'
  };

  const mappedPlatform = platformMap[plat];
  const mappedArch = archMap[architecture];

  if (!mappedPlatform || !mappedArch) {
    console.error(`Error: Unsupported platform/architecture: ${plat}/${architecture}`);
    console.error('Supported platforms: Linux, macOS, Windows');
    console.error('Supported architectures: x64, arm64');
    process.exit(1);
  }

  // Construct the binary name, e.g. spark-linux-x64 or spark-windows-x64.exe
  let binaryName = `spark-${mappedPlatform}-${mappedArch}`;
  if (plat === 'win32') {
    binaryName += '.exe';
  }

  return binaryName;
}
/**
 * Execute the binary
 */
function executeBinary() {
  const binaryName = getBinaryName();
  const binaryPath = join(__dirname, '..', 'dist', 'binaries', binaryName);

  // Check that the postinstall step actually placed the binary here
  if (!existsSync(binaryPath)) {
    console.error(`Error: Binary not found at ${binaryPath}`);
    console.error('This might happen if:');
    console.error('1. The postinstall script failed to run');
    console.error('2. The platform is not supported');
    console.error('3. The package was not installed correctly');
    console.error('');
    console.error('Try reinstalling the package:');
    console.error('  npm uninstall -g @serve.zone/spark');
    console.error('  npm install -g @serve.zone/spark');
    process.exit(1);
  }

  // Spawn the binary with all arguments passed through
  const child = spawn(binaryPath, process.argv.slice(2), {
    stdio: 'inherit',
    shell: false
  });

  // Surface spawn failures (e.g. missing execute permission)
  child.on('error', (err) => {
    console.error(`Error executing spark: ${err.message}`);
    process.exit(1);
  });
  // Mirror the child's exit status. If the child died from a signal,
  // remove our own handler for it before re-raising, so the default
  // action (termination) runs instead of looping back into our forwarder.
  child.on('exit', (code, signal) => {
    if (signal) {
      process.removeAllListeners(signal);
      process.kill(process.pid, signal);
    } else {
      process.exit(code ?? 0);
    }
  });
  // Forward termination signals to the child process
  const signals = ['SIGINT', 'SIGTERM', 'SIGHUP'];
  signals.forEach(signal => {
    process.on(signal, () => {
      if (!child.killed) {
        child.kill(signal);
      }
    });
  });
}

// Execute
executeBinary();