Compare commits
14 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a3255fd1fb | |||
| d6fb6e527e | |||
| 96bafec720 | |||
| 86f47ff743 | |||
| 38c134f084 | |||
| 25372bf97d | |||
| f521530eed | |||
| dd81d65958 | |||
| d1ef48560d | |||
| 49d62e20a4 | |||
| 4f8443d33f | |||
| 2fb838d1bd | |||
| b4a1ff5eab | |||
| d3e3905e7f |
53
changelog.md
53
changelog.md
@@ -1,5 +1,58 @@
|
||||
# Changelog
|
||||
|
||||
## 2026-03-05 - 4.1.18 - fix(mod_compiler)
|
||||
add diagnostic logging of output directory states after compilation and after import-path rewriting to aid debugging
|
||||
|
||||
- Imported fs to allow reading output directories for diagnostics
|
||||
- Logs entries and directory counts for each successful output directory both pre- and post-import-path-rewrite
|
||||
- Diagnostics are gated by !isQuiet && !isJson and are read-only (no behavior change)
|
||||
- Tags used: 'diag' (post-compilation) and 'diag-post-rewrite' (after rewriting) to help identify missing or unexpected output folders
|
||||
|
||||
## 2026-03-05 - 4.1.17 - fix(tsunpacker)
|
||||
use synchronous fs operations in tsunpacker to avoid readdir race conditions
|
||||
|
||||
- Replaced async fs.promises.readdir/rename/rm/rmdir loops with fs.readdirSync/renameSync/rmSync/rmdirSync
|
||||
- Removed readdir retry loops that attempted to handle partial/stale readdir results
|
||||
- Updated comment to document rationale: avoid race conditions under signal pressure and XFS metadata lag
|
||||
- Note: function remains async but now performs blocking sync filesystem calls which may block the event loop during unpack
|
||||
|
||||
## 2026-03-05 - 4.1.16 - fix(mod_unpack)
|
||||
handle partial readdir results from signal-interrupted getdents64 when unpacking to ensure sibling removal and nested moves complete
|
||||
|
||||
- Loop readdir calls for destination directory until only the source folder remains to avoid partial-listing leftovers
|
||||
- Loop readdir calls for nested directory and repeatedly rename entries until the nested directory is empty
|
||||
- Prevents leftover files and incomplete moves when readdir returns partial results under signals
|
||||
|
||||
## 2026-03-05 - 4.1.15 - fix(mod_unpack)
|
||||
flatten nested output directory without temporary rename steps to avoid race conditions
|
||||
|
||||
- Replace rename-rm-rename strategy with: remove sibling entries in destination, move nested source entries up into the destination, then remove the now-empty nested folder.
|
||||
- Avoid creating temporary sibling directories and avoid removing the destination directory to reduce filesystem race conditions and metadata lag issues (XFS/NFS/etc.).
|
||||
- Remove the removeEmptyDirectory helper and stop using FsHelpers.move/removeDirectory in unpack; import and use fs.promises methods (readdir, rm, rename, rmdir) directly.
|
||||
|
||||
## 2026-03-05 - 4.1.14 - fix(fs)
|
||||
replace execSync and fsync workarounds with atomic async FsHelpers operations to avoid XFS races and shell dependencies
|
||||
|
||||
- Removed child_process.execSync usage and shell mv/rm commands in mod_unpack and mod_compiler.
|
||||
- Removed syncDirectoryTree and fsync-based workaround from the compiler module.
|
||||
- Use FsHelpers.move and FsHelpers.removeDirectory (async rename/remove) for atomic filesystem operations during unpack.
|
||||
- Await performUnpack directly and simplify unpack flow to improve portability and reliability on XFS and other filesystems.
|
||||
|
||||
## 2026-03-05 - 4.1.13 - fix(mod_unpack)
|
||||
Use child_process.execSync (mv/rm) to perform unpack atomically, replacing async fs operations and logs to avoid ENOENT/partial directory listings on XFS
|
||||
|
||||
- Replaced async fs.promises.rename/rm and readdir/stat debugging with execSync rm -rf and mv operations for sequential, atomic moves
|
||||
- Imported execSync from child_process and removed verbose console logging and extra fs checks
|
||||
- Addresses race conditions observed on filesystems like XFS where libuv async operations can return partial results or ENOENT errors
|
||||
|
||||
## 2026-03-05 - 4.1.12 - fix(mod_compiler)
|
||||
replace runtime require calls with top-level imports and use execSync/path.join for filesystem sync and traversal
|
||||
|
||||
- Added top-level imports: path and execSync from child_process
|
||||
- Replaced require('child_process').execSync('sync') with execSync('sync') to force fs sync
|
||||
- Replaced require('path').join(...) with path.join(...) when recursing directories
|
||||
- Refactor is purely local/maintenance-focused (consistency and slight performance/readability improvement); no functional change expected
|
||||
|
||||
## 2026-03-05 - 4.1.11 - fix(mod_compiler)
|
||||
flush directory entries before unpack to avoid XFS delayed-log causing partial readdir results
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@git.zone/tsbuild",
|
||||
"version": "4.1.11",
|
||||
"version": "4.1.18",
|
||||
"private": false,
|
||||
"description": "A tool for compiling TypeScript files using the latest nightly features, offering flexible APIs and a CLI for streamlined development.",
|
||||
"main": "dist_ts/index.js",
|
||||
|
||||
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@git.zone/tsbuild',
|
||||
version: '4.1.11',
|
||||
version: '4.1.18',
|
||||
description: 'A tool for compiling TypeScript files using the latest nightly features, offering flexible APIs and a CLI for streamlined development.'
|
||||
}
|
||||
|
||||
@@ -368,19 +368,23 @@ export class TsCompiler {
|
||||
|
||||
// Perform unpack if compilation succeeded
|
||||
if (result.errorSummary.totalErrors === 0) {
|
||||
// Force XFS to commit all pending directory entries before unpacking.
|
||||
// TypeScript's writeFileSync creates entries that may reside in XFS's
|
||||
// delayed log. Without sync, readdir can return partial results.
|
||||
require('child_process').execSync('sync');
|
||||
this.syncDirectoryTree(destDir);
|
||||
|
||||
try {
|
||||
await performUnpack(pattern, destDir, this.cwd);
|
||||
} catch (unpackErr: any) {
|
||||
console.error(` ⚠️ Unpack error for ${destPath}: ${unpackErr.message}`);
|
||||
successfulOutputDirs.push(destDir);
|
||||
}
|
||||
|
||||
successfulOutputDirs.push(destDir);
|
||||
// Diagnostic: log all output directory states after each compilation
|
||||
if (!isQuiet && !isJson) {
|
||||
for (const prevDir of successfulOutputDirs) {
|
||||
try {
|
||||
const entries = fs.readdirSync(prevDir);
|
||||
const dirs = entries.filter(e => {
|
||||
try { return fs.statSync(prevDir + '/' + e).isDirectory(); } catch { return false; }
|
||||
});
|
||||
console.log(` 📋 [diag] ${prevDir.replace(this.cwd + '/', '')}: ${entries.length} entries, ${dirs.length} dirs`);
|
||||
} catch {
|
||||
console.log(` 📋 [diag] ${prevDir.replace(this.cwd + '/', '')}: MISSING!`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -397,6 +401,21 @@ export class TsCompiler {
|
||||
if (totalRewritten > 0 && !isQuiet && !isJson) {
|
||||
console.log(` 🔄 Rewrote import paths in ${totalRewritten} file${totalRewritten !== 1 ? 's' : ''}`);
|
||||
}
|
||||
|
||||
// Diagnostic: log output directory states after path rewriting
|
||||
if (!isQuiet && !isJson) {
|
||||
for (const dir of successfulOutputDirs) {
|
||||
try {
|
||||
const entries = fs.readdirSync(dir);
|
||||
const dirs = entries.filter(e => {
|
||||
try { return fs.statSync(dir + '/' + e).isDirectory(); } catch { return false; }
|
||||
});
|
||||
console.log(` 📋 [diag-post-rewrite] ${dir.replace(this.cwd + '/', '')}: ${entries.length} entries, ${dirs.length} dirs`);
|
||||
} catch {
|
||||
console.log(` 📋 [diag-post-rewrite] ${dir.replace(this.cwd + '/', '')}: MISSING!`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Merge all error summaries
|
||||
@@ -502,27 +521,6 @@ export class TsCompiler {
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively fsync all directories in a tree.
|
||||
* Forces XFS to commit pending directory entries from its log.
|
||||
*/
|
||||
private syncDirectoryTree(dirPath: string): void {
|
||||
try {
|
||||
const fd = fs.openSync(dirPath, 'r');
|
||||
fs.fsyncSync(fd);
|
||||
fs.closeSync(fd);
|
||||
|
||||
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
this.syncDirectoryTree(require('path').join(dirPath, entry.name));
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore errors (directory may not exist)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge multiple error summaries into one
|
||||
*/
|
||||
|
||||
@@ -134,34 +134,4 @@ export class FsHelpers {
|
||||
public static async move(src: string, dest: string): Promise<void> {
|
||||
await fs.promises.rename(src, dest);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an empty directory
|
||||
*/
|
||||
public static async removeEmptyDirectory(dirPath: string): Promise<void> {
|
||||
// Retry rmdir with delays to handle filesystem metadata lag (XFS, NFS, etc.)
|
||||
// NEVER use recursive rm here — if rmdir fails with ENOTEMPTY, entries may
|
||||
// still be valid references to renamed files/dirs that haven't fully detached
|
||||
for (let attempt = 0; attempt < 5; attempt++) {
|
||||
try {
|
||||
await fs.promises.rmdir(dirPath);
|
||||
return;
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return; // Already gone
|
||||
}
|
||||
if (err.code === 'ENOTEMPTY' && attempt < 4) {
|
||||
// Wait for filesystem metadata to catch up
|
||||
await new Promise(resolve => setTimeout(resolve, 100 * (attempt + 1)));
|
||||
continue;
|
||||
}
|
||||
// Final attempt failed or non-retryable error — leave directory in place
|
||||
// It will be cleaned up by the next build's "clear output directory" step
|
||||
if (err.code === 'ENOTEMPTY') {
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -83,11 +83,16 @@ export class TsUnpacker {
|
||||
/**
|
||||
* Perform the unpack operation - flatten nested output directories.
|
||||
*
|
||||
* Strategy: instead of listing entries and moving them individually (which is
|
||||
* vulnerable to async readdir returning partial results under signal pressure),
|
||||
* we rename the entire nested directory out, remove the dest dir, then rename
|
||||
* the nested directory back as the dest dir. This uses only rename operations
|
||||
* which are atomic at the kernel level.
|
||||
* When TypeScript compiles files that import from sibling directories,
|
||||
* it creates a nested structure like dist_ts/ts/ with siblings like
|
||||
* dist_ts/ts_interfaces/. This method flattens by:
|
||||
* 1. Removing sibling directories (non-source folders)
|
||||
* 2. Moving contents of the nested source folder up to the dest dir
|
||||
* 3. Removing the now-empty nested source folder
|
||||
*
|
||||
* Uses synchronous fs operations to avoid race conditions with
|
||||
* async readdir returning partial/stale results under signal pressure
|
||||
* or XFS metadata lag (observed in process-group environments like gitzone).
|
||||
*
|
||||
* Returns true if unpacking was performed, false if skipped.
|
||||
*/
|
||||
@@ -101,42 +106,26 @@ export class TsUnpacker {
|
||||
}
|
||||
|
||||
const nestedPath = this.getNestedPath();
|
||||
const tempPath = this.destDir + '.__unpack_temp__';
|
||||
|
||||
// Log what we're about to do
|
||||
const nestedEntries = fs.readdirSync(nestedPath);
|
||||
console.log(` 📦 Unpacking ${this.sourceFolderName}/: ${nestedEntries.length} entries in nested dir`);
|
||||
console.log(` 📦 Entries: [${nestedEntries.join(', ')}]`);
|
||||
// Also list the dest dir to see what TypeScript created
|
||||
// Step 1: Remove sibling entries (everything in dest except the source folder)
|
||||
const destEntries = fs.readdirSync(this.destDir);
|
||||
console.log(` 📦 destDir entries: [${destEntries.join(', ')}]`);
|
||||
for (const entry of destEntries) {
|
||||
if (entry !== this.sourceFolderName) {
|
||||
fs.rmSync(path.join(this.destDir, entry), { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up any leftover temp dir from a previous failed unpack
|
||||
await fs.promises.rm(tempPath, { recursive: true, force: true });
|
||||
// Step 2: Move all contents from nested dir up to dest dir
|
||||
const nestedEntries = fs.readdirSync(nestedPath);
|
||||
for (const entry of nestedEntries) {
|
||||
fs.renameSync(
|
||||
path.join(nestedPath, entry),
|
||||
path.join(this.destDir, entry),
|
||||
);
|
||||
}
|
||||
|
||||
// Step 1: Rename the nested source folder out to a temp location.
|
||||
// e.g. dist_ts/ts/ → dist_ts.__unpack_temp__/
|
||||
await fs.promises.rename(nestedPath, tempPath);
|
||||
|
||||
// Verify step 1
|
||||
const tempEntries = fs.readdirSync(tempPath);
|
||||
console.log(` 📦 Step 1 (rename to temp): ${tempEntries.length} entries in temp`);
|
||||
|
||||
// Step 2: Remove the dest dir (which now only contains sibling folders
|
||||
// like ts_interfaces/). Use recursive rm to handle any contents.
|
||||
await fs.promises.rm(this.destDir, { recursive: true, force: true, maxRetries: 3, retryDelay: 100 });
|
||||
console.log(` 📦 Step 2 (remove dest): done`);
|
||||
|
||||
// Step 3: Rename the temp dir to the dest dir.
|
||||
// e.g. dist_ts.__unpack_temp__/ → dist_ts/
|
||||
await fs.promises.rename(tempPath, this.destDir);
|
||||
|
||||
// Verify final state
|
||||
const finalEntries = fs.readdirSync(this.destDir);
|
||||
const finalDirs = finalEntries.filter((e: string) => {
|
||||
return fs.statSync(path.join(this.destDir, e)).isDirectory();
|
||||
});
|
||||
console.log(` 📦 Step 3 (rename to dest): ${finalEntries.length} entries (${finalDirs.length} dirs)`);
|
||||
// Step 3: Remove the now-empty nested directory
|
||||
fs.rmdirSync(nestedPath);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user