Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 33 additions & 0 deletions packages/cli/src/lib/fs-utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import fs from 'node:fs/promises';

/**
 * Copies a file, retrying on transient filesystem errors (EBUSY / EPERM /
 * EACCES) that show up intermittently on Windows CI when another process
 * briefly holds a handle on the source or target.
 *
 * @param {string|URL} source - file to copy
 * @param {string|URL} target - destination path
 * @param {object} [options]
 * @param {number} [options.attempts=5] - total number of tries (must be >= 1)
 * @param {number} [options.baseDelay=100] - starting delay in ms for exponential backoff
 * @returns {Promise<void>} resolves when the copy succeeds
 * @throws {RangeError} if attempts < 1
 * @throws the fs.copyFile error immediately if it is non-retryable, or the
 *   last error seen once all attempts are exhausted
 */
async function copyFileWithRetry(source, target, { attempts = 5, baseDelay = 100 } = {}) {
  if (attempts < 1) {
    throw new RangeError(`attempts must be >= 1, got ${attempts}`);
  }

  const RETRYABLE_CODES = new Set(['EBUSY', 'EPERM', 'EACCES']);

  for (let i = 0; i < attempts; i++) {
    try {
      return await fs.copyFile(source, target);
    } catch (err) {
      // non-retryable error, or final attempt -> surface to the caller
      if (!RETRYABLE_CODES.has(err.code) || i === attempts - 1) {
        throw err;
      }

      // exponential backoff plus a little jitter so parallel copies don't
      // retry in lock-step
      const delay = baseDelay * 2 ** i + Math.floor(Math.random() * baseDelay);
      await new Promise((resolve) => setTimeout(resolve, delay));
    }
  }
}

export { copyFileWithRetry };
34 changes: 25 additions & 9 deletions packages/cli/src/lifecycles/copy.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,23 @@
import fs from "node:fs/promises";
import { checkResourceExists } from "../lib/resource-utils.js";
import { asyncForEach } from "../lib/async-utils.js";
import { copyFileWithRetry } from "../lib/fs-utils.js";

/**
 * Concurrency-limited async mapper. Runs `mapper` over `items` with at most
 * `concurrency` invocations in flight at once, using a shared-index worker
 * pool — unlike chunked batching, a slow item never blocks unrelated items
 * from starting. Results preserve input order; the first rejection
 * propagates to the caller via Promise.all.
 *
 * @param {Array} items - values to map over
 * @param {(item: any) => Promise<any>} mapper - async transform per item
 * @param {number} [concurrency=8] - max in-flight mapper calls
 * @returns {Promise<Array>} mapped results, in the same order as items
 */
async function mapWithConcurrency(items, mapper, concurrency = 8) {
  const results = new Array(items.length);
  let nextIndex = 0;

  // each worker claims the next unprocessed index; the read-and-increment is
  // synchronous, so two workers can never claim the same slot
  const worker = async () => {
    while (nextIndex < items.length) {
      const i = nextIndex++;
      results[i] = await mapper(items[i]);
    }
  };

  const poolSize = Math.max(1, Math.min(concurrency, items.length));
  await Promise.all(Array.from({ length: poolSize }, worker));

  return results;
}

async function rreaddir(dir, allFiles = []) {
const files = (await fs.readdir(dir)).map((f) => new URL(`./${f}`, dir));
Expand All @@ -20,10 +37,9 @@ async function rreaddir(dir, allFiles = []) {
/**
 * Copies a single file into the output location, logging its
 * project-relative path first. Copy failures are logged (best-effort) rather
 * than rethrown, so one bad file does not abort the rest of the copy pass.
 *
 * @param {URL} source - file URL to copy from
 * @param {URL} target - file URL to copy to
 * @param {URL} projectDirectory - project root, used to shorten the log line
 */
async function copyFile(source, target, projectDirectory) {
  const relativePath = source.pathname.replace(projectDirectory.pathname, "");

  try {
    console.info(`copying file... ${relativePath}`);
    await copyFileWithRetry(source, target);
  } catch (error) {
    console.error("ERROR copying file", source.href, "->", target.href, error);
  }
}

Expand All @@ -39,7 +55,7 @@ async function copyDirectory(fromUrl, toUrl, projectDirectory) {
});
}

await asyncForEach(files, async (fileUrl) => {
await mapWithConcurrency(files, async (fileUrl) => {
const targetUrl = new URL(
`file://${fileUrl.pathname.replace(fromUrl.pathname, toUrl.pathname)}`,
);
Expand All @@ -60,7 +76,7 @@ async function copyDirectory(fromUrl, toUrl, projectDirectory) {
}

await copyFile(fileUrl, targetUrl, projectDirectory);
});
}, 8);
}
} catch (e) {
console.error("ERROR", e);
Expand All @@ -71,19 +87,19 @@ const copyAssets = async (compilation) => {
const copyPlugins = compilation.config.plugins.filter((plugin) => plugin.type === "copy");
const { projectDirectory } = compilation.context;

await asyncForEach(copyPlugins, async (plugin) => {
await mapWithConcurrency(copyPlugins, async (plugin) => {
const locations = await plugin.provider(compilation);

await asyncForEach(locations, async (location) => {
await mapWithConcurrency(locations, async (location) => {
const { from, to } = location;

if (from.pathname.endsWith("/")) {
await copyDirectory(from, to, projectDirectory);
} else {
await copyFile(from, to, projectDirectory);
}
});
});
}, 4);
}, 2);
};

export { copyAssets };
7 changes: 6 additions & 1 deletion packages/cli/src/plugins/resource/plugin-standard-css.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
*
*/
import fs from "node:fs";
import { copyFileWithRetry } from "../../lib/fs-utils.js";
import path from "node:path";
import { parse, walk } from "css-tree";
import { hashString } from "../../lib/hashing-utils.js";
Expand Down Expand Up @@ -134,7 +135,11 @@ function bundleCss(body, sourceUrl, compilation, workingUrl) {
recursive: true,
});

fs.promises.copyFile(resolvedUrl, new URL(`.${finalValue}`, outputDir));
// Use copy helper with retry to avoid intermittent EBUSY on Windows CI
// bundleCss is synchronous, so we don't await here; log any copy errors.
copyFileWithRetry(resolvedUrl, new URL(`.${finalValue}`, outputDir)).catch((err) =>
console.error('ERROR copying asset during CSS bundling', resolvedUrl.href, err),
);
}

optimizedCss += `url('${basePath}${finalValue}')`;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import chai from "chai";
import { JSDOM } from "jsdom";
import path from "node:path";
import { Runner } from "gallinago";
import { runSmokeTest } from "../../../../../test/smoke-test.js";
import { runSmokeTest, safeTeardown } from "../../../../../test/smoke-test.js";
import { fileURLToPath } from "node:url";

const expect = chai.expect;
Expand Down Expand Up @@ -93,8 +93,11 @@ describe("Initialize a new Greenwood project: ", function () {
});
});

after(function () {
after(async function () {
runner.stopCommand();
runner.teardown([initOutputPath]);
// give the process a moment to release file handles (helps on Windows)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't think we want this; it should be taken care of by #1573, which includes a fix to the test runner around teardown exit conditions.

await new Promise((resolve) => setTimeout(resolve, 500));
// increase attempts and baseDelay for teardown to be more resilient on CI
await safeTeardown(runner, [initOutputPath], 6, 200);
});
});
68 changes: 68 additions & 0 deletions test/smoke-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -252,3 +252,71 @@ async function runSmokeTest(testCases, label) {
}

export { runSmokeTest };

// Shared helper for tests to teardown runner paths safely with retries on transient Windows file locks
import fsPromises from 'node:fs/promises';

/**
 * Removes a path recursively, retrying on transient Windows file-lock errors
 * (EBUSY / EPERM / EACCES) with exponential backoff.
 *
 * @param {string} pathStr - path to remove
 * @param {number} [attempts=5] - total number of tries (must be >= 1)
 * @param {number} [baseDelay=100] - starting delay in ms, doubled each retry
 * @param {boolean} [verbose=false] - log each attempt for CI debugging
 * @returns {Promise<void>} resolves once the path is gone (or never existed)
 * @throws {RangeError} if attempts < 1
 * @throws the rm error immediately if non-retryable, or the last error once
 *   attempts are exhausted
 */
async function safeRmPath(pathStr, attempts = 5, baseDelay = 100, verbose = false) {
  if (attempts < 1) {
    throw new RangeError(`attempts must be >= 1, got ${attempts}`);
  }

  for (let i = 0; i < attempts; i++) {
    try {
      if (verbose) console.info(`[safeRmPath] rm attempt ${i + 1}/${attempts} ${pathStr}`);
      // force: true makes a missing path a no-op instead of an ENOENT throw;
      // for teardown the goal is "path is gone", so already-removed is success
      await fsPromises.rm(pathStr, { recursive: true, force: true });
      if (verbose) console.info(`[safeRmPath] rm succeeded ${pathStr}`);
      return;
    } catch (err) {
      if (verbose) console.warn(`[safeRmPath] rm attempt ${i + 1} failed for ${pathStr}:`, err && err.code ? err.code : err);

      if ((err.code === 'EBUSY' || err.code === 'EPERM' || err.code === 'EACCES') && i < attempts - 1) {
        const delay = baseDelay * Math.pow(2, i);
        await new Promise((resolve) => setTimeout(resolve, delay));
        continue;
      }

      // rethrow if non-retryable or last attempt
      throw err;
    }
  }
}

/**
 * Tears down a test runner's output paths, retrying transient Windows
 * file-lock errors (EBUSY / EPERM / EACCES) with exponential backoff, and
 * falling back to per-path fs.rm if runner.teardown keeps failing.
 *
 * @param {{ teardown: (paths: string[]) => void|Promise<void> }} runner - test runner instance
 * @param {string[]} [paths=[]] - paths handed to runner.teardown
 * @param {number} [attempts=5] - total number of tries (must be >= 1)
 * @param {number} [baseDelay=100] - starting delay in ms, doubled each retry
 * @returns {Promise<void>}
 * @throws {RangeError} if attempts < 1
 * @throws non-retryable errors immediately; if retries are exhausted and the
 *   per-path fallback also fails, the fallback's rm error
 */
async function safeTeardown(runner, paths = [], attempts = 5, baseDelay = 100) {
  const verbose = process.env.GWD_RETRY_VERBOSE === 'true';

  if (attempts < 1) {
    throw new RangeError(`attempts must be >= 1, got ${attempts}`);
  }

  for (let i = 0; i < attempts; i++) {
    try {
      if (verbose) console.info(`[safeTeardown] attempt ${i + 1}/${attempts} teardown paths=${JSON.stringify(paths)}`);
      // await so a promise-returning teardown has its rejection caught here
      // (and retried) instead of escaping as an unhandled rejection
      await runner.teardown(paths);
      if (verbose) console.info(`[safeTeardown] teardown succeeded`);
      return;
    } catch (err) {
      if (verbose) console.warn(`[safeTeardown] attempt ${i + 1} failed:`, err && err.code ? err.code : err);

      const retryable = err.code === 'EBUSY' || err.code === 'EPERM' || err.code === 'EACCES';

      if (retryable && i < attempts - 1) {
        const delay = baseDelay * Math.pow(2, i);
        if (verbose) console.info(`[safeTeardown] retrying in ${delay}ms`);
        await new Promise((resolve) => setTimeout(resolve, delay));
        continue;
      }

      if (retryable) {
        // retries exhausted on a transient error: fall back to removing each
        // path directly, sidestepping whatever runner.teardown tripped on
        if (verbose) console.info('[safeTeardown] falling back to per-path rm');
        for (const p of paths) {
          try {
            await safeRmPath(p, attempts, baseDelay, verbose);
          } catch (rmErr) {
            // fallback removal also failed -> surface the rm error so CI
            // logs show what is still locked
            if (verbose) console.error('[safeTeardown] fallback rm failed for', p, rmErr);
            throw rmErr;
          }
        }

        return;
      }

      throw err;
    }
  }
}

export { safeTeardown };