Compare commits
11 Commits
528a4dbdc9
...
fc8b43ab25
| Author | SHA1 | Date |
|---|---|---|
|
|
fc8b43ab25 | |
|
|
fdfa3343d3 | |
|
|
44972d62b9 | |
|
|
deedf18fc5 | |
|
|
17fe438452 | |
|
|
d036d34892 | |
|
|
bc7c7f0757 | |
|
|
43cfc01f2c | |
|
|
a32fc19d4a | |
|
|
c356aae5b4 | |
|
|
373c06b68f |
|
|
@ -7,6 +7,7 @@ name: Quality & Validation
|
|||
# - Schema validation (YAML structure)
|
||||
# - Agent schema tests (fixture-based validation)
|
||||
# - Installation component tests (compilation)
|
||||
# - fs wrapper tests (native fs replacement)
|
||||
# - Bundle validation (web bundle integrity)
|
||||
|
||||
"on":
|
||||
|
|
@ -112,5 +113,8 @@ jobs:
|
|||
- name: Test agent compilation components
|
||||
run: npm run test:install
|
||||
|
||||
- name: Test fs wrapper
|
||||
run: npm run test:fs
|
||||
|
||||
- name: Validate file references
|
||||
run: npm run validate:refs
|
||||
|
|
|
|||
15
CHANGELOG.md
15
CHANGELOG.md
|
|
@ -1,5 +1,20 @@
|
|||
# Changelog
|
||||
|
||||
## [6.0.4]
|
||||
|
||||
### 🎁 Features
|
||||
|
||||
* Add edge case hunter review task - new reusable review task that exhaustively traces branching paths and boundary conditions in code, reporting only unhandled gaps. Method-driven analysis complementary to adversarial review (#1790)
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
|
||||
* Fix brainstorming to not overwrite previous sessions; now prompts to continue existing brainstorming or start a new one when older brainstorming sessions are found
|
||||
* Fix installer templates - replace legacy `@` path prefixes with explicit `{project-root}` syntax for consistency (#1769)
|
||||
* Fix edge case hunter - remove zero-findings halt condition that was pressuring the LLM to hallucinate findings when none legitimately exist (#1797)
|
||||
* Fix broken docs domain references in README and GitHub issue templates (#1777)
|
||||
|
||||
---
|
||||
|
||||
## [6.0.3]
|
||||
|
||||
### 🎁 Features
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "bmad-method",
|
||||
"version": "6.0.3",
|
||||
"version": "6.0.4",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "bmad-method",
|
||||
"version": "6.0.3",
|
||||
"version": "6.0.4",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@clack/core": "^1.0.0",
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "bmad-method",
|
||||
"version": "6.0.3",
|
||||
"version": "6.0.4",
|
||||
"description": "Breakthrough Method of Agile AI-driven Development",
|
||||
"keywords": [
|
||||
"agile",
|
||||
|
|
@ -40,8 +40,9 @@
|
|||
"lint:md": "markdownlint-cli2 \"**/*.md\"",
|
||||
"prepare": "command -v husky >/dev/null 2>&1 && husky || exit 0",
|
||||
"rebundle": "node tools/cli/bundlers/bundle-web.js rebundle",
|
||||
"test": "npm run test:schemas && npm run test:refs && npm run test:install && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
|
||||
"test": "npm run test:schemas && npm run test:refs && npm run test:install && npm run test:fs && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
|
||||
"test:coverage": "c8 --reporter=text --reporter=html npm run test:schemas",
|
||||
"test:fs": "node test/test-fs-wrapper.js",
|
||||
"test:install": "node test/test-installation-components.js",
|
||||
"test:refs": "node test/test-file-refs-csv.js",
|
||||
"test:schemas": "node test/test-agent-schema.js",
|
||||
|
|
@ -71,7 +72,6 @@
|
|||
"chalk": "^4.1.2",
|
||||
"commander": "^14.0.0",
|
||||
"csv-parse": "^6.1.0",
|
||||
"fs-extra": "^11.3.0",
|
||||
"glob": "^11.0.3",
|
||||
"ignore": "^7.0.5",
|
||||
"js-yaml": "^4.1.0",
|
||||
|
|
|
|||
|
|
@ -7,3 +7,4 @@ core,anytime,Shard Document,SD,,_bmad/core/tasks/shard-doc.xml,bmad-shard-doc,fa
|
|||
core,anytime,Editorial Review - Prose,EP,,_bmad/core/tasks/editorial-review-prose.xml,bmad-editorial-review-prose,false,,,"Review prose for clarity, tone, and communication issues. Use after drafting to polish written content.",report located with target document,"three-column markdown table with suggested fixes",
|
||||
core,anytime,Editorial Review - Structure,ES,,_bmad/core/tasks/editorial-review-structure.xml,bmad-editorial-review-structure,false,,,"Propose cuts, reorganization, and simplification while preserving comprehension. Use when doc produced from multiple subprocesses or needs structural improvement.",report located with target document,
|
||||
core,anytime,Adversarial Review (General),AR,,_bmad/core/tasks/review-adversarial-general.xml,bmad-review-adversarial-general,false,,,"Review content critically to find issues and weaknesses. Use for quality assurance or before finalizing deliverables. Code Review in other modules run this automatically, but its useful also for document reviews",,
|
||||
core,anytime,Edge Case Hunter Review,ECH,,_bmad/core/tasks/review-edge-case-hunter.xml,bmad-review-edge-case-hunter,false,,,"Walk every branching path and boundary condition in code, report only unhandled edge cases. Use alongside adversarial review for orthogonal coverage - method-driven not attitude-driven.",,
|
||||
|
|
|
|||
|
Can't render this file because it has a wrong number of fields in line 2.
|
|
|
@ -0,0 +1,63 @@
|
|||
<!-- if possible, run this in a separate subagent or process with read access to the project,
|
||||
but no context except the content to review -->
|
||||
|
||||
<task id="_bmad/core/tasks/review-edge-case-hunter.xml" name="Edge Case Hunter Review"
|
||||
description="Walk every branching path and boundary condition in content, report only unhandled edge cases. Orthogonal to adversarial review - method-driven not attitude-driven.">
|
||||
<objective>You are a pure path tracer. Never comment on whether code is good or bad; only list missing handling.
|
||||
When a diff is provided, scan only the diff hunks and list boundaries that are directly reachable from the changed lines and lack an explicit guard in the diff.
|
||||
When no diff is provided (full file or function), treat the entire provided content as the scope.
|
||||
Ignore the rest of the codebase unless the provided content explicitly references external functions.</objective>
|
||||
|
||||
<inputs>
|
||||
<input name="content" desc="Content to review - diff, full file, or function" />
|
||||
<input name="also_consider" required="false"
|
||||
desc="Optional areas to keep in mind during review alongside normal edge-case analysis" />
|
||||
</inputs>
|
||||
|
||||
<output-format>Return ONLY a valid JSON array of objects. Each object must contain exactly these four fields and nothing else:
|
||||
{
|
||||
"location": "file:line",
|
||||
"trigger_condition": "one-line description (max 15 words)",
|
||||
"guard_snippet": "minimal code sketch that closes the gap",
|
||||
"potential_consequence": "what could actually go wrong (max 15 words)"
|
||||
}
|
||||
No extra text, no explanations, no markdown wrapping.</output-format>
|
||||
|
||||
<llm critical="true">
|
||||
<i>MANDATORY: Execute ALL steps in the flow section IN EXACT ORDER</i>
|
||||
<i>DO NOT skip steps or change the sequence</i>
|
||||
<i>HALT immediately when halt-conditions are met</i>
|
||||
<i>Each action xml tag within step xml tag is a REQUIRED action to complete that step</i>
|
||||
|
||||
<i>Your method is exhaustive path enumeration — mechanically walk every branch, not hunt by intuition</i>
|
||||
<i>Trace each branching path: conditionals, switches, early returns, guard clauses, loops, error handlers</i>
|
||||
<i>Trace each boundary condition: null, undefined, empty, zero, negative, overflow, max-length, type coercion, concurrency, timing</i>
|
||||
<i>Report ONLY paths and conditions that lack handling — discard handled ones silently</i>
|
||||
<i>Do NOT editorialize or add filler — findings only</i>
|
||||
</llm>
|
||||
|
||||
<flow>
|
||||
<step n="1" title="Receive Content">
|
||||
<action>Load the content to review from provided input or context</action>
|
||||
<action>If content to review is empty, ask for clarification and abort task</action>
|
||||
<action>Identify content type (diff, full file, or function) to determine scope rules</action>
|
||||
</step>
|
||||
|
||||
<step n="2" title="Exhaustive Path Analysis" critical="true">
|
||||
<mandate>Walk every branching path and boundary condition within scope - report only unhandled ones</mandate>
|
||||
<action>If also_consider input was provided, incorporate those areas into the analysis</action>
|
||||
<action>Enumerate all branching paths and boundary conditions within scope: conditionals, switches, early returns, guard clauses, loops, error handlers, null/empty states, overflow, type edges, concurrency, timing</action>
|
||||
<action>For each path: determine whether the content handles it</action>
|
||||
<action>Collect only the unhandled paths as findings - discard handled ones silently</action>
|
||||
</step>
|
||||
|
||||
<step n="3" title="Present Findings">
|
||||
<action>Output findings as a JSON array following the output-format specification exactly</action>
|
||||
</step>
|
||||
</flow>
|
||||
|
||||
<halt-conditions>
|
||||
<condition>HALT if content is empty or unreadable</condition>
|
||||
</halt-conditions>
|
||||
|
||||
</task>
|
||||
|
|
@ -29,23 +29,30 @@ Initialize the brainstorming workflow by detecting continuation state and settin
|
|||
|
||||
## INITIALIZATION SEQUENCE:
|
||||
|
||||
### 1. Check for Existing Workflow
|
||||
### 1. Check for Existing Sessions
|
||||
|
||||
First, check if the output document already exists:
|
||||
First, check the brainstorming sessions folder for existing sessions:
|
||||
|
||||
- Look for file at `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`
|
||||
- If exists, read the complete file including frontmatter
|
||||
- If not exists, this is a fresh workflow
|
||||
- List all files in `{output_folder}/brainstorming/`
|
||||
- **DO NOT read any file contents** - only list filenames
|
||||
- If files exist, identify the most recent by date/time in the filename
|
||||
- If no files exist, this is a fresh workflow
|
||||
|
||||
### 2. Handle Continuation (If Document Exists)
|
||||
### 2. Handle Existing Sessions (If Files Found)
|
||||
|
||||
If the document exists and has frontmatter with `stepsCompleted`:
|
||||
If existing session files are found:
|
||||
|
||||
- **STOP here** and load `./step-01b-continue.md` immediately
|
||||
- Do not proceed with any initialization tasks
|
||||
- Let step-01b handle the continuation logic
|
||||
- Display the most recent session filename (do NOT read its content)
|
||||
- Ask the user: "Found existing session: `[filename]`. Would you like to:
|
||||
**[1]** Continue this session
|
||||
**[2]** Start a new session
|
||||
**[3]** See all existing sessions"
|
||||
|
||||
### 3. Fresh Workflow Setup (If No Document)
|
||||
- If user selects **[1]** (continue): Set `{brainstorming_session_output_file}` to that file path and load `./step-01b-continue.md`
|
||||
- If user selects **[2]** (new): Generate new filename with current date/time and proceed to step 3
|
||||
- If user selects **[3]** (see all): List all session filenames and ask which to continue or if new
|
||||
|
||||
### 3. Fresh Workflow Setup (If No Files or User Chooses New)
|
||||
|
||||
If no document exists or no `stepsCompleted` in frontmatter:
|
||||
|
||||
|
|
@ -55,10 +62,10 @@ Create the brainstorming session document:
|
|||
|
||||
```bash
|
||||
# Create directory if needed
|
||||
mkdir -p "$(dirname "{output_folder}/brainstorming/brainstorming-session-{{date}}.md")"
|
||||
mkdir -p "$(dirname "{brainstorming_session_output_file}")"
|
||||
|
||||
# Initialize from template
|
||||
cp "{template_path}" "{output_folder}/brainstorming/brainstorming-session-{{date}}.md"
|
||||
cp "{template_path}" "{brainstorming_session_output_file}"
|
||||
```
|
||||
|
||||
#### B. Context File Check and Loading
|
||||
|
|
@ -134,7 +141,7 @@ _[Content based on conversation about session parameters and facilitator approac
|
|||
|
||||
## APPEND TO DOCUMENT:
|
||||
|
||||
When user selects approach, append the session overview content directly to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md` using the structure from above.
|
||||
When user selects approach, append the session overview content directly to `{brainstorming_session_output_file}` using the structure from above.
|
||||
|
||||
### E. Continue to Technique Selection
|
||||
|
||||
|
|
@ -152,7 +159,7 @@ Which approach appeals to you most? (Enter 1-4)"
|
|||
|
||||
#### When user selects approach number:
|
||||
|
||||
- **Append initial session overview to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`**
|
||||
- **Append initial session overview to `{brainstorming_session_output_file}`**
|
||||
- **Update frontmatter:** `stepsCompleted: [1]`, `selected_approach: '[selected approach]'`
|
||||
- **Load the appropriate step-02 file** based on selection
|
||||
|
||||
|
|
@ -167,7 +174,9 @@ After user selects approach number:
|
|||
|
||||
## SUCCESS METRICS:
|
||||
|
||||
✅ Existing workflow detected and continuation handled properly
|
||||
✅ Existing sessions detected without reading file contents
|
||||
✅ User prompted to continue existing session or start new
|
||||
✅ Correct session file selected for continuation
|
||||
✅ Fresh workflow initialized with correct document structure
|
||||
✅ Session context gathered and understood clearly
|
||||
✅ User's approach selection captured and routed correctly
|
||||
|
|
@ -176,7 +185,9 @@ After user selects approach number:
|
|||
|
||||
## FAILURE MODES:
|
||||
|
||||
❌ Not checking for existing document before creating new one
|
||||
❌ Reading file contents during session detection (wastes context)
|
||||
❌ Not asking user before continuing existing session
|
||||
❌ Not properly routing user's continue/new session selection
|
||||
❌ Missing continuation detection leading to duplicate work
|
||||
❌ Insufficient session context gathering
|
||||
❌ Not properly routing user's approach selection
|
||||
|
|
@ -184,7 +195,9 @@ After user selects approach number:
|
|||
|
||||
## SESSION SETUP PROTOCOLS:
|
||||
|
||||
- Always verify document existence before initialization
|
||||
- Always list sessions folder WITHOUT reading file contents
|
||||
- Ask user before continuing any existing session
|
||||
- Only load continue step after user confirms
|
||||
- Load brain techniques CSV only when needed for technique presentation
|
||||
- Use collaborative facilitation language throughout
|
||||
- Maintain psychological safety for creative exploration
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ Load existing document and analyze current state:
|
|||
|
||||
**Document Analysis:**
|
||||
|
||||
- Read existing `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`
|
||||
- Read existing `{brainstorming_session_output_file}`
|
||||
- Examine frontmatter for `stepsCompleted`, `session_topic`, `session_goals`
|
||||
- Review content to understand session progress and outcomes
|
||||
- Identify current stage and next logical steps
|
||||
|
|
|
|||
|
|
@ -296,7 +296,7 @@ After final technique element:
|
|||
|
||||
#### If 'C' (Move to organization):
|
||||
|
||||
- **Append the technique execution content to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`**
|
||||
- **Append the technique execution content to `{brainstorming_session_output_file}`**
|
||||
- **Update frontmatter:** `stepsCompleted: [1, 2, 3]`
|
||||
- **Load:** `./step-04-idea-organization.md`
|
||||
|
||||
|
|
@ -356,7 +356,7 @@ _[Short narrative describing the user and AI collaboration journey - what made t
|
|||
|
||||
## APPEND TO DOCUMENT:
|
||||
|
||||
When user selects 'C', append the content directly to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md` using the structure from above.
|
||||
When user selects 'C', append the content directly to `{brainstorming_session_output_file}` using the structure from above.
|
||||
|
||||
## SUCCESS METRICS:
|
||||
|
||||
|
|
|
|||
|
|
@ -253,14 +253,14 @@ Provide final session wrap-up and forward guidance:
|
|||
|
||||
#### If [C] Complete:
|
||||
|
||||
- **Append the final session content to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`**
|
||||
- **Append the final session content to `{brainstorming_session_output_file}`**
|
||||
- Update frontmatter: `stepsCompleted: [1, 2, 3, 4]`
|
||||
- Set `session_active: false` and `workflow_completed: true`
|
||||
- Complete workflow with positive closure message
|
||||
|
||||
## APPEND TO DOCUMENT:
|
||||
|
||||
When user selects 'C', append the content directly to `{output_folder}/brainstorming/brainstorming-session-{{date}}.md` using the structure from step 7.
|
||||
When user selects 'C', append the content directly to `{brainstorming_session_output_file}` using the structure from step 7.
|
||||
|
||||
## SUCCESS METRICS:
|
||||
|
||||
|
|
|
|||
|
|
@ -45,7 +45,9 @@ Load config from `{project-root}/_bmad/core/config.yaml` and resolve:
|
|||
- `installed_path` = `{project-root}/_bmad/core/workflows/brainstorming`
|
||||
- `template_path` = `{installed_path}/template.md`
|
||||
- `brain_techniques_path` = `{installed_path}/brain-methods.csv`
|
||||
- `default_output_file` = `{output_folder}/brainstorming/brainstorming-session-{{date}}.md`
|
||||
- `brainstorming_session_output_file` = `{output_folder}/brainstorming/brainstorming-session-{{date}}-{{time}}.md` (evaluated once at workflow start)
|
||||
|
||||
All steps MUST reference `{brainstorming_session_output_file}` instead of the full path pattern.
|
||||
- `context_file` = Optional context file path from workflow invocation for project-specific guidance
|
||||
- `advancedElicitationTask` = `{project-root}/_bmad/core/workflows/advanced-elicitation/workflow.xml`
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,489 @@
|
|||
/**
|
||||
* Native fs Wrapper Tests
|
||||
*
|
||||
* Validates that tools/cli/lib/fs.js provides the same API surface
|
||||
* as fs-extra but backed entirely by native node:fs. Exercises every
|
||||
* exported method the CLI codebase relies on.
|
||||
*
|
||||
* Usage: node test/test-fs-wrapper.js
|
||||
* Exit codes: 0 = all tests pass, 1 = test failures
|
||||
*/
|
||||
|
||||
const nativeFs = require('node:fs');
|
||||
const path = require('node:path');
|
||||
const fs = require('../tools/cli/lib/fs');
|
||||
|
||||
// ANSI color codes
|
||||
const colors = {
|
||||
reset: '\u001B[0m',
|
||||
green: '\u001B[32m',
|
||||
red: '\u001B[31m',
|
||||
yellow: '\u001B[33m',
|
||||
cyan: '\u001B[36m',
|
||||
dim: '\u001B[2m',
|
||||
};
|
||||
|
||||
let totalTests = 0;
|
||||
let passedTests = 0;
|
||||
const failures = [];
|
||||
|
||||
function test(name, fn) {
|
||||
totalTests++;
|
||||
try {
|
||||
fn();
|
||||
passedTests++;
|
||||
console.log(` ${colors.green}\u2713${colors.reset} ${name}`);
|
||||
} catch (error) {
|
||||
console.log(` ${colors.red}\u2717${colors.reset} ${name} ${colors.red}${error.message}${colors.reset}`);
|
||||
failures.push({ name, message: error.message });
|
||||
}
|
||||
}
|
||||
|
||||
async function asyncTest(name, fn) {
|
||||
totalTests++;
|
||||
try {
|
||||
await fn();
|
||||
passedTests++;
|
||||
console.log(` ${colors.green}\u2713${colors.reset} ${name}`);
|
||||
} catch (error) {
|
||||
console.log(` ${colors.red}\u2717${colors.reset} ${name} ${colors.red}${error.message}${colors.reset}`);
|
||||
failures.push({ name, message: error.message });
|
||||
}
|
||||
}
|
||||
|
||||
function assert(condition, message) {
|
||||
if (!condition) throw new Error(message);
|
||||
}
|
||||
|
||||
function assertEqual(actual, expected, message) {
|
||||
if (actual !== expected) {
|
||||
throw new Error(`${message}: expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Test fixtures ───────────────────────────────────────────────────────────
|
||||
|
||||
const TMP = path.join(__dirname, '.tmp-fs-wrapper-test');
|
||||
|
||||
function setup() {
|
||||
nativeFs.rmSync(TMP, { recursive: true, force: true });
|
||||
nativeFs.mkdirSync(TMP, { recursive: true });
|
||||
}
|
||||
|
||||
function teardown() {
|
||||
nativeFs.rmSync(TMP, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ── Tests ───────────────────────────────────────────────────────────────────
|
||||
|
||||
async function runTests() {
|
||||
console.log(`${colors.cyan}========================================`);
|
||||
console.log('Native fs Wrapper Tests');
|
||||
console.log(`========================================${colors.reset}\n`);
|
||||
|
||||
setup();
|
||||
|
||||
// ── Re-exported native members ──────────────────────────────────────────
|
||||
|
||||
console.log(`${colors.yellow}Re-exported native fs members${colors.reset}`);
|
||||
|
||||
test('exports fs.constants', () => {
|
||||
assert(fs.constants !== undefined, 'fs.constants is undefined');
|
||||
assert(typeof fs.constants.F_OK === 'number', 'fs.constants.F_OK is not a number');
|
||||
});
|
||||
|
||||
test('exports fs.existsSync', () => {
|
||||
assert(typeof fs.existsSync === 'function', 'fs.existsSync is not a function');
|
||||
assert(fs.existsSync(__dirname), 'existsSync returns false for existing dir');
|
||||
assert(!fs.existsSync(path.join(TMP, 'nonexistent')), 'existsSync returns true for missing path');
|
||||
});
|
||||
|
||||
test('exports fs.readFileSync', () => {
|
||||
const content = fs.readFileSync(__filename, 'utf8');
|
||||
assert(content.includes('Native fs Wrapper Tests'), 'readFileSync did not return expected content');
|
||||
});
|
||||
|
||||
test('exports fs.writeFileSync', () => {
|
||||
const p = path.join(TMP, 'write-sync.txt');
|
||||
fs.writeFileSync(p, 'hello sync');
|
||||
assertEqual(nativeFs.readFileSync(p, 'utf8'), 'hello sync', 'writeFileSync content mismatch');
|
||||
});
|
||||
|
||||
test('exports fs.mkdirSync', () => {
|
||||
const p = path.join(TMP, 'mkdir-sync');
|
||||
fs.mkdirSync(p);
|
||||
assert(nativeFs.statSync(p).isDirectory(), 'mkdirSync did not create directory');
|
||||
});
|
||||
|
||||
test('exports fs.readdirSync', () => {
|
||||
const entries = fs.readdirSync(TMP);
|
||||
assert(Array.isArray(entries), 'readdirSync did not return array');
|
||||
});
|
||||
|
||||
test('exports fs.statSync', () => {
|
||||
const stat = fs.statSync(__dirname);
|
||||
assert(stat.isDirectory(), 'statSync did not return directory stat');
|
||||
});
|
||||
|
||||
test('exports fs.copyFileSync', () => {
|
||||
const src = path.join(TMP, 'copy-src.txt');
|
||||
const dest = path.join(TMP, 'copy-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'copy me');
|
||||
fs.copyFileSync(src, dest);
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'copy me', 'copyFileSync content mismatch');
|
||||
});
|
||||
|
||||
test('exports fs.accessSync', () => {
|
||||
// Should not throw for existing file
|
||||
fs.accessSync(__filename);
|
||||
let threw = false;
|
||||
try {
|
||||
fs.accessSync(path.join(TMP, 'nonexistent'));
|
||||
} catch {
|
||||
threw = true;
|
||||
}
|
||||
assert(threw, 'accessSync did not throw for missing file');
|
||||
});
|
||||
|
||||
test('exports fs.createReadStream', () => {
|
||||
assert(typeof fs.createReadStream === 'function', 'createReadStream is not a function');
|
||||
});
|
||||
|
||||
console.log('');
|
||||
|
||||
// ── Async promise-based methods ─────────────────────────────────────────
|
||||
|
||||
console.log(`${colors.yellow}Async promise-based methods${colors.reset}`);
|
||||
|
||||
await asyncTest('readFile returns promise with content', async () => {
|
||||
const content = await fs.readFile(__filename, 'utf8');
|
||||
assert(content.includes('Native fs Wrapper Tests'), 'readFile did not return expected content');
|
||||
});
|
||||
|
||||
await asyncTest('writeFile writes content asynchronously', async () => {
|
||||
const p = path.join(TMP, 'write-async.txt');
|
||||
await fs.writeFile(p, 'hello async');
|
||||
assertEqual(nativeFs.readFileSync(p, 'utf8'), 'hello async', 'writeFile content mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('readdir returns directory entries', async () => {
|
||||
const dir = path.join(TMP, 'readdir-test');
|
||||
nativeFs.mkdirSync(dir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(dir, 'a.txt'), 'a');
|
||||
const entries = await fs.readdir(dir);
|
||||
assert(Array.isArray(entries), 'readdir did not return array');
|
||||
assert(entries.length > 0, 'readdir returned empty array for non-empty dir');
|
||||
});
|
||||
|
||||
await asyncTest('readdir with withFileTypes returns Dirent objects', async () => {
|
||||
const dir = path.join(TMP, 'dirent-test');
|
||||
nativeFs.mkdirSync(dir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(dir, 'file.txt'), 'content');
|
||||
nativeFs.mkdirSync(path.join(dir, 'subdir'));
|
||||
|
||||
const entries = await fs.readdir(dir, { withFileTypes: true });
|
||||
assert(Array.isArray(entries), 'should return array');
|
||||
|
||||
const fileEntry = entries.find((e) => e.name === 'file.txt');
|
||||
const dirEntry = entries.find((e) => e.name === 'subdir');
|
||||
|
||||
assert(fileEntry && typeof fileEntry.isFile === 'function', 'entry should have isFile method');
|
||||
assert(dirEntry && typeof dirEntry.isDirectory === 'function', 'entry should have isDirectory method');
|
||||
assert(fileEntry.isFile(), 'file entry should return true for isFile()');
|
||||
assert(dirEntry.isDirectory(), 'dir entry should return true for isDirectory()');
|
||||
});
|
||||
|
||||
await asyncTest('stat returns file stats', async () => {
|
||||
const stat = await fs.stat(__dirname);
|
||||
assert(stat.isDirectory(), 'stat did not return directory stat');
|
||||
});
|
||||
|
||||
await asyncTest('access resolves for existing file', async () => {
|
||||
await fs.access(__filename); // should not throw
|
||||
});
|
||||
|
||||
await asyncTest('access rejects for missing file', async () => {
|
||||
let threw = false;
|
||||
try {
|
||||
await fs.access(path.join(TMP, 'nonexistent'));
|
||||
} catch {
|
||||
threw = true;
|
||||
}
|
||||
assert(threw, 'access did not reject for missing file');
|
||||
});
|
||||
|
||||
await asyncTest('rename moves a file', async () => {
|
||||
const src = path.join(TMP, 'rename-src.txt');
|
||||
const dest = path.join(TMP, 'rename-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'rename me');
|
||||
await fs.rename(src, dest);
|
||||
assert(!nativeFs.existsSync(src), 'rename did not remove source');
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'rename me', 'rename content mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('realpath resolves path', async () => {
|
||||
const resolved = await fs.realpath(__dirname);
|
||||
assert(typeof resolved === 'string', 'realpath did not return string');
|
||||
assert(resolved.length > 0, 'realpath returned empty string');
|
||||
});
|
||||
|
||||
console.log('');
|
||||
|
||||
// ── fs-extra compatible methods ─────────────────────────────────────────
|
||||
|
||||
console.log(`${colors.yellow}fs-extra compatible methods${colors.reset}`);
|
||||
|
||||
await asyncTest('ensureDir creates nested directories', async () => {
|
||||
const p = path.join(TMP, 'ensure', 'deep', 'nested');
|
||||
await fs.ensureDir(p);
|
||||
assert(nativeFs.statSync(p).isDirectory(), 'ensureDir did not create nested dirs');
|
||||
});
|
||||
|
||||
await asyncTest('ensureDir is idempotent on existing directory', async () => {
|
||||
const p = path.join(TMP, 'ensure', 'deep', 'nested');
|
||||
await fs.ensureDir(p); // should not throw
|
||||
assert(nativeFs.statSync(p).isDirectory(), 'ensureDir failed on existing dir');
|
||||
});
|
||||
|
||||
await asyncTest('pathExists returns true for existing path', async () => {
|
||||
assertEqual(await fs.pathExists(__filename), true, 'pathExists returned false for existing file');
|
||||
});
|
||||
|
||||
await asyncTest('pathExists returns false for missing path', async () => {
|
||||
assertEqual(await fs.pathExists(path.join(TMP, 'nonexistent')), false, 'pathExists returned true for missing path');
|
||||
});
|
||||
|
||||
test('pathExistsSync returns true for existing path', () => {
|
||||
assertEqual(fs.pathExistsSync(__filename), true, 'pathExistsSync returned false for existing file');
|
||||
});
|
||||
|
||||
test('pathExistsSync returns false for missing path', () => {
|
||||
assertEqual(fs.pathExistsSync(path.join(TMP, 'nonexistent')), false, 'pathExistsSync returned true for missing path');
|
||||
});
|
||||
|
||||
await asyncTest('copy copies a single file', async () => {
|
||||
const src = path.join(TMP, 'copy-file-src.txt');
|
||||
const dest = path.join(TMP, 'copy-file-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'copy file');
|
||||
await fs.copy(src, dest);
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'copy file', 'copy file content mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('copy creates parent directories for dest', async () => {
|
||||
const src = path.join(TMP, 'copy-mkdir-src.txt');
|
||||
nativeFs.writeFileSync(src, 'copy mkdir');
|
||||
const dest = path.join(TMP, 'copy-deep', 'nested', 'dest.txt');
|
||||
await fs.copy(src, dest);
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'copy mkdir', 'copy with mkdir content mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('copy copies a directory recursively', async () => {
|
||||
const srcDir = path.join(TMP, 'copy-dir-src');
|
||||
nativeFs.mkdirSync(path.join(srcDir, 'sub'), { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'a.txt'), 'file a');
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'sub', 'b.txt'), 'file b');
|
||||
|
||||
const destDir = path.join(TMP, 'copy-dir-dest');
|
||||
await fs.copy(srcDir, destDir);
|
||||
|
||||
assertEqual(nativeFs.readFileSync(path.join(destDir, 'a.txt'), 'utf8'), 'file a', 'copy dir: top-level file mismatch');
|
||||
assertEqual(nativeFs.readFileSync(path.join(destDir, 'sub', 'b.txt'), 'utf8'), 'file b', 'copy dir: nested file mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('copy respects overwrite: false for files', async () => {
|
||||
const src = path.join(TMP, 'overwrite-src.txt');
|
||||
const dest = path.join(TMP, 'overwrite-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'new content');
|
||||
nativeFs.writeFileSync(dest, 'original content');
|
||||
await fs.copy(src, dest, { overwrite: false });
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'original content', 'copy overwrote file when overwrite: false');
|
||||
});
|
||||
|
||||
await asyncTest('copy respects overwrite: false for directories', async () => {
|
||||
const srcDir = path.join(TMP, 'ow-dir-src');
|
||||
nativeFs.mkdirSync(srcDir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'file.txt'), 'new');
|
||||
|
||||
const destDir = path.join(TMP, 'ow-dir-dest');
|
||||
nativeFs.mkdirSync(destDir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(destDir, 'file.txt'), 'original');
|
||||
|
||||
await fs.copy(srcDir, destDir, { overwrite: false });
|
||||
assertEqual(nativeFs.readFileSync(path.join(destDir, 'file.txt'), 'utf8'), 'original', 'copy dir overwrote file when overwrite: false');
|
||||
});
|
||||
|
||||
await asyncTest('copy respects filter option for files', async () => {
|
||||
const srcDir = path.join(TMP, 'filter-src');
|
||||
nativeFs.mkdirSync(srcDir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'keep.txt'), 'keep me');
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'skip.log'), 'skip me');
|
||||
|
||||
const destDir = path.join(TMP, 'filter-dest');
|
||||
await fs.copy(srcDir, destDir, {
|
||||
filter: (src) => !src.endsWith('.log'),
|
||||
});
|
||||
|
||||
assert(nativeFs.existsSync(path.join(destDir, 'keep.txt')), 'filter: kept file is missing');
|
||||
assert(!nativeFs.existsSync(path.join(destDir, 'skip.log')), 'filter: skipped file was copied');
|
||||
});
|
||||
|
||||
await asyncTest('copy respects filter option for directories', async () => {
|
||||
const srcDir = path.join(TMP, 'filter-dir-src');
|
||||
nativeFs.mkdirSync(path.join(srcDir, 'include'), { recursive: true });
|
||||
nativeFs.mkdirSync(path.join(srcDir, 'node_modules'), { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'include', 'a.txt'), 'included');
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'node_modules', 'b.txt'), 'excluded');
|
||||
|
||||
const destDir = path.join(TMP, 'filter-dir-dest');
|
||||
await fs.copy(srcDir, destDir, {
|
||||
filter: (src) => !src.includes('node_modules'),
|
||||
});
|
||||
|
||||
assert(nativeFs.existsSync(path.join(destDir, 'include', 'a.txt')), 'filter: included dir file is missing');
|
||||
assert(!nativeFs.existsSync(path.join(destDir, 'node_modules')), 'filter: excluded dir was copied');
|
||||
});
|
||||
|
||||
await asyncTest('copy filter skips top-level src when filter returns false', async () => {
|
||||
const src = path.join(TMP, 'filter-skip-src.txt');
|
||||
const dest = path.join(TMP, 'filter-skip-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'should not be copied');
|
||||
await fs.copy(src, dest, {
|
||||
filter: () => false,
|
||||
});
|
||||
assert(!nativeFs.existsSync(dest), 'filter: file was copied despite filter returning false');
|
||||
});
|
||||
|
||||
await asyncTest('remove deletes a file', async () => {
|
||||
const p = path.join(TMP, 'remove-file.txt');
|
||||
nativeFs.writeFileSync(p, 'delete me');
|
||||
await fs.remove(p);
|
||||
assert(!nativeFs.existsSync(p), 'remove did not delete file');
|
||||
});
|
||||
|
||||
await asyncTest('remove deletes a directory recursively', async () => {
|
||||
const dir = path.join(TMP, 'remove-dir');
|
||||
nativeFs.mkdirSync(path.join(dir, 'sub'), { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(dir, 'sub', 'file.txt'), 'nested');
|
||||
await fs.remove(dir);
|
||||
assert(!nativeFs.existsSync(dir), 'remove did not delete directory');
|
||||
});
|
||||
|
||||
await asyncTest('remove does not throw for missing path', async () => {
|
||||
await fs.remove(path.join(TMP, 'nonexistent-remove-target'));
|
||||
// should not throw — force: true
|
||||
});
|
||||
|
||||
await asyncTest('move renames a file', async () => {
|
||||
const src = path.join(TMP, 'move-src.txt');
|
||||
const dest = path.join(TMP, 'move-dest.txt');
|
||||
nativeFs.writeFileSync(src, 'move me');
|
||||
await fs.move(src, dest);
|
||||
assert(!nativeFs.existsSync(src), 'move did not remove source');
|
||||
assertEqual(nativeFs.readFileSync(dest, 'utf8'), 'move me', 'move content mismatch');
|
||||
});
|
||||
|
||||
await asyncTest('move renames a directory', async () => {
|
||||
const srcDir = path.join(TMP, 'move-dir-src');
|
||||
nativeFs.mkdirSync(srcDir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(srcDir, 'file.txt'), 'dir move');
|
||||
|
||||
const destDir = path.join(TMP, 'move-dir-dest');
|
||||
await fs.move(srcDir, destDir);
|
||||
assert(!nativeFs.existsSync(srcDir), 'move did not remove source dir');
|
||||
assertEqual(nativeFs.readFileSync(path.join(destDir, 'file.txt'), 'utf8'), 'dir move', 'move dir content mismatch');
|
||||
});
|
||||
|
||||
test('readJsonSync parses JSON file', () => {
|
||||
const p = path.join(TMP, 'test.json');
|
||||
nativeFs.writeFileSync(p, JSON.stringify({ key: 'value', num: 42 }));
|
||||
const result = fs.readJsonSync(p);
|
||||
assertEqual(result.key, 'value', 'readJsonSync key mismatch');
|
||||
assertEqual(result.num, 42, 'readJsonSync num mismatch');
|
||||
});
|
||||
|
||||
test('readJsonSync throws on invalid JSON', () => {
|
||||
const p = path.join(TMP, 'bad.json');
|
||||
nativeFs.writeFileSync(p, '{ invalid json }');
|
||||
let threw = false;
|
||||
try {
|
||||
fs.readJsonSync(p);
|
||||
} catch {
|
||||
threw = true;
|
||||
}
|
||||
assert(threw, 'readJsonSync did not throw on invalid JSON');
|
||||
});
|
||||
|
||||
test('readJsonSync strips UTF-8 BOM', () => {
|
||||
const p = path.join(TMP, 'bom.json');
|
||||
nativeFs.writeFileSync(p, '\uFEFF{"bom": true}');
|
||||
const result = fs.readJsonSync(p);
|
||||
assertEqual(result.bom, true, 'readJsonSync failed to parse BOM-prefixed JSON');
|
||||
});
|
||||
|
||||
console.log('');
|
||||
|
||||
// ── Bulk copy stress test ───────────────────────────────────────────────
|
||||
|
||||
console.log(`${colors.yellow}Bulk copy determinism${colors.reset}`);
|
||||
|
||||
await asyncTest('copy preserves all files in a large directory tree', async () => {
|
||||
// Create a tree with 200+ files to verify no silent loss
|
||||
const srcDir = path.join(TMP, 'bulk-src');
|
||||
const fileCount = 250;
|
||||
|
||||
for (let i = 0; i < fileCount; i++) {
|
||||
const subDir = path.join(srcDir, `dir-${String(Math.floor(i / 10)).padStart(2, '0')}`);
|
||||
nativeFs.mkdirSync(subDir, { recursive: true });
|
||||
nativeFs.writeFileSync(path.join(subDir, `file-${i}.txt`), `content-${i}`);
|
||||
}
|
||||
|
||||
const destDir = path.join(TMP, 'bulk-dest');
|
||||
await fs.copy(srcDir, destDir);
|
||||
|
||||
// Count all files in destination
|
||||
let destCount = 0;
|
||||
const countFiles = (dir) => {
|
||||
const entries = nativeFs.readdirSync(dir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
countFiles(path.join(dir, entry.name));
|
||||
} else {
|
||||
destCount++;
|
||||
}
|
||||
}
|
||||
};
|
||||
countFiles(destDir);
|
||||
|
||||
assertEqual(destCount, fileCount, `bulk copy lost files: expected ${fileCount}, got ${destCount}`);
|
||||
});
|
||||
|
||||
console.log('');
|
||||
|
||||
// ── Cleanup ─────────────────────────────────────────────────────────────
|
||||
|
||||
teardown();
|
||||
|
||||
// ── Summary ─────────────────────────────────────────────────────────────
|
||||
console.log(`${colors.cyan}========================================`);
|
||||
console.log('Test Results:');
|
||||
console.log(` Total: ${totalTests}`);
|
||||
console.log(` Passed: ${colors.green}${passedTests}${colors.reset}`);
|
||||
console.log(` Failed: ${colors.red}${totalTests - passedTests}${colors.reset}`);
|
||||
console.log(`========================================${colors.reset}\n`);
|
||||
|
||||
if (failures.length === 0) {
|
||||
console.log(`${colors.green}\u2728 All fs wrapper tests passed!${colors.reset}\n`);
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.log(`${colors.red}\u274C Some fs wrapper tests failed${colors.reset}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Run tests
|
||||
runTests().catch((error) => {
|
||||
teardown();
|
||||
console.error(`${colors.red}Test runner failed:${colors.reset}`, error.message);
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
});
|
||||
|
|
@ -12,7 +12,7 @@
|
|||
*/
|
||||
|
||||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../tools/cli/lib/fs');
|
||||
const { YamlXmlBuilder } = require('../tools/cli/lib/yaml-xml-builder');
|
||||
const { ManifestGenerator } = require('../tools/cli/installers/lib/core/manifest-generator');
|
||||
|
||||
|
|
@ -68,7 +68,9 @@ async function runTests() {
|
|||
const tempOutput = path.join(__dirname, 'temp-pm-agent.md');
|
||||
|
||||
try {
|
||||
const result = await builder.buildAgent(pmAgentPath, null, tempOutput, { includeMetadata: true });
|
||||
const result = await builder.buildAgent(pmAgentPath, null, tempOutput, {
|
||||
includeMetadata: true,
|
||||
});
|
||||
|
||||
assert(result && result.outputPath === tempOutput, 'Agent compilation returns result object with outputPath');
|
||||
|
||||
|
|
@ -168,7 +170,9 @@ async function runTests() {
|
|||
const tempOutput = path.join(__dirname, 'temp-qa-agent.md');
|
||||
|
||||
try {
|
||||
const result = await builder.buildAgent(qaAgentPath, null, tempOutput, { includeMetadata: true });
|
||||
const result = await builder.buildAgent(qaAgentPath, null, tempOutput, {
|
||||
includeMetadata: true,
|
||||
});
|
||||
const compiled = await fs.readFile(tempOutput, 'utf8');
|
||||
|
||||
assert(compiled.includes('QA Engineer'), 'QA agent compilation includes agent title');
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ module.exports = {
|
|||
const { bmadDir } = await installer.findBmadDir(projectDir);
|
||||
|
||||
// Check if bmad directory exists
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../lib/fs');
|
||||
if (!(await fs.pathExists(bmadDir))) {
|
||||
await prompts.log.warn('No BMAD installation found in the current directory.');
|
||||
await prompts.log.message(`Expected location: ${bmadDir}`);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../lib/fs');
|
||||
const prompts = require('../lib/prompts');
|
||||
const { Installer } = require('../installers/lib/core/installer');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const { getProjectRoot, getModulePath } = require('../../../lib/project-root');
|
||||
const { CLIUtils } = require('../../../lib/cli-utils');
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
* and can be checked into source control
|
||||
*/
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const crypto = require('node:crypto');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const glob = require('glob');
|
||||
const yaml = require('yaml');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const { Manifest } = require('./manifest');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const { Detector } = require('./detector');
|
||||
const { Manifest } = require('./manifest');
|
||||
const { ModuleManager } = require('../modules/manager');
|
||||
|
|
@ -87,7 +87,7 @@ class Installer {
|
|||
if (textExtensions.includes(ext)) {
|
||||
try {
|
||||
// Read the file content
|
||||
let content = await fs.readFile(sourcePath, 'utf8');
|
||||
const content = await fs.readFile(sourcePath, 'utf8');
|
||||
|
||||
// Write to target with replaced content
|
||||
await fs.ensureDir(path.dirname(targetPath));
|
||||
|
|
@ -260,7 +260,7 @@ class Installer {
|
|||
|
||||
// Collect configurations for modules (skip if quick update already collected them)
|
||||
let moduleConfigs;
|
||||
let customModulePaths = new Map();
|
||||
const customModulePaths = new Map();
|
||||
|
||||
if (config._quickUpdate) {
|
||||
// Quick update already collected all configs, use them directly
|
||||
|
|
@ -524,7 +524,9 @@ class Installer {
|
|||
// Also check cache directory for custom modules (like quick update does)
|
||||
const cacheDir = path.join(bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(cacheDir)) {
|
||||
const cachedModules = await fs.readdir(cacheDir, { withFileTypes: true });
|
||||
const cachedModules = await fs.readdir(cacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const cachedModule of cachedModules) {
|
||||
const moduleId = cachedModule.name;
|
||||
|
|
@ -585,7 +587,9 @@ class Installer {
|
|||
const relativePath = path.relative(bmadDir, modifiedFile.path);
|
||||
const tempBackupPath = path.join(tempModifiedBackupDir, relativePath);
|
||||
await fs.ensureDir(path.dirname(tempBackupPath));
|
||||
await fs.copy(modifiedFile.path, tempBackupPath, { overwrite: true });
|
||||
await fs.copy(modifiedFile.path, tempBackupPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
}
|
||||
spinner.stop(`Backed up ${modifiedFiles.length} modified files`);
|
||||
|
||||
|
|
@ -608,7 +612,9 @@ class Installer {
|
|||
// Also check cache directory for custom modules (like quick update does)
|
||||
const cacheDir = path.join(bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(cacheDir)) {
|
||||
const cachedModules = await fs.readdir(cacheDir, { withFileTypes: true });
|
||||
const cachedModules = await fs.readdir(cacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const cachedModule of cachedModules) {
|
||||
const moduleId = cachedModule.name;
|
||||
|
|
@ -668,7 +674,9 @@ class Installer {
|
|||
const relativePath = path.relative(bmadDir, modifiedFile.path);
|
||||
const tempBackupPath = path.join(tempModifiedBackupDir, relativePath);
|
||||
await fs.ensureDir(path.dirname(tempBackupPath));
|
||||
await fs.copy(modifiedFile.path, tempBackupPath, { overwrite: true });
|
||||
await fs.copy(modifiedFile.path, tempBackupPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
}
|
||||
spinner.stop(`Backed up ${modifiedFiles.length} modified files`);
|
||||
config._tempModifiedBackupDir = tempModifiedBackupDir;
|
||||
|
|
@ -887,7 +895,11 @@ class Installer {
|
|||
let taskResolution;
|
||||
|
||||
// Collect directory creation results for output after tasks() completes
|
||||
const dirResults = { createdDirs: [], movedDirs: [], createdWdsFolders: [] };
|
||||
const dirResults = {
|
||||
createdDirs: [],
|
||||
movedDirs: [],
|
||||
createdWdsFolders: [],
|
||||
};
|
||||
|
||||
// Build task list conditionally
|
||||
const installTasks = [];
|
||||
|
|
@ -899,7 +911,9 @@ class Installer {
|
|||
task: async (message) => {
|
||||
await this.installCoreWithDependencies(bmadDir, { core: {} });
|
||||
addResult('Core', 'ok', isQuickUpdate ? 'updated' : 'installed');
|
||||
await this.generateModuleConfigs(bmadDir, { core: config.coreConfig || {} });
|
||||
await this.generateModuleConfigs(bmadDir, {
|
||||
core: config.coreConfig || {},
|
||||
});
|
||||
return isQuickUpdate ? 'Core updated' : 'Core installed';
|
||||
},
|
||||
});
|
||||
|
|
@ -945,7 +959,11 @@ class Installer {
|
|||
const cachedModule = finalCustomContent.cachedModules.find((m) => m.id === moduleName);
|
||||
if (cachedModule) {
|
||||
isCustomModule = true;
|
||||
customInfo = { id: moduleName, path: cachedModule.cachePath, config: {} };
|
||||
customInfo = {
|
||||
id: moduleName,
|
||||
path: cachedModule.cachePath,
|
||||
config: {},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -995,7 +1013,11 @@ class Installer {
|
|||
},
|
||||
);
|
||||
await this.generateModuleConfigs(bmadDir, {
|
||||
[moduleName]: { ...config.coreConfig, ...customInfo.config, ...collectedModuleConfig },
|
||||
[moduleName]: {
|
||||
...config.coreConfig,
|
||||
...customInfo.config,
|
||||
...collectedModuleConfig,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
if (!resolution || !resolution.byModule) {
|
||||
|
|
@ -1424,7 +1446,9 @@ class Installer {
|
|||
// Also check cache directory
|
||||
const cacheDir = path.join(bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(cacheDir)) {
|
||||
const cachedModules = await fs.readdir(cacheDir, { withFileTypes: true });
|
||||
const cachedModules = await fs.readdir(cacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const cachedModule of cachedModules) {
|
||||
if (cachedModule.isDirectory()) {
|
||||
|
|
@ -1499,7 +1523,9 @@ class Installer {
|
|||
|
||||
for (const module of existingInstall.modules) {
|
||||
spinner.message(`Updating module: ${module.id}...`);
|
||||
await this.moduleManager.update(module.id, bmadDir, config.force, { installer: this });
|
||||
await this.moduleManager.update(module.id, bmadDir, config.force, {
|
||||
installer: this,
|
||||
});
|
||||
}
|
||||
|
||||
// Update manifest
|
||||
|
|
@ -1558,7 +1584,9 @@ class Installer {
|
|||
|
||||
// 2. IDE CLEANUP (before _bmad/ deletion so configs are accessible)
|
||||
if (options.removeIdeConfigs !== false) {
|
||||
await this.uninstallIdeConfigs(projectDir, existingInstall, { silent: options.silent });
|
||||
await this.uninstallIdeConfigs(projectDir, existingInstall, {
|
||||
silent: options.silent,
|
||||
});
|
||||
removed.ideConfigs = true;
|
||||
}
|
||||
|
||||
|
|
@ -1797,7 +1825,11 @@ class Installer {
|
|||
|
||||
// Lookup agent info
|
||||
const cleanAgentName = agentName ? agentName.trim() : '';
|
||||
const agentData = agentInfo.get(cleanAgentName) || { command: '', displayName: '', title: '' };
|
||||
const agentData = agentInfo.get(cleanAgentName) || {
|
||||
command: '',
|
||||
displayName: '',
|
||||
title: '',
|
||||
};
|
||||
|
||||
// Build new row with agent info
|
||||
const newRow = [
|
||||
|
|
@ -1852,8 +1884,8 @@ class Installer {
|
|||
}
|
||||
|
||||
// Sequence comparison
|
||||
const seqA = parseInt(colsA[4] || '0', 10);
|
||||
const seqB = parseInt(colsB[4] || '0', 10);
|
||||
const seqA = Number.parseInt(colsA[4] || '0', 10);
|
||||
const seqB = Number.parseInt(colsB[4] || '0', 10);
|
||||
return seqA - seqB;
|
||||
});
|
||||
|
||||
|
|
@ -2395,7 +2427,9 @@ class Installer {
|
|||
}
|
||||
const cacheDir = path.join(bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(cacheDir)) {
|
||||
const cachedModules = await fs.readdir(cacheDir, { withFileTypes: true });
|
||||
const cachedModules = await fs.readdir(cacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const cachedModule of cachedModules) {
|
||||
const moduleId = cachedModule.name;
|
||||
|
|
@ -2630,7 +2664,9 @@ class Installer {
|
|||
const customModuleSources = new Map();
|
||||
const cacheDir = path.join(bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(cacheDir)) {
|
||||
const cachedModules = await fs.readdir(cacheDir, { withFileTypes: true });
|
||||
const cachedModules = await fs.readdir(cacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const cachedModule of cachedModules) {
|
||||
if (cachedModule.isDirectory()) {
|
||||
|
|
@ -3102,8 +3138,7 @@ class Installer {
|
|||
// Remove the module from filesystem and manifest
|
||||
const modulePath = path.join(bmadDir, missing.id);
|
||||
if (await fs.pathExists(modulePath)) {
|
||||
const fsExtra = require('fs-extra');
|
||||
await fsExtra.remove(modulePath);
|
||||
await fs.remove(modulePath);
|
||||
await prompts.log.warn(`Deleted module directory: ${path.relative(projectRoot, modulePath)}`);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const crypto = require('node:crypto');
|
||||
const csv = require('csv-parse/sync');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const crypto = require('node:crypto');
|
||||
const { getProjectRoot } = require('../../../lib/project-root');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
const { FileOps } = require('../../../lib/file-ops');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const { XmlHandler } = require('../../../lib/xml-handler');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
const { getSourcePath } = require('../../../lib/project-root');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const path = require('node:path');
|
||||
const os = require('node:os');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ const { BaseIdeSetup } = require('./_base-ide');
|
|||
const prompts = require('../../../lib/prompts');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const { BMAD_FOLDER_NAME, toDashPath } = require('./shared/path-utils');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const csv = require('csv-parse/sync');
|
||||
const yaml = require('yaml');
|
||||
|
||||
|
|
|
|||
|
|
@ -157,7 +157,7 @@ class KiloSetup extends BaseIdeSetup {
|
|||
* @param {string} workflowsDir - Workflows directory path
|
||||
*/
|
||||
async clearBmadWorkflows(workflowsDir) {
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
if (!(await fs.pathExists(workflowsDir))) return;
|
||||
|
||||
const entries = await fs.readdir(workflowsDir);
|
||||
|
|
@ -172,7 +172,7 @@ class KiloSetup extends BaseIdeSetup {
|
|||
* Cleanup KiloCode configuration
|
||||
*/
|
||||
async cleanup(projectDir, options = {}) {
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const kiloModesPath = path.join(projectDir, this.configFile);
|
||||
|
||||
if (await fs.pathExists(kiloModesPath)) {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const { BMAD_FOLDER_NAME } = require('./shared/path-utils');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const yaml = require('yaml');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../../lib/fs');
|
||||
const { toColonPath, toDashPath, customAgentColonName, customAgentDashName, BMAD_FOLDER_NAME } = require('./path-utils');
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../../lib/fs');
|
||||
|
||||
/**
|
||||
* Helpers for gathering BMAD agents/tasks from the installed tree.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const { glob } = require('glob');
|
||||
const { getSourcePath } = require('../../../../lib/project-root');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../../lib/fs');
|
||||
const csv = require('csv-parse/sync');
|
||||
const { toColonName, toColonPath, toDashPath, BMAD_FOLDER_NAME } = require('./path-utils');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../../lib/fs');
|
||||
const csv = require('csv-parse/sync');
|
||||
const prompts = require('../../../../lib/prompts');
|
||||
const { toColonPath, toDashPath, customAgentColonName, customAgentDashName, BMAD_FOLDER_NAME } = require('./path-utils');
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ description: '{{description}}'
|
|||
You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @_bmad/{{module}}/agents/{{path}}
|
||||
1. LOAD the FULL agent file from {project-root}/_bmad/{{module}}/agents/{{path}}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. Execute ALL activation steps exactly as written in the agent file
|
||||
4. Follow the agent's persona and menu system precisely
|
||||
|
|
|
|||
|
|
@ -6,9 +6,9 @@ description: '{{description}}'
|
|||
IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded:
|
||||
|
||||
<steps CRITICAL="TRUE">
|
||||
1. Always LOAD the FULL @{project-root}/{{bmadFolderName}}/core/tasks/workflow.xml
|
||||
2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/{{bmadFolderName}}/{{path}}
|
||||
3. Pass the yaml path @{project-root}/{{bmadFolderName}}/{{path}} as 'workflow-config' parameter to the workflow.xml instructions
|
||||
1. Always LOAD the FULL {project-root}/{{bmadFolderName}}/core/tasks/workflow.xml
|
||||
2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config {project-root}/{{bmadFolderName}}/{{path}}
|
||||
3. Pass the yaml path {project-root}/{{bmadFolderName}}/{{path}} as 'workflow-config' parameter to the workflow.xml instructions
|
||||
4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions
|
||||
5. Save outputs after EACH section when generating any documents from templates
|
||||
</steps>
|
||||
|
|
|
|||
|
|
@ -3,4 +3,4 @@ name: '{{name}}'
|
|||
description: '{{description}}'
|
||||
---
|
||||
|
||||
IT IS CRITICAL THAT YOU FOLLOW THIS COMMAND: LOAD the FULL @{project-root}/{{bmadFolderName}}/{{path}}, READ its entire contents and follow its directions exactly!
|
||||
IT IS CRITICAL THAT YOU FOLLOW THIS COMMAND: LOAD the FULL {project-root}/{{bmadFolderName}}/{{path}}, READ its entire contents and follow its directions exactly!
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ description: '{{description}}'
|
|||
IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded:
|
||||
|
||||
<steps CRITICAL="TRUE">
|
||||
1. Always LOAD the FULL @_bmad/core/tasks/workflow.xml
|
||||
2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{{workflow_path}}
|
||||
1. Always LOAD the FULL {project-root}/_bmad/core/tasks/workflow.xml
|
||||
2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config {project-root}/{{workflow_path}}
|
||||
3. Pass the yaml path {{workflow_path}} as 'workflow-config' parameter to the workflow.xml instructions
|
||||
4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions
|
||||
5. Save outputs after EACH section when generating any documents from templates
|
||||
|
|
|
|||
|
|
@ -2,4 +2,4 @@
|
|||
description: '{{description}}'
|
||||
---
|
||||
|
||||
IT IS CRITICAL THAT YOU FOLLOW THIS COMMAND: LOAD the FULL @{{workflow_path}}, READ its entire contents and follow its directions exactly!
|
||||
IT IS CRITICAL THAT YOU FOLLOW THIS COMMAND: LOAD the FULL {project-root}/{{workflow_path}}, READ its entire contents and follow its directions exactly!
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const yaml = require('yaml');
|
||||
const prompts = require('../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const path = require('node:path');
|
||||
const yaml = require('yaml');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('../../../lib/fs');
|
||||
const yaml = require('yaml');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
const { XmlHandler } = require('../../../lib/xml-handler');
|
||||
|
|
@ -14,7 +14,7 @@ const { BMAD_FOLDER_NAME } = require('../ide/shared/path-utils');
|
|||
* and agent file management including XML activation block injection.
|
||||
*
|
||||
* @class ModuleManager
|
||||
* @requires fs-extra
|
||||
* @requires lib/fs
|
||||
* @requires yaml
|
||||
* @requires prompts
|
||||
* @requires XmlHandler
|
||||
|
|
@ -208,7 +208,9 @@ class ModuleManager {
|
|||
if (this.bmadDir) {
|
||||
const customCacheDir = path.join(this.bmadDir, '_config', 'custom');
|
||||
if (await fs.pathExists(customCacheDir)) {
|
||||
const cacheEntries = await fs.readdir(customCacheDir, { withFileTypes: true });
|
||||
const cacheEntries = await fs.readdir(customCacheDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
for (const entry of cacheEntries) {
|
||||
if (entry.isDirectory()) {
|
||||
const cachePath = path.join(customCacheDir, entry.name);
|
||||
|
|
@ -387,7 +389,12 @@ class ModuleManager {
|
|||
const fetchSpinner = await createSpinner();
|
||||
fetchSpinner.start(`Fetching ${moduleInfo.name}...`);
|
||||
try {
|
||||
const currentRef = execSync('git rev-parse HEAD', { cwd: moduleCacheDir, stdio: 'pipe' }).toString().trim();
|
||||
const currentRef = execSync('git rev-parse HEAD', {
|
||||
cwd: moduleCacheDir,
|
||||
stdio: 'pipe',
|
||||
})
|
||||
.toString()
|
||||
.trim();
|
||||
// Fetch and reset to remote - works better with shallow clones than pull
|
||||
execSync('git fetch origin --depth 1', {
|
||||
cwd: moduleCacheDir,
|
||||
|
|
@ -399,7 +406,12 @@ class ModuleManager {
|
|||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
env: { ...process.env, GIT_TERMINAL_PROMPT: '0' },
|
||||
});
|
||||
const newRef = execSync('git rev-parse HEAD', { cwd: moduleCacheDir, stdio: 'pipe' }).toString().trim();
|
||||
const newRef = execSync('git rev-parse HEAD', {
|
||||
cwd: moduleCacheDir,
|
||||
stdio: 'pipe',
|
||||
})
|
||||
.toString()
|
||||
.trim();
|
||||
|
||||
fetchSpinner.stop(`Fetched ${moduleInfo.name}`);
|
||||
// Force dependency install if we got new code
|
||||
|
|
@ -521,7 +533,9 @@ class ModuleManager {
|
|||
* @param {Object} options.logger - Logger instance for output
|
||||
*/
|
||||
async install(moduleName, bmadDir, fileTrackingCallback = null, options = {}) {
|
||||
const sourcePath = await this.findModuleSource(moduleName, { silent: options.silent });
|
||||
const sourcePath = await this.findModuleSource(moduleName, {
|
||||
silent: options.silent,
|
||||
});
|
||||
const targetPath = path.join(bmadDir, moduleName);
|
||||
|
||||
// Check if source module exists
|
||||
|
|
@ -619,7 +633,9 @@ class ModuleManager {
|
|||
if (force) {
|
||||
// Force update - remove and reinstall
|
||||
await fs.remove(targetPath);
|
||||
return await this.install(moduleName, bmadDir, null, { installer: options.installer });
|
||||
return await this.install(moduleName, bmadDir, null, {
|
||||
installer: options.installer,
|
||||
});
|
||||
} else {
|
||||
// Selective update - preserve user modifications
|
||||
await this.syncModule(sourcePath, targetPath);
|
||||
|
|
@ -947,7 +963,7 @@ class ModuleManager {
|
|||
|
||||
// Check for customizations and build answers object
|
||||
let customizedFields = [];
|
||||
let answers = {};
|
||||
const answers = {};
|
||||
if (await fs.pathExists(customizePath)) {
|
||||
const customizeContent = await fs.readFile(customizePath, 'utf8');
|
||||
const customizeData = yaml.parse(customizeContent);
|
||||
|
|
@ -1020,7 +1036,9 @@ class ModuleManager {
|
|||
|
||||
// Copy any non-sidecar files from agent directory (e.g., foo.md)
|
||||
const agentDir = path.dirname(agentFile);
|
||||
const agentEntries = await fs.readdir(agentDir, { withFileTypes: true });
|
||||
const agentEntries = await fs.readdir(agentDir, {
|
||||
withFileTypes: true,
|
||||
});
|
||||
|
||||
for (const entry of agentEntries) {
|
||||
if (entry.isFile() && !entry.name.endsWith('.agent.yaml') && !entry.name.endsWith('.md')) {
|
||||
|
|
@ -1230,7 +1248,7 @@ class ModuleManager {
|
|||
* @param {string} newModuleName - New module name to reference
|
||||
*/
|
||||
async updateWorkflowConfigSource(workflowYamlPath, newModuleName) {
|
||||
let yamlContent = await fs.readFile(workflowYamlPath, 'utf8');
|
||||
const yamlContent = await fs.readFile(workflowYamlPath, 'utf8');
|
||||
|
||||
// Replace config_source: "{project-root}/_bmad/OLD_MODULE/config.yaml"
|
||||
// with config_source: "{project-root}/_bmad/NEW_MODULE/config.yaml"
|
||||
|
|
@ -1262,7 +1280,11 @@ class ModuleManager {
|
|||
const moduleConfig = options.moduleConfig || {};
|
||||
const existingModuleConfig = options.existingModuleConfig || {};
|
||||
const projectRoot = path.dirname(bmadDir);
|
||||
const emptyResult = { createdDirs: [], movedDirs: [], createdWdsFolders: [] };
|
||||
const emptyResult = {
|
||||
createdDirs: [],
|
||||
movedDirs: [],
|
||||
createdWdsFolders: [],
|
||||
};
|
||||
|
||||
// Special handling for core module - it's in src/core not src/modules
|
||||
let sourcePath;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const path = require('node:path');
|
||||
const { getSourcePath } = require('./project-root');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const yaml = require('yaml');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
|
||||
/**
|
||||
* Analyzes agent YAML files to detect which handlers are needed
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const { escapeXml } = require('../../lib/xml-utils');
|
||||
|
||||
const AgentPartyGenerator = {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const yaml = require('yaml');
|
||||
const path = require('node:path');
|
||||
const packageJson = require('../../../package.json');
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const path = require('node:path');
|
||||
const crypto = require('node:crypto');
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,169 @@
|
|||
/**
|
||||
* Drop-in replacement for fs-extra that uses only native Node.js fs.
|
||||
*
|
||||
* fs-extra routes every call through graceful-fs, whose EMFILE retry queue
|
||||
* causes non-deterministic file loss on macOS during bulk copy operations.
|
||||
* This module provides the same API surface used by the CLI codebase but
|
||||
* backed entirely by `node:fs` and `node:fs/promises` — no third-party
|
||||
* wrappers, no retry queues, no silent data loss.
|
||||
*
|
||||
* Async methods return native promises (from `node:fs/promises`).
|
||||
* Sync methods delegate directly to `node:fs`.
|
||||
*/
|
||||
|
||||
const fs = require('node:fs');
|
||||
const fsp = require('node:fs/promises');
|
||||
const path = require('node:path');
|
||||
|
||||
// ── Re-export every native fs member ────────────────────────────────────────
// Spreading `fs` keeps fs.constants, fs.createReadStream, etc. available to
// callers that reach past the curated API below.
module.exports = { ...fs };

// ── Async methods (return promises, like fs-extra) ──────────────────────────
// Shadow each callback-style native export with its promise-returning
// counterpart from node:fs/promises, matching fs-extra's promise API.
for (const method of ['readFile', 'writeFile', 'readdir', 'stat', 'access', 'rename', 'realpath', 'rmdir']) {
  module.exports[method] = fsp[method];
}
|
||||
|
||||
/**
 * Recursively ensure a directory exists (mkdir -p semantics).
 * An already-existing directory is not an error; resolves to undefined.
 * @param {string} dirPath - Directory to create, including any missing parents.
 */
module.exports.ensureDir = async function ensureDir(dirPath) {
  // recursive:true makes mkdir idempotent, so no pre-existence check is needed.
  const options = { recursive: true };
  await fsp.mkdir(dirPath, options);
};
|
||||
|
||||
/**
 * Check whether a path exists.
 *
 * Resolves false only for "not there" outcomes (ENOENT / ENOTDIR); any other
 * access failure (e.g. a permission error) is rethrown rather than masked.
 * NOTE(review): fs-extra's pathExists resolves false on *any* access error —
 * confirm no caller relies on that looser contract.
 *
 * @param {string} p
 * @returns {Promise<boolean>}
 */
module.exports.pathExists = async function pathExists(p) {
  try {
    await fsp.access(p);
  } catch (error) {
    const code = error && error.code;
    if (code === 'ENOENT' || code === 'ENOTDIR') {
      return false;
    }
    throw error;
  }
  return true;
};
|
||||
|
||||
/**
 * Synchronous variant of pathExists.
 * @param {string} targetPath
 * @returns {boolean} True when the path exists.
 */
module.exports.pathExistsSync = function pathExistsSync(targetPath) {
  // existsSync never throws; it simply reports presence.
  return fs.existsSync(targetPath);
};
|
||||
|
||||
/**
 * Recursively copy a directory tree synchronously.
 *
 * Symbolic links are recreated as links rather than followed. The previous
 * implementation routed every non-directory entry through copyFileSync, which
 * dereferenced file symlinks and threw EISDIR on directory symlinks; fs-extra
 * preserves links by default, and this now does too.
 *
 * @param {string} src - Source directory
 * @param {string} dest - Destination directory
 * @param {boolean} force - Whether to overwrite existing files
 * @param {Function} [filter] - Optional filter(srcPath) → boolean; return false to skip
 */
function copyDirSync(src, dest, force, filter) {
  if (filter && !filter(src)) return;
  fs.mkdirSync(dest, { recursive: true });
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const entry of entries) {
    const srcPath = path.join(src, entry.name);
    const destPath = path.join(dest, entry.name);
    if (filter && !filter(srcPath)) continue;
    if (entry.isDirectory()) {
      copyDirSync(srcPath, destPath, force, filter);
    } else if (entry.isSymbolicLink()) {
      // Recreate the link itself. Dirent.isDirectory() is false for a
      // symlinked directory, so links must be handled before the file branch.
      const destInfo = fs.lstatSync(destPath, { throwIfNoEntry: false });
      if (destInfo !== undefined) {
        if (!force) {
          continue;
        }
        // symlinkSync fails with EEXIST, so clear the old entry first.
        fs.rmSync(destPath, { recursive: true, force: true });
      }
      fs.symlinkSync(fs.readlinkSync(srcPath), destPath);
    } else {
      if (!force && fs.existsSync(destPath)) {
        continue;
      }
      fs.copyFileSync(srcPath, destPath);
    }
  }
}
|
||||
|
||||
/**
 * Copy a file or directory.
 *
 * Directories delegate to copyDirSync. Single files are copied with the
 * promise API; the overwrite:false case uses COPYFILE_EXCL so "skip if the
 * destination exists" is atomic — the previous access-then-copySync sequence
 * had a TOCTOU window and blocked the event loop.
 *
 * @param {string} src
 * @param {string} dest
 * @param {object} [options]
 * @param {boolean} [options.overwrite=true]
 * @param {Function} [options.filter] - Optional filter(srcPath) → boolean; return false to skip
 */
module.exports.copy = async function copy(src, dest, options = {}) {
  const overwrite = options.overwrite !== false;
  const filter = options.filter;

  if (filter && !filter(src)) return;

  const srcStat = await fsp.stat(src);

  if (srcStat.isDirectory()) {
    copyDirSync(src, dest, overwrite, filter);
    return;
  }

  await fsp.mkdir(path.dirname(dest), { recursive: true });

  if (overwrite) {
    await fsp.copyFile(src, dest);
    return;
  }

  try {
    // COPYFILE_EXCL makes the copy fail atomically when dest already exists,
    // removing the race between an existence probe and the copy itself.
    await fsp.copyFile(src, dest, fs.constants.COPYFILE_EXCL);
  } catch (error) {
    if (error && error.code === 'EEXIST') {
      return; // dest exists, skip
    }
    throw error;
  }
};
|
||||
|
||||
/**
 * Recursively remove a file or directory.
 *
 * Uses the promise API instead of the previous fs.rmSync, which blocked the
 * event loop inside a method this module documents as promise-backed.
 * force:true means a missing path is not an error (same as before).
 *
 * @param {string} p
 */
module.exports.remove = async function remove(p) {
  await fsp.rm(p, { recursive: true, force: true });
};
|
||||
|
||||
/**
 * Move (rename) a file or directory, with cross-device fallback.
 *
 * A plain rename is attempted first; when the OS reports EXDEV (src and dest
 * live on different devices) the move degrades to copy-then-delete. Any other
 * rename failure propagates unchanged.
 *
 * NOTE(review): the fallback stats with statSync (follows links), so a
 * symlink src would be moved as its target's content — confirm no caller
 * moves bare links across devices.
 *
 * @param {string} src
 * @param {string} dest
 */
module.exports.move = async function move(src, dest) {
  try {
    await fsp.rename(src, dest);
    return;
  } catch (error) {
    if (error.code !== 'EXDEV') {
      throw error;
    }
  }

  // Cross-device: rename cannot work, so copy everything then delete src.
  const srcStat = fs.statSync(src);
  if (srcStat.isDirectory()) {
    copyDirSync(src, dest, true);
  } else {
    fs.mkdirSync(path.dirname(dest), { recursive: true });
    fs.copyFileSync(src, dest);
  }
  fs.rmSync(src, { recursive: true, force: true });
};
|
||||
|
||||
/**
 * Read and parse a JSON file synchronously.
 * @param {string} filePath
 * @returns {any} The parsed JSON value.
 */
module.exports.readJsonSync = function readJsonSync(filePath) {
  let raw = fs.readFileSync(filePath, 'utf8');
  // Drop a leading UTF-8 BOM, which JSON.parse rejects.
  if (raw.charCodeAt(0) === 0xfeff) {
    raw = raw.slice(1);
  }
  return JSON.parse(raw);
};
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const path = require('node:path');
|
||||
const yaml = require('yaml');
|
||||
const { getProjectRoot } = require('./project-root');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
|
||||
/**
|
||||
* Find the BMAD project root directory by looking for package.json
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const path = require('node:path');
|
||||
const os = require('node:os');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const { CLIUtils } = require('./cli-utils');
|
||||
const { CustomHandler } = require('../installers/lib/custom/handler');
|
||||
const { ExternalModuleManager } = require('../installers/lib/modules/external-manager');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const xml2js = require('xml2js');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const path = require('node:path');
|
||||
const { getProjectRoot, getSourcePath } = require('./project-root');
|
||||
const { YamlXmlBuilder } = require('./yaml-xml-builder');
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const yaml = require('yaml');
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./fs');
|
||||
const path = require('node:path');
|
||||
const crypto = require('node:crypto');
|
||||
const { AgentAnalyzer } = require('./agent-analyzer');
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
* This should be run once to update existing installations
|
||||
*/
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const fs = require('./cli/lib/fs');
|
||||
const path = require('node:path');
|
||||
const yaml = require('yaml');
|
||||
const chalk = require('chalk');
|
||||
|
|
|
|||
Loading…
Reference in New Issue