Merge branch 'main' into fix/clack-prompts
commit 9f7e11685c
@@ -113,3 +113,6 @@ jobs:
       - name: Test agent compilation components
         run: npm run test:install

+      - name: Validate file references
+        run: npm run validate:refs
+
@@ -58,7 +58,7 @@ Build it, one story at a time.
 | `correct-course` | Handle significant mid-sprint changes | Updated plan or re-routing |
 | `retrospective` | Review after epic completion | Lessons learned |

-**Quinn (QA Agent):** Built-in QA agent for test automation. Trigger with `QA` or `bmad-bmm-automate`. Generates standard API and E2E tests using your project's test framework. Beginner-friendly, no configuration needed. For advanced test strategy, install [Test Architect (TEA)](https://bmad-code-org.github.io/bmad-method-test-architecture-enterprise/) module.
+**Quinn (QA Agent):** Built-in QA agent for test automation. Trigger with `QA` or `bmad-bmm-qa-automate`. Generates standard API and E2E tests using your project's test framework. Beginner-friendly, no configuration needed. For advanced test strategy, install [Test Architect (TEA)](https://bmad-code-org.github.io/bmad-method-test-architecture-enterprise/) module.

 ## Quick Flow (Parallel Track)
@@ -49,6 +49,7 @@
     "test:coverage": "c8 --reporter=text --reporter=html npm run test:schemas",
     "test:install": "node test/test-installation-components.js",
     "test:schemas": "node test/test-agent-schema.js",
+    "validate:refs": "node tools/validate-file-refs.js",
     "validate:schemas": "node tools/validate-agent-schema.js"
   },
   "lint-staged": {
@@ -1,6 +1,6 @@
 agent:
   metadata:
-    id: "_bmad/bmm/agents/quinn"
+    id: "_bmad/bmm/agents/qa"
     name: Quinn
     title: QA Engineer
     icon: 🧪
@@ -54,4 +54,4 @@ agent:
 For comprehensive test strategy, risk-based planning, quality gates, and enterprise features,
 install the Test Architect (TEA) module: https://bmad-code-org.github.io/bmad-method-test-architecture-enterprise/

-Ready to generate some tests? Just say `QA` or `bmad-bmm-automate`!
+Ready to generate some tests? Just say `QA` or `bmad-bmm-qa-automate`!
@@ -34,5 +34,5 @@ bmm,4-implementation,Validate Story,VS,35,_bmad/bmm/workflows/4-implementation/c
 bmm,4-implementation,Create Story,CS,30,_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml,bmad-bmm-create-story,true,sm,Create Mode,"Story cycle start: Prepare first found story in the sprint plan that is next, or if the command is run with a specific epic and story designation with context. Once complete, then VS then DS then CR then back to DS if needed or next CS or ER",implementation_artifacts,story,
 bmm,4-implementation,Dev Story,DS,40,_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml,bmad-bmm-dev-story,true,dev,Create Mode,"Story cycle: Execute story implementation tasks and tests then CR then back to DS if fixes needed",,,
 bmm,4-implementation,Code Review,CR,50,_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml,bmad-bmm-code-review,false,dev,Create Mode,"Story cycle: If issues back to DS if approved then next CS or ER if epic complete",,,
-bmm,4-implementation,QA Automation Test,QA,45,_bmad/bmm/workflows/qa/automate/workflow.yaml,bmad-bmm-qa-automate,false,quinn,Create Mode,"Generate automated API and E2E tests for implemented code using the project's existing test framework (detects existing well known in use test frameworks). Use after implementation to add test coverage. NOT for code review or story validation - use CR for that.",implementation_artifacts,"test suite",
+bmm,4-implementation,QA Automation Test,QA,45,_bmad/bmm/workflows/qa/automate/workflow.yaml,bmad-bmm-qa-automate,false,qa,Create Mode,"Generate automated API and E2E tests for implemented code using the project's existing test framework (detects existing well known in use test frameworks). Use after implementation to add test coverage. NOT for code review or story validation - use CR for that.",implementation_artifacts,"test suite",
 bmm,4-implementation,Retrospective,ER,60,_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml,bmad-bmm-retrospective,false,sm,Create Mode,"Optional at epic end: Review completed work lessons learned and next epic or if major issues consider CC",implementation_artifacts,retrospective,
@@ -9,5 +9,7 @@ scientific,"research,algorithm,simulation,modeling,computational,analysis,data s
 legaltech,"legal,law,contract,compliance,litigation,patent,attorney,court",high,"Legal ethics;Bar regulations;Data retention;Attorney-client privilege;Court system integration","Legal practice rules;Ethics requirements;Court filing systems;Document standards;Confidentiality","domain-research","legal technology ethics {date};law practice management software requirements;court filing system standards;attorney client privilege technology","ethics_compliance;data_retention;confidentiality_measures;court_integration"
 insuretech,"insurance,claims,underwriting,actuarial,policy,risk,premium",high,"Insurance regulations;Actuarial standards;Data privacy;Fraud detection;State compliance","Insurance regulations by state;Actuarial methods;Risk modeling;Claims processing;Regulatory reporting","domain-research","insurance software regulations {date};actuarial standards software;insurance fraud detection;state insurance compliance","regulatory_requirements;risk_modeling;fraud_detection;reporting_compliance"
 energy,"energy,utility,grid,solar,wind,power,electricity,oil,gas",high,"Grid compliance;NERC standards;Environmental regulations;Safety requirements;Real-time operations","Energy regulations;Grid standards;Environmental compliance;Safety protocols;SCADA systems","domain-research","energy sector software compliance {date};NERC CIP standards;smart grid requirements;renewable energy software standards","grid_compliance;safety_protocols;environmental_compliance;operational_requirements"
+process_control,"industrial automation,process control,PLC,SCADA,DCS,HMI,operational technology,OT,control system,cyberphysical,MES,historian,instrumentation,I&C,P&ID",high,"Functional safety;OT cybersecurity;Real-time control requirements;Legacy system integration;Process safety and hazard analysis;Environmental compliance and permitting;Engineering authority and PE requirements","Functional safety standards;OT security frameworks;Industrial protocols;Process control architecture;Plant reliability and maintainability","domain-research + technical-model","IEC 62443 OT cybersecurity requirements {date};functional safety software requirements {date};industrial process control architecture;ISA-95 manufacturing integration","functional_safety;ot_security;process_requirements;engineering_authority"
+building_automation,"building automation,BAS,BMS,HVAC,smart building,lighting control,fire alarm,fire protection,fire suppression,life safety,elevator,access control,DDC,energy management,sequence of operations,commissioning",high,"Life safety codes;Building energy standards;Multi-trade coordination and interoperability;Commissioning and ongoing operational performance;Indoor environmental quality and occupant comfort;Engineering authority and PE requirements","Building automation protocols;HVAC and mechanical controls;Fire alarm, fire protection, and life safety design;Commissioning process and sequence of operations;Building codes and energy standards","domain-research","smart building software architecture {date};BACnet integration best practices;building automation cybersecurity {date};ASHRAE building standards","life_safety;energy_compliance;commissioning_requirements;engineering_authority"
 gaming,"game,player,gameplay,level,character,multiplayer,quest",redirect,"REDIRECT TO GAME WORKFLOWS","Game design","game-brief","NA","NA"
 general,"",low,"Standard requirements;Basic security;User experience;Performance","General software practices","continue","software development best practices {date}","standard_requirements"
@@ -8,4 +8,6 @@ productivity,"productivity,workflow,tasks,management,business,tools",medium,stan
 media,"content,media,video,audio,streaming,broadcast",high,advanced,"CDN architecture, video encoding, streaming protocols, content delivery"
 iot,"IoT,sensors,devices,embedded,smart,connected",high,advanced,"device communication, real-time data processing, edge computing, security"
 government,"government,civic,public,admin,policy,regulation",high,enhanced,"accessibility standards, security clearance, data privacy, audit trails"
+process_control,"industrial automation,process control,PLC,SCADA,DCS,HMI,operational technology,control system,cyberphysical,MES,instrumentation,I&C,P&ID",high,advanced,"industrial process control architecture, SCADA system design, OT cybersecurity architecture, real-time control systems"
+building_automation,"building automation,BAS,BMS,HVAC,smart building,fire alarm,fire protection,fire suppression,life safety,elevator,DDC,access control,sequence of operations,commissioning",high,advanced,"building automation architecture, BACnet integration patterns, smart building design, building management system security"
 gaming,"game,gaming,multiplayer,real-time,interactive,entertainment",high,advanced,"real-time multiplayer, game engine architecture, matchmaking, leaderboards"
@@ -1,4 +1,4 @@
-<task id="_bmad/core/tasks/workflow.xml" name="Execute Workflow" standalone="false">
+<task id="_bmad/core/tasks/workflow.xml" name="Execute Workflow" standalone="false" internal="true">
 <objective>Execute given workflow by loading its configuration, following instructions, and producing output</objective>

 <llm critical="true">
@@ -164,7 +164,7 @@ async function runTests() {

 try {
 const builder = new YamlXmlBuilder();
-const qaAgentPath = path.join(projectRoot, 'src/bmm/agents/quinn.agent.yaml');
+const qaAgentPath = path.join(projectRoot, 'src/bmm/agents/qa.agent.yaml');
 const tempOutput = path.join(__dirname, 'temp-qa-agent.md');

 try {
@@ -146,7 +146,7 @@ class DependencyResolver {
 const content = await fs.readFile(file.path, 'utf8');

 // Parse YAML frontmatter for explicit dependencies
-const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
+const frontmatterMatch = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
 if (frontmatterMatch) {
 try {
 // Pre-process to handle backticks in YAML values
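The `\r?\n` change above (repeated in the manifest generator hunks below) makes frontmatter detection tolerant of Windows line endings. A minimal sketch of the difference, using a hypothetical `crlfDoc` value:

```js
// Minimal sketch of the frontmatter-regex change; crlfDoc is a hypothetical CRLF file.
const crlfDoc = '---\r\nname: demo\r\n---\r\nbody';

const strict = /^---\n([\s\S]*?)\n---/;        // old: assumes LF-only line endings
const tolerant = /^---\r?\n([\s\S]*?)\r?\n---/; // new: also matches CRLF

console.log(strict.test(crlfDoc));   // false – frontmatter missed on CRLF files
console.log(tolerant.test(crlfDoc)); // true
```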
@@ -17,9 +17,7 @@ const { ManifestGenerator } = require('./manifest-generator');
 const { IdeConfigManager } = require('./ide-config-manager');
 const { CustomHandler } = require('../custom/handler');
 const prompts = require('../../../lib/prompts');
+const { BMAD_FOLDER_NAME } = require('../ide/shared/path-utils');
-// BMAD installation folder name - this is constant and should never change
-const BMAD_FOLDER_NAME = '_bmad';

 class Installer {
 constructor() {
@@ -2,6 +2,7 @@ const path = require('node:path');
 const fs = require('fs-extra');
 const yaml = require('yaml');
 const crypto = require('node:crypto');
+const csv = require('csv-parse/sync');
 const { getSourcePath, getModulePath } = require('../../../lib/project-root');

 // Load package.json for version info
@@ -21,6 +22,19 @@ class ManifestGenerator {
 this.selectedIdes = [];
 }

+/**
+ * Clean text for CSV output by normalizing whitespace and escaping quotes
+ * @param {string} text - Text to clean
+ * @returns {string} Cleaned text safe for CSV
+ */
+cleanForCSV(text) {
+if (!text) return '';
+return text
+.trim()
+.replaceAll(/\s+/g, ' ') // Normalize all whitespace (including newlines) to single space
+.replaceAll('"', '""'); // Escape quotes for CSV
+}
+
 /**
 * Generate all manifests for the installation
 * @param {string} bmadDir - _bmad
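For context, a standalone sketch of what the new `cleanForCSV` helper does to a multi-line value (illustrative only, mirroring the method added above):

```js
// Illustrative sketch mirroring the cleanForCSV method added in this hunk.
const cleanForCSV = (text) => {
  if (!text) return '';
  return text
    .trim()
    .replaceAll(/\s+/g, ' ')  // collapse newlines/runs of spaces to a single space
    .replaceAll('"', '""');   // double quotes so the value is safe inside a quoted CSV field
};

console.log(cleanForCSV('  Runs "QA" checks\nacross modules  '));
// -> 'Runs ""QA"" checks across modules'
```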
@@ -161,7 +175,7 @@ class ManifestGenerator {
 workflow = yaml.parse(content);
 } else {
 // Parse MD workflow with YAML frontmatter
-const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
+const frontmatterMatch = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
 if (!frontmatterMatch) {
 if (debug) {
 console.log(`[DEBUG] Skipped (no frontmatter): ${fullPath}`);
@@ -201,7 +215,7 @@ class ManifestGenerator {
 // Workflows with standalone: false are filtered out above
 workflows.push({
 name: workflow.name,
-description: workflow.description.replaceAll('"', '""'), // Escape quotes for CSV
+description: this.cleanForCSV(workflow.description),
 module: moduleName,
 path: installPath,
 });
@@ -319,24 +333,15 @@ class ManifestGenerator {

 const agentName = entry.name.replace('.md', '');

-// Helper function to clean and escape CSV content
-const cleanForCSV = (text) => {
-if (!text) return '';
-return text
-.trim()
-.replaceAll(/\s+/g, ' ') // Normalize whitespace
-.replaceAll('"', '""'); // Escape quotes for CSV
-};

 agents.push({
 name: agentName,
 displayName: nameMatch ? nameMatch[1] : agentName,
 title: titleMatch ? titleMatch[1] : '',
 icon: iconMatch ? iconMatch[1] : '',
-role: roleMatch ? cleanForCSV(roleMatch[1]) : '',
-identity: identityMatch ? cleanForCSV(identityMatch[1]) : '',
-communicationStyle: styleMatch ? cleanForCSV(styleMatch[1]) : '',
-principles: principlesMatch ? cleanForCSV(principlesMatch[1]) : '',
+role: roleMatch ? this.cleanForCSV(roleMatch[1]) : '',
+identity: identityMatch ? this.cleanForCSV(identityMatch[1]) : '',
+communicationStyle: styleMatch ? this.cleanForCSV(styleMatch[1]) : '',
+principles: principlesMatch ? this.cleanForCSV(principlesMatch[1]) : '',
 module: moduleName,
 path: installPath,
 });
@@ -385,6 +390,11 @@ class ManifestGenerator {
 const filePath = path.join(dirPath, file);
 const content = await fs.readFile(filePath, 'utf8');

+// Skip internal/engine files (not user-facing tasks)
+if (content.includes('internal="true"')) {
+continue;
+}
+
 let name = file.replace(/\.(xml|md)$/, '');
 let displayName = name;
 let description = '';
@@ -392,13 +402,13 @@ class ManifestGenerator {

 if (file.endsWith('.md')) {
 // Parse YAML frontmatter for .md tasks
-const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
+const frontmatterMatch = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
 if (frontmatterMatch) {
 try {
 const frontmatter = yaml.parse(frontmatterMatch[1]);
 name = frontmatter.name || name;
 displayName = frontmatter.displayName || frontmatter.name || name;
-description = frontmatter.description || '';
+description = this.cleanForCSV(frontmatter.description || '');
 standalone = frontmatter.standalone === true || frontmatter.standalone === 'true';
 } catch {
 // If YAML parsing fails, use defaults
@@ -411,7 +421,7 @@ class ManifestGenerator {

 const descMatch = content.match(/description="([^"]+)"/);
 const objMatch = content.match(/<objective>([^<]+)<\/objective>/);
-description = descMatch ? descMatch[1] : objMatch ? objMatch[1].trim() : '';
+description = this.cleanForCSV(descMatch ? descMatch[1] : objMatch ? objMatch[1].trim() : '');

 const standaloneMatch = content.match(/<task[^>]+standalone="true"/);
 standalone = !!standaloneMatch;
@@ -424,7 +434,7 @@ class ManifestGenerator {
 tasks.push({
 name: name,
 displayName: displayName,
-description: description.replaceAll('"', '""'),
+description: description,
 module: moduleName,
 path: installPath,
 standalone: standalone,
@@ -474,6 +484,11 @@ class ManifestGenerator {
 const filePath = path.join(dirPath, file);
 const content = await fs.readFile(filePath, 'utf8');

+// Skip internal tools (same as tasks)
+if (content.includes('internal="true"')) {
+continue;
+}
+
 let name = file.replace(/\.(xml|md)$/, '');
 let displayName = name;
 let description = '';
@@ -481,13 +496,13 @@ class ManifestGenerator {

 if (file.endsWith('.md')) {
 // Parse YAML frontmatter for .md tools
-const frontmatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
+const frontmatterMatch = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
 if (frontmatterMatch) {
 try {
 const frontmatter = yaml.parse(frontmatterMatch[1]);
 name = frontmatter.name || name;
 displayName = frontmatter.displayName || frontmatter.name || name;
-description = frontmatter.description || '';
+description = this.cleanForCSV(frontmatter.description || '');
 standalone = frontmatter.standalone === true || frontmatter.standalone === 'true';
 } catch {
 // If YAML parsing fails, use defaults
@@ -500,7 +515,7 @@ class ManifestGenerator {

 const descMatch = content.match(/description="([^"]+)"/);
 const objMatch = content.match(/<objective>([^<]+)<\/objective>/);
-description = descMatch ? descMatch[1] : objMatch ? objMatch[1].trim() : '';
+description = this.cleanForCSV(descMatch ? descMatch[1] : objMatch ? objMatch[1].trim() : '');

 const standaloneMatch = content.match(/<tool[^>]+standalone="true"/);
 standalone = !!standaloneMatch;
@@ -513,7 +528,7 @@ class ManifestGenerator {
 tools.push({
 name: name,
 displayName: displayName,
-description: description.replaceAll('"', '""'),
+description: description,
 module: moduleName,
 path: installPath,
 standalone: standalone,
@@ -773,30 +788,23 @@ class ManifestGenerator {
 */
 async writeAgentManifest(cfgDir) {
 const csvPath = path.join(cfgDir, 'agent-manifest.csv');
+const escapeCsv = (value) => `"${String(value ?? '').replaceAll('"', '""')}"`;

 // Read existing manifest to preserve entries
 const existingEntries = new Map();
 if (await fs.pathExists(csvPath)) {
 const content = await fs.readFile(csvPath, 'utf8');
-const lines = content.split('\n').filter((line) => line.trim());
-// Skip header
-for (let i = 1; i < lines.length; i++) {
-const line = lines[i];
-if (line) {
-// Parse CSV (simple parsing assuming no commas in quoted fields)
-const parts = line.split('","');
-if (parts.length >= 11) {
-const name = parts[0].replace(/^"/, '');
-const module = parts[8];
-existingEntries.set(`${module}:${name}`, line);
-}
-}
+const records = csv.parse(content, {
+columns: true,
+skip_empty_lines: true,
+});
+for (const record of records) {
+existingEntries.set(`${record.module}:${record.name}`, record);
 }
 }

 // Create CSV header with persona fields
-let csv = 'name,displayName,title,icon,role,identity,communicationStyle,principles,module,path\n';
+let csvContent = 'name,displayName,title,icon,role,identity,communicationStyle,principles,module,path\n';

 // Combine existing and new agents, preferring new data for duplicates
 const allAgents = new Map();
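The switch from hand-rolled `split('","')` parsing to `csv-parse/sync` handles quoted fields that contain commas or escaped quotes. A minimal usage sketch with a hypothetical two-line manifest:

```js
// Minimal csv-parse/sync sketch; the manifest content here is hypothetical.
const csv = require('csv-parse/sync');

const content = 'name,module,path\n"quinn","bmm","_bmad/bmm/agents/qa.md"\n';
const records = csv.parse(content, { columns: true, skip_empty_lines: true });

console.log(records[0].name);   // 'quinn'
console.log(records[0].module); // 'bmm'
```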
@@ -809,18 +817,38 @@ class ManifestGenerator {
 // Add/update new agents
 for (const agent of this.agents) {
 const key = `${agent.module}:${agent.name}`;
-allAgents.set(
-key,
-`"${agent.name}","${agent.displayName}","${agent.title}","${agent.icon}","${agent.role}","${agent.identity}","${agent.communicationStyle}","${agent.principles}","${agent.module}","${agent.path}"`,
-);
+allAgents.set(key, {
+name: agent.name,
+displayName: agent.displayName,
+title: agent.title,
+icon: agent.icon,
+role: agent.role,
+identity: agent.identity,
+communicationStyle: agent.communicationStyle,
+principles: agent.principles,
+module: agent.module,
+path: agent.path,
+});
 }

 // Write all agents
-for (const [, value] of allAgents) {
-csv += value + '\n';
+for (const [, record] of allAgents) {
+const row = [
+escapeCsv(record.name),
+escapeCsv(record.displayName),
+escapeCsv(record.title),
+escapeCsv(record.icon),
+escapeCsv(record.role),
+escapeCsv(record.identity),
+escapeCsv(record.communicationStyle),
+escapeCsv(record.principles),
+escapeCsv(record.module),
+escapeCsv(record.path),
+].join(',');
+csvContent += row + '\n';
 }

-await fs.writeFile(csvPath, csv);
+await fs.writeFile(csvPath, csvContent);
 return csvPath;
 }
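A quick sketch of how the `escapeCsv` helper plus `join(',')` produces a well-formed row even when a field contains commas or quotes (the record values are hypothetical):

```js
// Hypothetical record demonstrating the escapeCsv + join(',') row building above.
const escapeCsv = (value) => `"${String(value ?? '').replaceAll('"', '""')}"`;

const record = { name: 'qa', title: 'QA Engineer', principles: 'Say "no" to flaky, slow tests' };
const row = [record.name, record.title, record.principles].map(escapeCsv).join(',');

console.log(row);
// "qa","QA Engineer","Say ""no"" to flaky, slow tests"
```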
@@ -830,30 +858,23 @@ class ManifestGenerator {
 */
 async writeTaskManifest(cfgDir) {
 const csvPath = path.join(cfgDir, 'task-manifest.csv');
+const escapeCsv = (value) => `"${String(value ?? '').replaceAll('"', '""')}"`;

 // Read existing manifest to preserve entries
 const existingEntries = new Map();
 if (await fs.pathExists(csvPath)) {
 const content = await fs.readFile(csvPath, 'utf8');
-const lines = content.split('\n').filter((line) => line.trim());
-// Skip header
-for (let i = 1; i < lines.length; i++) {
-const line = lines[i];
-if (line) {
-// Parse CSV (simple parsing assuming no commas in quoted fields)
-const parts = line.split('","');
-if (parts.length >= 6) {
-const name = parts[0].replace(/^"/, '');
-const module = parts[3];
-existingEntries.set(`${module}:${name}`, line);
-}
-}
+const records = csv.parse(content, {
+columns: true,
+skip_empty_lines: true,
+});
+for (const record of records) {
+existingEntries.set(`${record.module}:${record.name}`, record);
 }
 }

 // Create CSV header with standalone column
-let csv = 'name,displayName,description,module,path,standalone\n';
+let csvContent = 'name,displayName,description,module,path,standalone\n';

 // Combine existing and new tasks
 const allTasks = new Map();
@@ -866,15 +887,30 @@ class ManifestGenerator {
 // Add/update new tasks
 for (const task of this.tasks) {
 const key = `${task.module}:${task.name}`;
-allTasks.set(key, `"${task.name}","${task.displayName}","${task.description}","${task.module}","${task.path}","${task.standalone}"`);
+allTasks.set(key, {
+name: task.name,
+displayName: task.displayName,
+description: task.description,
+module: task.module,
+path: task.path,
+standalone: task.standalone,
+});
 }

 // Write all tasks
-for (const [, value] of allTasks) {
-csv += value + '\n';
+for (const [, record] of allTasks) {
+const row = [
+escapeCsv(record.name),
+escapeCsv(record.displayName),
+escapeCsv(record.description),
+escapeCsv(record.module),
+escapeCsv(record.path),
+escapeCsv(record.standalone),
+].join(',');
+csvContent += row + '\n';
 }

-await fs.writeFile(csvPath, csv);
+await fs.writeFile(csvPath, csvContent);
 return csvPath;
 }
@@ -884,30 +920,23 @@ class ManifestGenerator {
 */
 async writeToolManifest(cfgDir) {
 const csvPath = path.join(cfgDir, 'tool-manifest.csv');
+const escapeCsv = (value) => `"${String(value ?? '').replaceAll('"', '""')}"`;

 // Read existing manifest to preserve entries
 const existingEntries = new Map();
 if (await fs.pathExists(csvPath)) {
 const content = await fs.readFile(csvPath, 'utf8');
-const lines = content.split('\n').filter((line) => line.trim());
-// Skip header
-for (let i = 1; i < lines.length; i++) {
-const line = lines[i];
-if (line) {
-// Parse CSV (simple parsing assuming no commas in quoted fields)
-const parts = line.split('","');
-if (parts.length >= 6) {
-const name = parts[0].replace(/^"/, '');
-const module = parts[3];
-existingEntries.set(`${module}:${name}`, line);
-}
-}
+const records = csv.parse(content, {
+columns: true,
+skip_empty_lines: true,
+});
+for (const record of records) {
+existingEntries.set(`${record.module}:${record.name}`, record);
 }
 }

 // Create CSV header with standalone column
-let csv = 'name,displayName,description,module,path,standalone\n';
+let csvContent = 'name,displayName,description,module,path,standalone\n';

 // Combine existing and new tools
 const allTools = new Map();
@@ -920,15 +949,30 @@ class ManifestGenerator {
 // Add/update new tools
 for (const tool of this.tools) {
 const key = `${tool.module}:${tool.name}`;
-allTools.set(key, `"${tool.name}","${tool.displayName}","${tool.description}","${tool.module}","${tool.path}","${tool.standalone}"`);
+allTools.set(key, {
+name: tool.name,
+displayName: tool.displayName,
+description: tool.description,
+module: tool.module,
+path: tool.path,
+standalone: tool.standalone,
+});
 }

 // Write all tools
-for (const [, value] of allTools) {
-csv += value + '\n';
+for (const [, record] of allTools) {
+const row = [
+escapeCsv(record.name),
+escapeCsv(record.displayName),
+escapeCsv(record.description),
+escapeCsv(record.module),
+escapeCsv(record.path),
+escapeCsv(record.standalone),
+].join(',');
+csvContent += row + '\n';
 }

-await fs.writeFile(csvPath, csv);
+await fs.writeFile(csvPath, csvContent);
 return csvPath;
 }
@@ -297,7 +297,7 @@ class CustomHandler {
 const agentFiles = await this.findFilesRecursively(sourceAgentsPath, ['.agent.yaml']);

 for (const agentFile of agentFiles) {
-const relativePath = path.relative(sourceAgentsPath, agentFile);
+const relativePath = path.relative(sourceAgentsPath, agentFile).split(path.sep).join('/');
 const targetDir = path.join(targetAgentsPath, path.dirname(relativePath));

 await fs.ensureDir(targetDir);
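The `.split(path.sep).join('/')` addition normalizes Windows separators before the relative path is reused downstream. A small sketch, using `path.win32` to simulate Windows and hypothetical directories:

```js
// Sketch of the separator normalization above; paths are hypothetical examples.
const path = require('node:path');

const rel = path.win32.relative('C:\\src\\agents', 'C:\\src\\agents\\team\\dev.agent.yaml');
console.log(rel);                                  // 'team\\dev.agent.yaml'
console.log(rel.split(path.win32.sep).join('/'));  // 'team/dev.agent.yaml'
```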
@@ -3,6 +3,7 @@ const fs = require('fs-extra');
 const chalk = require('chalk');
 const { XmlHandler } = require('../../../lib/xml-handler');
 const { getSourcePath } = require('../../../lib/project-root');
+const { BMAD_FOLDER_NAME } = require('./shared/path-utils');

 /**
 * Base class for IDE-specific setup
@@ -18,7 +19,7 @@ class BaseIdeSetup {
 this.configFile = null; // Override in subclasses when detection is file-based
 this.detectionPaths = []; // Additional paths that indicate the IDE is configured
 this.xmlHandler = new XmlHandler();
-this.bmadFolderName = 'bmad'; // Default, can be overridden
+this.bmadFolderName = BMAD_FOLDER_NAME; // Default, can be overridden
 }

 /**
@@ -57,7 +58,7 @@ class BaseIdeSetup {
 if (this.configDir) {
 const configPath = path.join(projectDir, this.configDir);
 if (await fs.pathExists(configPath)) {
-const bmadRulesPath = path.join(configPath, 'bmad');
+const bmadRulesPath = path.join(configPath, BMAD_FOLDER_NAME);
 if (await fs.pathExists(bmadRulesPath)) {
 await fs.remove(bmadRulesPath);
 console.log(chalk.dim(`Removed ${this.name} BMAD configuration`));
@@ -445,6 +446,11 @@ class BaseIdeSetup {
 try {
 const content = await fs.readFile(fullPath, 'utf8');

+// Skip internal/engine files (not user-facing tasks/tools)
+if (content.includes('internal="true"')) {
+continue;
+}
+
 // Check for standalone="true" in XML files
 if (entry.name.endsWith('.xml')) {
 // Look for standalone="true" in the opening tag (task or tool)
@@ -66,6 +66,13 @@ class ConfigDrivenIdeSetup extends BaseIdeSetup {
 */
 async installToTarget(projectDir, bmadDir, config, options) {
 const { target_dir, template_type, artifact_types } = config;

+// Skip targets with explicitly empty artifact_types array
+// This prevents creating empty directories when no artifacts will be written
+if (Array.isArray(artifact_types) && artifact_types.length === 0) {
+return { success: true, results: { agents: 0, workflows: 0, tasks: 0, tools: 0 } };
+}
+
 const targetPath = path.join(projectDir, target_dir);
 await this.ensureDir(targetPath);
@@ -86,10 +93,11 @@ class ConfigDrivenIdeSetup extends BaseIdeSetup {
 results.workflows = await this.writeWorkflowArtifacts(targetPath, artifacts, template_type, config);
 }

-// Install tasks and tools
+// Install tasks and tools using template system (supports TOML for Gemini, MD for others)
 if (!artifact_types || artifact_types.includes('tasks') || artifact_types.includes('tools')) {
-const taskToolGen = new TaskToolCommandGenerator();
-const taskToolResult = await taskToolGen.generateDashTaskToolCommands(projectDir, bmadDir, targetPath);
+const taskToolGen = new TaskToolCommandGenerator(this.bmadFolderName);
+const { artifacts } = await taskToolGen.collectTaskToolArtifacts(bmadDir);
+const taskToolResult = await this.writeTaskToolArtifacts(targetPath, artifacts, template_type, config);
 results.tasks = taskToolResult.tasks || 0;
 results.tools = taskToolResult.tools || 0;
 }
@@ -180,6 +188,53 @@ class ConfigDrivenIdeSetup extends BaseIdeSetup {
 return count;
 }

+/**
+ * Write task/tool artifacts to target directory using templates
+ * @param {string} targetPath - Target directory path
+ * @param {Array} artifacts - Task/tool artifacts
+ * @param {string} templateType - Template type to use
+ * @param {Object} config - Installation configuration
+ * @returns {Promise<Object>} Counts of tasks and tools written
+ */
+async writeTaskToolArtifacts(targetPath, artifacts, templateType, config = {}) {
+let taskCount = 0;
+let toolCount = 0;
+
+// Pre-load templates to avoid repeated file I/O in the loop
+const taskTemplate = await this.loadTemplate(templateType, 'task', config, 'default-task');
+const toolTemplate = await this.loadTemplate(templateType, 'tool', config, 'default-tool');
+
+const { artifact_types } = config;
+
+for (const artifact of artifacts) {
+if (artifact.type !== 'task' && artifact.type !== 'tool') {
+continue;
+}
+
+// Skip if the specific artifact type is not requested in config
+if (artifact_types) {
+if (artifact.type === 'task' && !artifact_types.includes('tasks')) continue;
+if (artifact.type === 'tool' && !artifact_types.includes('tools')) continue;
+}
+
+// Use pre-loaded template based on artifact type
+const { content: template, extension } = artifact.type === 'task' ? taskTemplate : toolTemplate;
+
+const content = this.renderTemplate(template, artifact);
+const filename = this.generateFilename(artifact, artifact.type, extension);
+const filePath = path.join(targetPath, filename);
+await this.writeFile(filePath, content);
+
+if (artifact.type === 'task') {
+taskCount++;
+} else {
+toolCount++;
+}
+}
+
+return { tasks: taskCount, tools: toolCount };
+}
+
 /**
 * Load template based on type and configuration
 * @param {string} templateType - Template type (claude, windsurf, etc.)
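The `artifact_types` guard inside the new method boils down to a small predicate; a sketch (not part of the diff, just a restatement of the filtering behavior above):

```js
// Sketch of the artifact_types filtering used in writeTaskToolArtifacts above.
const shouldWrite = (artifact, artifactTypes) => {
  if (!artifactTypes) return true; // no filter configured: write everything
  if (artifact.type === 'task') return artifactTypes.includes('tasks');
  if (artifact.type === 'tool') return artifactTypes.includes('tools');
  return false; // other artifact kinds are handled by other writers
};

console.log(shouldWrite({ type: 'task' }, ['tools'])); // false
console.log(shouldWrite({ type: 'tool' }, ['tools'])); // true
```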
@@ -316,10 +371,24 @@ LOAD and execute from: {project-root}/{{bmadFolderName}}/{{path}}
 renderTemplate(template, artifact) {
 // Use the appropriate path property based on artifact type
 let pathToUse = artifact.relativePath || '';
-if (artifact.type === 'agent-launcher') {
+switch (artifact.type) {
+case 'agent-launcher': {
 pathToUse = artifact.agentPath || artifact.relativePath || '';
-} else if (artifact.type === 'workflow-command') {
+break;
+}
+case 'workflow-command': {
 pathToUse = artifact.workflowPath || artifact.relativePath || '';
+break;
+}
+case 'task':
+case 'tool': {
+pathToUse = artifact.path || artifact.relativePath || '';
+break;
+}
+// No default
 }

 let rendered = template
@@ -351,8 +420,9 @@ LOAD and execute from: {project-root}/{{bmadFolderName}}/{{path}}
 // Reuse central logic to ensure consistent naming conventions
 const standardName = toDashPath(artifact.relativePath);

-// Clean up potential double extensions from source files (e.g. .yaml.md -> .md)
-const baseName = standardName.replace(/\.(yaml|yml)\.md$/, '.md');
+// Clean up potential double extensions from source files (e.g. .yaml.md, .xml.md -> .md)
+// This handles any extensions that might slip through toDashPath()
+const baseName = standardName.replace(/\.(md|yaml|yml|json|xml|toml)\.md$/i, '.md');

 // If using default markdown, preserve the bmad-agent- prefix for agents
 if (extension === '.md') {
@@ -104,7 +104,10 @@ class CodexSetup extends BaseIdeSetup {
 );
 taskArtifacts.push({
 type: 'task',
+name: task.name,
+displayName: task.name,
 module: task.module,
+path: task.path,
 sourcePath: task.path,
 relativePath: path.join(task.module, 'tasks', `${task.name}.md`),
 content,
@@ -116,7 +119,7 @@ class CodexSetup extends BaseIdeSetup {
 const workflowCount = await workflowGenerator.writeDashArtifacts(destDir, workflowArtifacts);

 // Also write tasks using underscore format
-const ttGen = new TaskToolCommandGenerator();
+const ttGen = new TaskToolCommandGenerator(this.bmadFolderName);
 const tasksWritten = await ttGen.writeDashArtifacts(destDir, taskArtifacts);

 const written = agentCount + workflowCount + tasksWritten;
@@ -214,7 +217,10 @@ class CodexSetup extends BaseIdeSetup {

 artifacts.push({
 type: 'task',
+name: task.name,
+displayName: task.name,
 module: task.module,
+path: task.path,
 sourcePath: task.path,
 relativePath: path.join(task.module, 'tasks', `${task.name}.md`),
 content,
@@ -1,6 +1,7 @@
 const fs = require('fs-extra');
 const path = require('node:path');
 const chalk = require('chalk');
+const { BMAD_FOLDER_NAME } = require('./shared/path-utils');

 /**
 * IDE Manager - handles IDE-specific setup
@@ -14,7 +15,7 @@ class IdeManager {
 constructor() {
 this.handlers = new Map();
 this._initialized = false;
-this.bmadFolderName = 'bmad'; // Default, can be overridden
+this.bmadFolderName = BMAD_FOLDER_NAME; // Default, can be overridden
 }

 /**
@@ -73,6 +74,9 @@ class IdeManager {
 if (HandlerClass) {
 const instance = new HandlerClass();
 if (instance.name && typeof instance.name === 'string') {
+if (typeof instance.setBmadFolderName === 'function') {
+instance.setBmadFolderName(this.bmadFolderName);
+}
 this.handlers.set(instance.name, instance);
 }
 }
@@ -100,7 +104,9 @@ class IdeManager {
 if (!platformInfo.installer) continue;

 const handler = new ConfigDrivenIdeSetup(platformCode, platformInfo);
+if (typeof handler.setBmadFolderName === 'function') {
 handler.setBmadFolderName(this.bmadFolderName);
+}
 this.handlers.set(platformCode, handler);
 }
 }
@@ -94,9 +94,6 @@ platforms:
       - target_dir: .github/agents
         template_type: copilot_agents
         artifact_types: [agents]
-      - target_dir: .vscode
-        template_type: vscode_settings
-        artifact_types: []

   iflow:
     name: "iFlow"
@@ -1,14 +1,14 @@
 const path = require('node:path');
 const fs = require('fs-extra');
 const chalk = require('chalk');
-const { toColonPath, toDashPath, customAgentColonName, customAgentDashName } = require('./path-utils');
+const { toColonPath, toDashPath, customAgentColonName, customAgentDashName, BMAD_FOLDER_NAME } = require('./path-utils');

 /**
 * Generates launcher command files for each agent
 * Similar to WorkflowCommandGenerator but for agents
 */
 class AgentCommandGenerator {
-constructor(bmadFolderName = 'bmad') {
+constructor(bmadFolderName = BMAD_FOLDER_NAME) {
 this.templatePath = path.join(__dirname, '../templates/agent-command-template.md');
 this.bmadFolderName = bmadFolderName;
 }
@@ -141,13 +141,24 @@ async function getTasksFromDir(dirPath, moduleName) {
 const files = await fs.readdir(dirPath);

 for (const file of files) {
-if (!file.endsWith('.md')) {
+// Include both .md and .xml task files
+if (!file.endsWith('.md') && !file.endsWith('.xml')) {
 continue;
 }

+const filePath = path.join(dirPath, file);
+const content = await fs.readFile(filePath, 'utf8');
+
+// Skip internal/engine files (not user-facing tasks)
+if (content.includes('internal="true"')) {
+continue;
+}
+
+// Remove extension to get task name
+const ext = file.endsWith('.xml') ? '.xml' : '.md';
 tasks.push({
-path: path.join(dirPath, file),
-name: file.replace('.md', ''),
+path: filePath,
+name: file.replace(ext, ''),
 module: moduleName,
 });
 }
@@ -18,6 +18,9 @@
 const TYPE_SEGMENTS = ['workflows', 'tasks', 'tools'];
 const AGENT_SEGMENT = 'agents';

+// BMAD installation folder name - centralized constant for all installers
+const BMAD_FOLDER_NAME = '_bmad';
+
 /**
 * Convert hierarchical path to flat dash-separated name (NEW STANDARD)
 * Converts: 'bmm', 'agents', 'pm' → 'bmad-agent-bmm-pm.md'
@@ -59,7 +62,9 @@ function toDashPath(relativePath) {
 return 'bmad-unknown.md';
 }

-const withoutExt = relativePath.replace('.md', '');
+// Strip common file extensions to avoid double extensions in generated filenames
+// e.g., 'create-story.xml' → 'create-story', 'workflow.yaml' → 'workflow'
+const withoutExt = relativePath.replace(/\.(md|yaml|yml|json|xml|toml)$/i, '');
 const parts = withoutExt.split(/[/\\]/);

 const module = parts[0];
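The broadened regex matters because `String.prototype.replace('.md', '')` only removes a literal `.md` substring and leaves `.xml`/`.yaml` names untouched. A brief sketch with hypothetical paths:

```js
// Sketch of the extension-stripping change in toDashPath above; inputs are hypothetical.
const stripOld = (p) => p.replace('.md', '');
const stripNew = (p) => p.replace(/\.(md|yaml|yml|json|xml|toml)$/i, '');

console.log(stripOld('bmm/tasks/create-story.xml'));  // 'bmm/tasks/create-story.xml' – unchanged
console.log(stripNew('bmm/tasks/create-story.xml'));  // 'bmm/tasks/create-story'
console.log(stripNew('bmm/workflows/workflow.yaml')); // 'bmm/workflows/workflow'
```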
@@ -183,7 +188,8 @@ function toUnderscoreName(module, type, name) {
 * @deprecated Use toDashPath instead
 */
 function toUnderscorePath(relativePath) {
-const withoutExt = relativePath.replace('.md', '');
+// Strip common file extensions (same as toDashPath for consistency)
+const withoutExt = relativePath.replace(/\.(md|yaml|yml|json|xml|toml)$/i, '');
 const parts = withoutExt.split(/[/\\]/);

 const module = parts[0];
@@ -289,4 +295,5 @@ module.exports = {
 
   TYPE_SEGMENTS,
   AGENT_SEGMENT,
+  BMAD_FOLDER_NAME,
 };
@@ -2,12 +2,98 @@ const path = require('node:path');
 const fs = require('fs-extra');
 const csv = require('csv-parse/sync');
 const chalk = require('chalk');
-const { toColonName, toColonPath, toDashPath } = require('./path-utils');
+const { toColonName, toColonPath, toDashPath, BMAD_FOLDER_NAME } = require('./path-utils');
 
 /**
  * Generates command files for standalone tasks and tools
  */
 class TaskToolCommandGenerator {
+  /**
+   * @param {string} bmadFolderName - Name of the BMAD folder for template rendering (default: '_bmad')
+   * Note: This parameter is accepted for API consistency with AgentCommandGenerator and
+   * WorkflowCommandGenerator, but is not used for path stripping. The manifest always stores
+   * filesystem paths with '_bmad/' prefix (the actual folder name), while bmadFolderName is
+   * used for template placeholder rendering ({{bmadFolderName}}).
+   */
+  constructor(bmadFolderName = BMAD_FOLDER_NAME) {
+    this.bmadFolderName = bmadFolderName;
+  }
+
+  /**
+   * Collect task and tool artifacts for IDE installation
+   * @param {string} bmadDir - BMAD installation directory
+   * @returns {Promise<Object>} Artifacts array with metadata
+   */
+  async collectTaskToolArtifacts(bmadDir) {
+    const tasks = await this.loadTaskManifest(bmadDir);
+    const tools = await this.loadToolManifest(bmadDir);
+
+    // Filter to only standalone items
+    const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
+    const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
+
+    const artifacts = [];
+    const bmadPrefix = `${BMAD_FOLDER_NAME}/`;
+
+    // Collect task artifacts
+    for (const task of standaloneTasks) {
+      let taskPath = (task.path || '').replaceAll('\\', '/');
+      // Convert absolute paths to relative paths
+      if (path.isAbsolute(taskPath)) {
+        taskPath = path.relative(bmadDir, taskPath).replaceAll('\\', '/');
+      }
+      // Remove _bmad/ prefix if present to get relative path within bmad folder
+      if (taskPath.startsWith(bmadPrefix)) {
+        taskPath = taskPath.slice(bmadPrefix.length);
+      }
+
+      const taskExt = path.extname(taskPath) || '.md';
+      artifacts.push({
+        type: 'task',
+        name: task.name,
+        displayName: task.displayName || task.name,
+        description: task.description || `Execute ${task.displayName || task.name}`,
+        module: task.module,
+        // Use forward slashes for cross-platform consistency (not path.join which uses backslashes on Windows)
+        relativePath: `${task.module}/tasks/${task.name}${taskExt}`,
+        path: taskPath,
+      });
+    }
+
+    // Collect tool artifacts
+    for (const tool of standaloneTools) {
+      let toolPath = (tool.path || '').replaceAll('\\', '/');
+      // Convert absolute paths to relative paths
+      if (path.isAbsolute(toolPath)) {
+        toolPath = path.relative(bmadDir, toolPath).replaceAll('\\', '/');
+      }
+      // Remove _bmad/ prefix if present to get relative path within bmad folder
+      if (toolPath.startsWith(bmadPrefix)) {
+        toolPath = toolPath.slice(bmadPrefix.length);
+      }
+
+      const toolExt = path.extname(toolPath) || '.md';
+      artifacts.push({
+        type: 'tool',
+        name: tool.name,
+        displayName: tool.displayName || tool.name,
+        description: tool.description || `Execute ${tool.displayName || tool.name}`,
+        module: tool.module,
+        // Use forward slashes for cross-platform consistency (not path.join which uses backslashes on Windows)
+        relativePath: `${tool.module}/tools/${tool.name}${toolExt}`,
+        path: toolPath,
+      });
+    }
+
+    return {
+      artifacts,
+      counts: {
+        tasks: standaloneTasks.length,
+        tools: standaloneTools.length,
+      },
+    };
+  }
+
   /**
    * Generate task and tool commands from manifest CSVs
    * @param {string} projectDir - Project directory
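The three steps above (flip backslashes, make absolute paths relative to the bmad dir, strip the '_bmad/' prefix) all converge on a forward-slash path relative to the bmad folder. A short sketch with hypothetical manifest values (the 'edit-story' entry is illustrative):

    const path = require('node:path');
    const bmadDir = '/repo/_bmad';
    let taskPath = '/repo/_bmad/bmm/tasks/edit-story.md'.replaceAll('\\', '/');
    if (path.isAbsolute(taskPath)) {
      taskPath = path.relative(bmadDir, taskPath).replaceAll('\\', '/'); // 'bmm/tasks/edit-story.md'
    }
    // A relative entry such as '_bmad/bmm/tasks/edit-story.md' takes the
    // prefix-strip branch instead and ends up identical.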
@@ -65,9 +151,35 @@ class TaskToolCommandGenerator {
     const description = item.description || `Execute ${item.displayName || item.name}`;
 
     // Convert path to use {project-root} placeholder
+    // Handle undefined/missing path by constructing from module and name
     let itemPath = item.path;
-    if (itemPath && typeof itemPath === 'string' && itemPath.startsWith('bmad/')) {
-      itemPath = `{project-root}/${itemPath}`;
+    if (!itemPath || typeof itemPath !== 'string') {
+      // Fallback: construct path from module and name if path is missing
+      const typePlural = type === 'task' ? 'tasks' : 'tools';
+      itemPath = `{project-root}/${this.bmadFolderName}/${item.module}/${typePlural}/${item.name}.md`;
+    } else {
+      // Normalize path separators to forward slashes
+      itemPath = itemPath.replaceAll('\\', '/');
+
+      // Extract relative path from absolute paths (Windows or Unix)
+      // Look for _bmad/ or bmad/ in the path and extract everything after it
+      // Match patterns like: /_bmad/core/tasks/... or /bmad/core/tasks/...
+      // Use [/\\] to handle both Unix forward slashes and Windows backslashes,
+      // and also paths without a leading separator (e.g., C:/_bmad/...)
+      const bmadMatch = itemPath.match(/[/\\]_bmad[/\\](.+)$/) || itemPath.match(/[/\\]bmad[/\\](.+)$/);
+      if (bmadMatch) {
+        // Found /_bmad/ or /bmad/ - use relative path after it
+        itemPath = `{project-root}/${this.bmadFolderName}/${bmadMatch[1]}`;
+      } else if (itemPath.startsWith(`${BMAD_FOLDER_NAME}/`)) {
+        // Relative path starting with _bmad/
+        itemPath = `{project-root}/${this.bmadFolderName}/${itemPath.slice(BMAD_FOLDER_NAME.length + 1)}`;
+      } else if (itemPath.startsWith('bmad/')) {
+        // Relative path starting with bmad/
+        itemPath = `{project-root}/${this.bmadFolderName}/${itemPath.slice(5)}`;
+      } else if (!itemPath.startsWith('{project-root}')) {
+        // For other relative paths, prefix with project root and bmad folder
+        itemPath = `{project-root}/${this.bmadFolderName}/${itemPath}`;
+      }
+    }
     }
 
     return `---
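To make the branch order above concrete, a sketch with invented inputs (default bmadFolderName of '_bmad' assumed); every branch lands on the same placeholder form:

    '/home/dev/repo/_bmad/core/tasks/x.md'.match(/[/\\]_bmad[/\\](.+)$/)[1]; // 'core/tasks/x.md'
    // → '{project-root}/_bmad/core/tasks/x.md' via the bmadMatch branch
    // '_bmad/core/tasks/x.md' → startsWith(`${BMAD_FOLDER_NAME}/`) branch, same result
    // 'bmad/core/tasks/x.md'  → legacy startsWith('bmad/') branch, same result
    // 'core/tasks/x.md'       → final branch, prefixed with '{project-root}/_bmad/'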
@@ -187,7 +299,7 @@ Follow all instructions in the ${type} file exactly as written.
     // Generate command files for tasks
     for (const task of standaloneTasks) {
       const commandContent = this.generateCommandContent(task, 'task');
-      // Use underscore format: bmad_bmm_name.md
+      // Use dash format: bmad-bmm-name.md
       const flatName = toDashPath(`${task.module}/tasks/${task.name}.md`);
       const commandPath = path.join(baseCommandsDir, flatName);
       await fs.ensureDir(path.dirname(commandPath));

@@ -198,7 +310,7 @@ Follow all instructions in the ${type} file exactly as written.
     // Generate command files for tools
     for (const tool of standaloneTools) {
      const commandContent = this.generateCommandContent(tool, 'tool');
-      // Use underscore format: bmad_bmm_name.md
+      // Use dash format: bmad-bmm-name.md
       const flatName = toDashPath(`${tool.module}/tools/${tool.name}.md`);
       const commandPath = path.join(baseCommandsDir, flatName);
       await fs.ensureDir(path.dirname(commandPath));
@@ -2,13 +2,13 @@ const path = require('node:path');
 const fs = require('fs-extra');
 const csv = require('csv-parse/sync');
 const chalk = require('chalk');
-const { toColonPath, toDashPath, customAgentColonName, customAgentDashName } = require('./path-utils');
+const { toColonPath, toDashPath, customAgentColonName, customAgentDashName, BMAD_FOLDER_NAME } = require('./path-utils');
 
 /**
  * Generates command files for each workflow in the manifest
  */
 class WorkflowCommandGenerator {
-  constructor(bmadFolderName = 'bmad') {
+  constructor(bmadFolderName = BMAD_FOLDER_NAME) {
     this.templatePath = path.join(__dirname, '../templates/workflow-command-template.md');
     this.bmadFolderName = bmadFolderName;
   }
@@ -0,0 +1,10 @@
+---
+name: '{{name}}'
+description: '{{description}}'
+---
+
+# {{name}}
+
+Read the entire task file at: {project-root}/{{bmadFolderName}}/{{path}}
+
+Follow all instructions in the task file exactly as written.

@@ -0,0 +1,10 @@
+---
+name: '{{name}}'
+description: '{{description}}'
+---
+
+# {{name}}
+
+Read the entire tool file at: {project-root}/{{bmadFolderName}}/{{path}}
+
+Follow all instructions in the tool file exactly as written.
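Rendered with hypothetical values ('create-story' as {{name}}, '_bmad' as {{bmadFolderName}}, 'bmm/tasks/create-story.md' as {{path}}, and an invented description), the task template above would produce roughly:

    ---
    name: 'create-story'
    description: 'Create the next story from the sprint plan'
    ---

    # create-story

    Read the entire task file at: {project-root}/_bmad/bmm/tasks/create-story.md

    Follow all instructions in the task file exactly as written.

Note that {project-root} is carried through literally, unlike the {{mustache}} fields, which are filled in at generation time.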
@@ -0,0 +1,11 @@
+description = "Executes the {{name}} task from the BMAD Method."
+prompt = """
+Execute the BMAD '{{name}}' task.
+
+TASK INSTRUCTIONS:
+1. LOAD the task file from {project-root}/{{bmadFolderName}}/{{path}}
+2. READ its entire contents
+3. FOLLOW every instruction precisely as specified
+
+TASK FILE: {project-root}/{{bmadFolderName}}/{{path}}
+"""

@@ -0,0 +1,11 @@
+description = "Executes the {{name}} tool from the BMAD Method."
+prompt = """
+Execute the BMAD '{{name}}' tool.
+
+TOOL INSTRUCTIONS:
+1. LOAD the tool file from {project-root}/{{bmadFolderName}}/{{path}}
+2. READ its entire contents
+3. FOLLOW every instruction precisely as specified
+
+TOOL FILE: {project-root}/{{bmadFolderName}}/{{path}}
+"""
@@ -7,6 +7,7 @@ const { XmlHandler } = require('../../../lib/xml-handler');
 const { getProjectRoot, getSourcePath, getModulePath } = require('../../../lib/project-root');
 const { filterCustomizationData } = require('../../../lib/agent/compiler');
 const { ExternalModuleManager } = require('./external-manager');
+const { BMAD_FOLDER_NAME } = require('../ide/shared/path-utils');
 
 /**
  * Manages the installation, updating, and removal of BMAD modules.

@@ -27,7 +28,7 @@ const { ExternalModuleManager } = require('./external-manager');
 class ModuleManager {
   constructor(options = {}) {
     this.xmlHandler = new XmlHandler();
-    this.bmadFolderName = 'bmad'; // Default, can be overridden
+    this.bmadFolderName = BMAD_FOLDER_NAME; // Default, can be overridden
     this.customModulePaths = new Map(); // Initialize custom module paths
     this.externalModuleManager = new ExternalModuleManager(); // For external official modules
   }

@@ -870,7 +871,7 @@ class ModuleManager {
     for (const agentFile of agentFiles) {
       if (!agentFile.endsWith('.agent.yaml')) continue;
 
-      const relativePath = path.relative(sourceAgentsPath, agentFile);
+      const relativePath = path.relative(sourceAgentsPath, agentFile).split(path.sep).join('/');
       const targetDir = path.join(targetAgentsPath, path.dirname(relativePath));
 
       await fs.ensureDir(targetDir);
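The added `.split(path.sep).join('/')` only changes behaviour on Windows, where `path.relative()` returns backslash-separated segments; normalizing keeps the derived relative paths consistent across platforms. A sketch (hypothetical nested agent file):

    // Windows: path.sep === '\\'
    // path.relative('C:\\src\\bmm\\agents', 'C:\\src\\bmm\\agents\\sub\\pm.agent.yaml')
    //   → 'sub\\pm.agent.yaml'
    // 'sub\\pm.agent.yaml'.split(path.sep).join('/') → 'sub/pm.agent.yaml'
    // POSIX: the value is already 'sub/pm.agent.yaml', so the call is a no-op.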
@@ -42,7 +42,7 @@ function findBmadConfig(startPath = process.cwd()) {
  * @returns {string} Resolved path
  */
 function resolvePath(pathStr, context) {
-  return pathStr.replaceAll('{project-root}', context.projectRoot).replaceAll('{bmad-folder}', context_bmadFolder);
+  return pathStr.replaceAll('{project-root}', context.projectRoot).replaceAll('{bmad-folder}', context.bmadFolder);
 }
 
 /**
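The fix above (the typo `context_bmadFolder` becoming `context.bmadFolder`) is what lets the second placeholder resolve at all. A minimal sketch with made-up context values:

    resolvePath('{project-root}/{bmad-folder}/bmm/config.yaml', {
      projectRoot: '/repo',
      bmadFolder: '_bmad',
    });
    // → '/repo/_bmad/bmm/config.yaml'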
@@ -0,0 +1,480 @@
+/**
+ * File Reference Validator
+ *
+ * Validates cross-file references in BMAD source files (agents, workflows, tasks, steps).
+ * Catches broken file paths, missing referenced files, and absolute path leaks.
+ *
+ * What it checks:
+ * - {project-root}/_bmad/ references in YAML and markdown resolve to real src/ files
+ * - Relative path references (./file.md, ../data/file.csv) point to existing files
+ * - exec="..." and <invoke-task> targets exist
+ * - Step metadata (thisStepFile, nextStepFile) references are valid
+ * - Load directives (Load: `./file.md`) target existing files
+ * - No absolute paths (/Users/, /home/, C:\) leak into source files
+ *
+ * What it does NOT check (deferred):
+ * - {installed_path} variable interpolation (self-referential, low risk)
+ * - {{mustache}} template variables (runtime substitution)
+ * - {config_source}:key dynamic YAML dereferences
+ *
+ * Usage:
+ *   node tools/validate-file-refs.js            # Warn on broken references (exit 0)
+ *   node tools/validate-file-refs.js --strict   # Fail on broken references (exit 1)
+ *   node tools/validate-file-refs.js --verbose  # Show all checked references
+ *
+ * Default mode is warning-only (exit 0) so adoption is non-disruptive.
+ * Use --strict when you want CI or pre-commit to enforce valid references.
+ */
+
+const fs = require('node:fs');
+const path = require('node:path');
+const yaml = require('yaml');
+
+const PROJECT_ROOT = path.resolve(__dirname, '..');
+const SRC_DIR = path.join(PROJECT_ROOT, 'src');
+const VERBOSE = process.argv.includes('--verbose');
+const STRICT = process.argv.includes('--strict');
+
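Since package.json now wires the script up as `validate:refs`, the same modes can also be run through npm; the extra `--` passes the flag through to the script (illustrative invocations):

    npm run validate:refs
    npm run validate:refs -- --strict
    npm run validate:refs -- --verbose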
+// --- Constants ---
+
+// File extensions to scan
+const SCAN_EXTENSIONS = new Set(['.yaml', '.yml', '.md', '.xml']);
+
+// Skip directories
+const SKIP_DIRS = new Set(['node_modules', '_module-installer', '.git']);
+
+// Pattern: {project-root}/_bmad/ references
+const PROJECT_ROOT_REF = /\{project-root\}\/_bmad\/([^\s'"<>})\]`]+)/g;
+
+// Pattern: {_bmad}/ shorthand references
+const BMAD_SHORTHAND_REF = /\{_bmad\}\/([^\s'"<>})\]`]+)/g;
+
+// Pattern: exec="..." attributes
+const EXEC_ATTR = /exec="([^"]+)"/g;
+
+// Pattern: <invoke-task> content
+const INVOKE_TASK = /<invoke-task>([^<]+)<\/invoke-task>/g;
+
+// Pattern: relative paths in quotes
+const RELATIVE_PATH_QUOTED = /['"](\.\.\/?[^'"]+\.(?:md|yaml|yml|xml|json|csv|txt))['"]/g;
+const RELATIVE_PATH_DOT = /['"](\.\/[^'"]+\.(?:md|yaml|yml|xml|json|csv|txt))['"]/g;
+
+// Pattern: step metadata
+const STEP_META = /(?:thisStepFile|nextStepFile|continueStepFile|skipToStepFile|altStepFile|workflowFile):\s*['"](\.[^'"]+)['"]/g;
+
+// Pattern: Load directives
+const LOAD_DIRECTIVE = /Load[:\s]+`(\.[^`]+)`/g;
+
+// Pattern: absolute path leaks
+const ABS_PATH_LEAK = /(?:\/Users\/|\/home\/|[A-Z]:\\\\)/;
+
+// --- Output Escaping ---
+
+function escapeAnnotation(str) {
+  return str.replaceAll('%', '%25').replaceAll('\r', '%0D').replaceAll('\n', '%0A');
+}
+
+function escapeTableCell(str) {
+  return String(str).replaceAll('|', String.raw`\|`);
+}
+
+// Path prefixes/patterns that only exist in installed structure, not in source
+const INSTALL_ONLY_PATHS = ['_config/'];
+
+// Files that are generated at install time and don't exist in the source tree
+const INSTALL_GENERATED_FILES = ['config.yaml'];
+
+// Variables that indicate a path is not statically resolvable
+const UNRESOLVABLE_VARS = [
+  '{output_folder}',
+  '{value}',
+  '{timestamp}',
+  '{config_source}:',
+  '{installed_path}',
+  '{shared_path}',
+  '{planning_artifacts}',
+  '{research_topic}',
+  '{user_name}',
+  '{communication_language}',
+  '{epic_number}',
+  '{next_epic_num}',
+  '{epic_num}',
+  '{part_id}',
+  '{count}',
+  '{date}',
+  '{outputFile}',
+  '{nextStepFile}',
+];
+
+// --- File Discovery ---
+
+function getSourceFiles(dir) {
+  const files = [];
+
+  function walk(currentDir) {
+    const entries = fs.readdirSync(currentDir, { withFileTypes: true });
+
+    for (const entry of entries) {
+      if (SKIP_DIRS.has(entry.name)) continue;
+
+      const fullPath = path.join(currentDir, entry.name);
+
+      if (entry.isDirectory()) {
+        walk(fullPath);
+      } else if (entry.isFile() && SCAN_EXTENSIONS.has(path.extname(entry.name))) {
+        files.push(fullPath);
+      }
+    }
+  }
+
+  walk(dir);
+  return files;
+}
+
+// --- Code Block Stripping ---
+
+function stripCodeBlocks(content) {
+  return content.replaceAll(/```[\s\S]*?```/g, (m) => m.replaceAll(/[^\n]/g, ''));
+}
+
+function stripJsonExampleBlocks(content) {
+  // Strip bare JSON example blocks: { and } each on their own line.
+  // These are example/template data (not real file references).
+  return content.replaceAll(/^\{\s*\n(?:.*\n)*?^\}\s*$/gm, (m) => m.replaceAll(/[^\n]/g, ''));
+}
+
+// --- Path Mapping ---
+
+function mapInstalledToSource(refPath) {
+  // Strip {project-root}/_bmad/ or {_bmad}/ prefix
+  let cleaned = refPath.replace(/^\{project-root\}\/_bmad\//, '').replace(/^\{_bmad\}\//, '');
+
+  // Also handle bare _bmad/ prefix (seen in some invoke-task)
+  cleaned = cleaned.replace(/^_bmad\//, '');
+
+  // Skip install-only paths (generated at install time, not in source)
+  if (isInstallOnly(cleaned)) return null;
+
+  // core/, bmm/, and utility/ are directly under src/
+  if (cleaned.startsWith('core/') || cleaned.startsWith('bmm/') || cleaned.startsWith('utility/')) {
+    return path.join(SRC_DIR, cleaned);
+  }
+
+  // Fallback: map directly under src/
+  return path.join(SRC_DIR, cleaned);
+}
+
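In short, installed-style references are mapped back onto the source tree before any existence check. A sketch with an invented reference path:

    mapInstalledToSource('{project-root}/_bmad/bmm/workflows/plan-project/workflow.yaml');
    // → path.join(SRC_DIR, 'bmm/workflows/plan-project/workflow.yaml'), i.e. a file under src/
    mapInstalledToSource('{_bmad}/_config/config.yaml');
    // → null ('_config/' is install-only and 'config.yaml' is install-generated, so it is skipped)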
+// --- Reference Extraction ---
+
+function isResolvable(refStr) {
+  // Skip refs containing unresolvable runtime variables
+  if (refStr.includes('{{')) return false;
+  for (const v of UNRESOLVABLE_VARS) {
+    if (refStr.includes(v)) return false;
+  }
+  return true;
+}
+
+function isInstallOnly(cleanedPath) {
+  // Skip paths that only exist in the installed _bmad/ structure, not in src/
+  for (const prefix of INSTALL_ONLY_PATHS) {
+    if (cleanedPath.startsWith(prefix)) return true;
+  }
+  // Skip files that are generated during installation
+  const basename = path.basename(cleanedPath);
+  for (const generated of INSTALL_GENERATED_FILES) {
+    if (basename === generated) return true;
+  }
+  return false;
+}
+
+function extractYamlRefs(filePath, content) {
+  const refs = [];
+
+  let doc;
+  try {
+    doc = yaml.parseDocument(content);
+  } catch {
+    return refs; // Skip unparseable YAML (schema validator handles this)
+  }
+
+  function checkValue(value, range, keyPath) {
+    if (typeof value !== 'string') return;
+    if (!isResolvable(value)) return;
+
+    const line = range ? offsetToLine(content, range[0]) : undefined;
+
+    // Check for {project-root}/_bmad/ refs
+    const prMatch = value.match(/\{project-root\}\/_bmad\/[^\s'"<>})\]`]+/);
+    if (prMatch) {
+      refs.push({ file: filePath, raw: prMatch[0], type: 'project-root', line, key: keyPath });
+    }
+
+    // Check for {_bmad}/ refs
+    const bmMatch = value.match(/\{_bmad\}\/[^\s'"<>})\]`]+/);
+    if (bmMatch) {
+      refs.push({ file: filePath, raw: bmMatch[0], type: 'project-root', line, key: keyPath });
+    }
+
+    // Check for relative paths
+    const relMatch = value.match(/^\.\.?\/[^\s'"<>})\]`]+\.(?:md|yaml|yml|xml|json|csv|txt)$/);
+    if (relMatch) {
+      refs.push({ file: filePath, raw: relMatch[0], type: 'relative', line, key: keyPath });
+    }
+  }
+
+  function walkNode(node, keyPath) {
+    if (!node) return;
+
+    if (yaml.isMap(node)) {
+      for (const item of node.items) {
+        const key = item.key && item.key.value !== undefined ? item.key.value : '?';
+        const childPath = keyPath ? `${keyPath}.${key}` : String(key);
+        walkNode(item.value, childPath);
+      }
+    } else if (yaml.isSeq(node)) {
+      for (const [i, item] of node.items.entries()) {
+        walkNode(item, `${keyPath}[${i}]`);
+      }
+    } else if (yaml.isScalar(node)) {
+      checkValue(node.value, node.range, keyPath);
+    }
+  }
+
+  walkNode(doc.contents, '');
+  return refs;
+}
+
+function offsetToLine(content, offset) {
+  let line = 1;
+  for (let i = 0; i < offset && i < content.length; i++) {
+    if (content[i] === '\n') line++;
+  }
+  return line;
+}
+
+function extractMarkdownRefs(filePath, content) {
+  const refs = [];
+  const stripped = stripJsonExampleBlocks(stripCodeBlocks(content));
+
+  function runPattern(regex, type) {
+    regex.lastIndex = 0;
+    let match;
+    while ((match = regex.exec(stripped)) !== null) {
+      const raw = match[1];
+      if (!isResolvable(raw)) continue;
+      refs.push({ file: filePath, raw, type, line: offsetToLine(stripped, match.index) });
+    }
+  }
+
+  // {project-root}/_bmad/ refs
+  runPattern(PROJECT_ROOT_REF, 'project-root');
+
+  // {_bmad}/ shorthand
+  runPattern(BMAD_SHORTHAND_REF, 'project-root');
+
+  // exec="..." attributes
+  runPattern(EXEC_ATTR, 'exec-attr');
+
+  // <invoke-task> tags
+  runPattern(INVOKE_TASK, 'invoke-task');
+
+  // Step metadata
+  runPattern(STEP_META, 'relative');
+
+  // Load directives
+  runPattern(LOAD_DIRECTIVE, 'relative');
+
+  // Relative paths in quotes
+  runPattern(RELATIVE_PATH_QUOTED, 'relative');
+  runPattern(RELATIVE_PATH_DOT, 'relative');
+
+  return refs;
+}
+
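For orientation, these are the kinds of source lines the extractor turns into refs (all three examples are invented, not quoted from real task files):

    // exec="{project-root}/_bmad/core/tasks/validate-workflow.xml"           → type 'exec-attr'
    // Load: `./checklist.md`                                                 → type 'relative'
    // <invoke-task>{project-root}/_bmad/bmm/tasks/review.md</invoke-task>    → type 'invoke-task'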
+// --- Reference Resolution ---
+
+function resolveRef(ref) {
+  if (ref.type === 'project-root') {
+    return mapInstalledToSource(ref.raw);
+  }
+
+  if (ref.type === 'relative') {
+    return path.resolve(path.dirname(ref.file), ref.raw);
+  }
+
+  if (ref.type === 'exec-attr') {
+    let execPath = ref.raw;
+    if (execPath.includes('{project-root}')) {
+      return mapInstalledToSource(execPath);
+    }
+    if (execPath.includes('{_bmad}')) {
+      return mapInstalledToSource(execPath);
+    }
+    if (execPath.startsWith('_bmad/')) {
+      return mapInstalledToSource(execPath);
+    }
+    // Relative exec path
+    return path.resolve(path.dirname(ref.file), execPath);
+  }
+
+  if (ref.type === 'invoke-task') {
+    // Extract file path from invoke-task content
+    const prMatch = ref.raw.match(/\{project-root\}\/_bmad\/([^\s'"<>})\]`]+)/);
+    if (prMatch) return mapInstalledToSource(prMatch[0]);
+
+    const bmMatch = ref.raw.match(/\{_bmad\}\/([^\s'"<>})\]`]+)/);
+    if (bmMatch) return mapInstalledToSource(bmMatch[0]);
+
+    const bareMatch = ref.raw.match(/_bmad\/([^\s'"<>})\]`]+)/);
+    if (bareMatch) return mapInstalledToSource(bareMatch[0]);
+
+    return null; // Can't resolve — skip
+  }
+
+  return null;
+}
+
+// --- Absolute Path Leak Detection ---
+
+function checkAbsolutePathLeaks(filePath, content) {
+  const leaks = [];
+  const stripped = stripCodeBlocks(content);
+  const lines = stripped.split('\n');
+
+  for (const [i, line] of lines.entries()) {
+    if (ABS_PATH_LEAK.test(line)) {
+      leaks.push({ file: filePath, line: i + 1, content: line.trim() });
+    }
+  }
+
+  return leaks;
+}
+
+// --- Main ---
+
+console.log(`\nValidating file references in: ${SRC_DIR}`);
+console.log(`Mode: ${STRICT ? 'STRICT (exit 1 on issues)' : 'WARNING (exit 0)'}${VERBOSE ? ' + VERBOSE' : ''}\n`);
+
+const files = getSourceFiles(SRC_DIR);
+console.log(`Found ${files.length} source files\n`);
+
+let totalRefs = 0;
+let brokenRefs = 0;
+let totalLeaks = 0;
+let filesWithIssues = 0;
+const allIssues = []; // Collect for $GITHUB_STEP_SUMMARY
+
+for (const filePath of files) {
+  const relativePath = path.relative(PROJECT_ROOT, filePath);
+  const content = fs.readFileSync(filePath, 'utf-8');
+  const ext = path.extname(filePath);
+
+  // Extract references
+  let refs;
+  if (ext === '.yaml' || ext === '.yml') {
+    refs = extractYamlRefs(filePath, content);
+  } else {
+    refs = extractMarkdownRefs(filePath, content);
+  }
+
+  // Resolve and check
+  const broken = [];
+
+  if (VERBOSE && refs.length > 0) {
+    console.log(`\n${relativePath}`);
+  }
+
+  for (const ref of refs) {
+    totalRefs++;
+    const resolved = resolveRef(ref);
+
+    if (resolved && !fs.existsSync(resolved)) {
+      // For paths without extensions, also check if it's a directory
+      const hasExt = path.extname(resolved) !== '';
+      if (!hasExt) {
+        // Could be a directory reference — skip if not clearly a file
+        continue;
+      }
+      broken.push({ ref, resolved: path.relative(PROJECT_ROOT, resolved) });
+      brokenRefs++;
+      continue;
+    }
+
+    if (VERBOSE && resolved) {
+      console.log(`  [OK] ${ref.raw}`);
+    }
+  }
+
+  // Check absolute path leaks
+  const leaks = checkAbsolutePathLeaks(filePath, content);
+  totalLeaks += leaks.length;
+
+  // Report issues for this file
+  if (broken.length > 0 || leaks.length > 0) {
+    filesWithIssues++;
+    if (!VERBOSE) {
+      console.log(`\n${relativePath}`);
+    }
+
+    for (const { ref, resolved } of broken) {
+      const location = ref.line ? `line ${ref.line}` : ref.key ? `key: ${ref.key}` : '';
+      console.log(`  [BROKEN] ${ref.raw}${location ? ` (${location})` : ''}`);
+      console.log(`           Target not found: ${resolved}`);
+      allIssues.push({ file: relativePath, line: ref.line || 1, ref: ref.raw, issue: 'broken ref' });
+      if (process.env.GITHUB_ACTIONS) {
+        const line = ref.line || 1;
+        console.log(`::warning file=${relativePath},line=${line}::${escapeAnnotation(`Broken reference: ${ref.raw} → ${resolved}`)}`);
+      }
+    }
+
+    for (const leak of leaks) {
+      console.log(`  [ABS-PATH] Line ${leak.line}: ${leak.content}`);
+      allIssues.push({ file: relativePath, line: leak.line, ref: leak.content, issue: 'abs-path' });
+      if (process.env.GITHUB_ACTIONS) {
+        console.log(`::warning file=${relativePath},line=${leak.line}::${escapeAnnotation(`Absolute path leak: ${leak.content}`)}`);
+      }
+    }
+  }
+}
+
+// Summary
+console.log(`\n${'─'.repeat(60)}`);
+console.log(`\nSummary:`);
+console.log(`  Files scanned:       ${files.length}`);
+console.log(`  References checked:  ${totalRefs}`);
+console.log(`  Broken references:   ${brokenRefs}`);
+console.log(`  Absolute path leaks: ${totalLeaks}`);
+
+const hasIssues = brokenRefs > 0 || totalLeaks > 0;
+
+if (hasIssues) {
+  console.log(`\n  ${filesWithIssues} file(s) with issues`);
+
+  if (STRICT) {
+    console.log(`\n  [STRICT MODE] Exiting with failure.`);
+  } else {
+    console.log(`\n  Run with --strict to treat warnings as errors.`);
+  }
+} else {
+  console.log(`\n  All file references valid!`);
+}
+
+console.log('');
+
+// Write GitHub Actions step summary
+if (process.env.GITHUB_STEP_SUMMARY) {
+  let summary = '## File Reference Validation\n\n';
+  if (allIssues.length > 0) {
+    summary += '| File | Line | Reference | Issue |\n';
+    summary += '|------|------|-----------|-------|\n';
+    for (const issue of allIssues) {
+      summary += `| ${escapeTableCell(issue.file)} | ${issue.line} | ${escapeTableCell(issue.ref)} | ${issue.issue} |\n`;
+    }
+    summary += '\n';
+  }
+  summary += `**${files.length} files scanned, ${totalRefs} references checked, ${brokenRefs + totalLeaks} issues found**\n`;
+  fs.appendFileSync(process.env.GITHUB_STEP_SUMMARY, summary);
+}
+
+process.exit(hasIssues && STRICT ? 1 : 0);
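When GITHUB_ACTIONS is set, every finding is also emitted as a workflow annotation in the format built above; with a hypothetical broken reference the emitted line would look roughly like:

    ::warning file=src/bmm/workflows/plan/instructions.md,line=14::Broken reference: {project-root}/_bmad/bmm/tasks/missing.md → src/bmm/tasks/missing.md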
@@ -110,41 +110,7 @@ export default defineConfig({
           collapsed: true,
           autogenerate: { directory: 'reference' },
         },
-        {
-          label: 'TEA - Testing in BMAD',
-          collapsed: true,
-          items: [
-            {
-              label: 'Tutorials',
-              autogenerate: { directory: 'tea/tutorials' },
-            },
-            {
-              label: 'How-To Guides',
-              items: [
-                {
-                  label: 'Workflows',
-                  autogenerate: { directory: 'tea/how-to/workflows' },
-                },
-                {
-                  label: 'Customization',
-                  autogenerate: { directory: 'tea/how-to/customization' },
-                },
-                {
-                  label: 'Brownfield',
-                  autogenerate: { directory: 'tea/how-to/brownfield' },
-                },
-              ],
-            },
-            {
-              label: 'Explanation',
-              autogenerate: { directory: 'tea/explanation' },
-            },
-            {
-              label: 'Reference',
-              autogenerate: { directory: 'tea/reference' },
-            },
-          ],
-        },
+        // TEA docs moved to standalone module site; keep BMM sidebar focused.
       ],
 
       // Credits in footer