Compare commits
16 Commits
a395df0459...d74765ce8d
| Author | SHA1 | Date |
|---|---|---|
| | d74765ce8d | |
| | 02513c721f | |
| | e7a34a2b61 | |
| | 85339708e6 | |
| | b4f230f565 | |
| | b102694c64 | |
| | 5aef6379b9 | |
| | 4cb5cc7dbc | |
| | c5d0fb55ba | |
| | c0adbc4e76 | |
| | f6dab0d0ff | |
| | cf6cf779bb | |
| | 7074395bdd | |
| | 9b8ce69f37 | |
| | 79959e75ac | |
| | 5e841f9cac | |

@@ -81,6 +81,21 @@ export default [
},
},

// Test files using Vitest (ES modules)
{
files: ['test/unit/**/*.js', 'test/integration/**/*.js', 'test/helpers/**/*.js', 'test/setup.js', 'vitest.config.js'],
languageOptions: {
sourceType: 'module',
ecmaVersion: 'latest',
},
rules: {
// Allow dev dependencies in test files
'n/no-unpublished-import': 'off',
'unicorn/prefer-module': 'off',
'no-unused-vars': 'off',
},
},

// CLI scripts under tools/** and test/**
{
files: ['tools/**/*.js', 'tools/**/*.mjs', 'test/**/*.js'],

@ -35,6 +35,8 @@
|
|||
"@astrojs/sitemap": "^3.6.0",
|
||||
"@astrojs/starlight": "^0.37.0",
|
||||
"@eslint/js": "^9.33.0",
|
||||
"@vitest/coverage-v8": "^4.0.16",
|
||||
"@vitest/ui": "^4.0.16",
|
||||
"archiver": "^7.0.1",
|
||||
"astro": "^5.16.0",
|
||||
"c8": "^10.1.3",
|
||||
|
|
@ -50,6 +52,7 @@
|
|||
"prettier": "^3.7.4",
|
||||
"prettier-plugin-packagejson": "^2.5.19",
|
||||
"sharp": "^0.33.5",
|
||||
"vitest": "^4.0.16",
|
||||
"yaml-eslint-parser": "^1.2.3",
|
||||
"yaml-lint": "^1.7.0"
|
||||
},
|
||||
|
|
@ -244,7 +247,6 @@
|
|||
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.27.1",
|
||||
"@babel/generator": "^7.28.5",
|
||||
|
|
@ -2984,6 +2986,13 @@
|
|||
"url": "https://opencollective.com/pkgr"
|
||||
}
|
||||
},
|
||||
"node_modules/@polka/url": {
|
||||
"version": "1.0.0-next.29",
|
||||
"resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz",
|
||||
"integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@rollup/pluginutils": {
|
||||
"version": "5.3.0",
|
||||
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz",
|
||||
|
|
@ -3436,6 +3445,13 @@
|
|||
"@sinonjs/commons": "^3.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@standard-schema/spec": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz",
|
||||
"integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@swc/helpers": {
|
||||
"version": "0.5.18",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.18.tgz",
|
||||
|
|
@ -3502,6 +3518,17 @@
|
|||
"@babel/types": "^7.28.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/chai": {
|
||||
"version": "5.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz",
|
||||
"integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/deep-eql": "*",
|
||||
"assertion-error": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/debug": {
|
||||
"version": "4.1.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
|
||||
|
|
@ -3511,6 +3538,13 @@
|
|||
"@types/ms": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/deep-eql": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz",
|
||||
"integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/estree": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
|
||||
|
|
@ -3954,6 +3988,171 @@
|
|||
"win32"
|
||||
]
|
||||
},
|
||||
"node_modules/@vitest/coverage-v8": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.16.tgz",
|
||||
"integrity": "sha512-2rNdjEIsPRzsdu6/9Eq0AYAzYdpP6Bx9cje9tL3FE5XzXRQF1fNU9pe/1yE8fCrS0HD+fBtt6gLPh6LI57tX7A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@bcoe/v8-coverage": "^1.0.2",
|
||||
"@vitest/utils": "4.0.16",
|
||||
"ast-v8-to-istanbul": "^0.3.8",
|
||||
"istanbul-lib-coverage": "^3.2.2",
|
||||
"istanbul-lib-report": "^3.0.1",
|
||||
"istanbul-lib-source-maps": "^5.0.6",
|
||||
"istanbul-reports": "^3.2.0",
|
||||
"magicast": "^0.5.1",
|
||||
"obug": "^2.1.1",
|
||||
"std-env": "^3.10.0",
|
||||
"tinyrainbow": "^3.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@vitest/browser": "4.0.16",
|
||||
"vitest": "4.0.16"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@vitest/browser": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/expect": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.16.tgz",
|
||||
"integrity": "sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@standard-schema/spec": "^1.0.0",
|
||||
"@types/chai": "^5.2.2",
|
||||
"@vitest/spy": "4.0.16",
|
||||
"@vitest/utils": "4.0.16",
|
||||
"chai": "^6.2.1",
|
||||
"tinyrainbow": "^3.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/mocker": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.16.tgz",
|
||||
"integrity": "sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/spy": "4.0.16",
|
||||
"estree-walker": "^3.0.3",
|
||||
"magic-string": "^0.30.21"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"msw": "^2.4.9",
|
||||
"vite": "^6.0.0 || ^7.0.0-0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"msw": {
|
||||
"optional": true
|
||||
},
|
||||
"vite": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/pretty-format": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.16.tgz",
|
||||
"integrity": "sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tinyrainbow": "^3.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/runner": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.16.tgz",
|
||||
"integrity": "sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/utils": "4.0.16",
|
||||
"pathe": "^2.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/snapshot": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.16.tgz",
|
||||
"integrity": "sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/pretty-format": "4.0.16",
|
||||
"magic-string": "^0.30.21",
|
||||
"pathe": "^2.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/spy": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.16.tgz",
|
||||
"integrity": "sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/ui": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-4.0.16.tgz",
|
||||
"integrity": "sha512-rkoPH+RqWopVxDnCBE/ysIdfQ2A7j1eDmW8tCxxrR9nnFBa9jKf86VgsSAzxBd1x+ny0GC4JgiD3SNfRHv3pOg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/utils": "4.0.16",
|
||||
"fflate": "^0.8.2",
|
||||
"flatted": "^3.3.3",
|
||||
"pathe": "^2.0.3",
|
||||
"sirv": "^3.0.2",
|
||||
"tinyglobby": "^0.2.15",
|
||||
"tinyrainbow": "^3.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"vitest": "4.0.16"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/utils": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.16.tgz",
|
||||
"integrity": "sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/pretty-format": "4.0.16",
|
||||
"tinyrainbow": "^3.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
}
|
||||
},
|
||||
"node_modules/abort-controller": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||
|
|
@ -3973,7 +4172,6 @@
|
|||
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"acorn": "bin/acorn"
|
||||
},
|
||||
|
|
@ -4266,6 +4464,35 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/assertion-error": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
|
||||
"integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/ast-v8-to-istanbul": {
|
||||
"version": "0.3.10",
|
||||
"resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.10.tgz",
|
||||
"integrity": "sha512-p4K7vMz2ZSk3wN8l5o3y2bJAoZXT3VuJI5OLTATY/01CYWumWvwkUw0SqDBnNq6IiTO3qDa1eSQDibAV8g7XOQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@jridgewell/trace-mapping": "^0.3.31",
|
||||
"estree-walker": "^3.0.3",
|
||||
"js-tokens": "^9.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/ast-v8-to-istanbul/node_modules/js-tokens": {
|
||||
"version": "9.0.1",
|
||||
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz",
|
||||
"integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/astring": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz",
|
||||
|
|
@ -4282,7 +4509,6 @@
|
|||
"integrity": "sha512-6mF/YrvwwRxLTu+aMEa5pwzKUNl5ZetWbTyZCs9Um0F12HUmxUiF5UHiZPy4rifzU3gtpM3xP2DfdmkNX9eZRg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@astrojs/compiler": "^2.13.0",
|
||||
"@astrojs/internal-helpers": "0.7.5",
|
||||
|
|
@ -5350,7 +5576,6 @@
|
|||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"baseline-browser-mapping": "^2.9.0",
|
||||
"caniuse-lite": "^1.0.30001759",
|
||||
|
|
@ -5517,6 +5742,16 @@
|
|||
"url": "https://github.com/sponsors/wooorm"
|
||||
}
|
||||
},
|
||||
"node_modules/chai": {
|
||||
"version": "6.2.2",
|
||||
"resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
|
||||
"integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/chalk": {
|
||||
"version": "4.1.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||
|
|
@ -6666,7 +6901,6 @@
|
|||
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.8.0",
|
||||
"@eslint-community/regexpp": "^4.12.1",
|
||||
|
|
@ -7253,6 +7487,16 @@
|
|||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/expect-type": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
|
||||
"integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/expressive-code": {
|
||||
"version": "0.41.5",
|
||||
"resolved": "https://registry.npmjs.org/expressive-code/-/expressive-code-0.41.5.tgz",
|
||||
|
|
@ -7368,6 +7612,13 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/fflate": {
|
||||
"version": "0.8.2",
|
||||
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz",
|
||||
"integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/figlet": {
|
||||
"version": "1.9.4",
|
||||
"resolved": "https://registry.npmjs.org/figlet/-/figlet-1.9.4.tgz",
|
||||
|
|
@ -10228,7 +10479,6 @@
|
|||
"integrity": "sha512-p3JTemJJbkiMjXEMiFwgm0v6ym5g8K+b2oDny+6xdl300tUKySxvilJQLSea48C6OaYNmO30kH9KxpiAg5bWJw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"globby": "15.0.0",
|
||||
"js-yaml": "4.1.1",
|
||||
|
|
@ -11699,6 +11949,17 @@
|
|||
"url": "https://github.com/fb55/nth-check?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/obug": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz",
|
||||
"integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
"https://github.com/sponsors/sxzz",
|
||||
"https://opencollective.com/debug"
|
||||
],
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/ofetch": {
|
||||
"version": "1.5.1",
|
||||
"resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.5.1.tgz",
|
||||
|
|
@ -12144,6 +12405,13 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/pathe": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
|
||||
"integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/piccolore": {
|
||||
"version": "0.1.3",
|
||||
"resolved": "https://registry.npmjs.org/piccolore/-/piccolore-0.1.3.tgz",
|
||||
|
|
@ -12292,7 +12560,6 @@
|
|||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"nanoid": "^3.3.11",
|
||||
"picocolors": "^1.1.1",
|
||||
|
|
@ -12358,7 +12625,6 @@
|
|||
"integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"prettier": "bin/prettier.cjs"
|
||||
},
|
||||
|
|
@ -13187,7 +13453,6 @@
|
|||
"integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"@types/estree": "1.0.8"
|
||||
},
|
||||
|
|
@ -13371,6 +13636,13 @@
|
|||
"@types/hast": "^3.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/siginfo": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
|
||||
"integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/signal-exit": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
|
||||
|
|
@ -13400,6 +13672,21 @@
|
|||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/sirv": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz",
|
||||
"integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@polka/url": "^1.0.0-next.24",
|
||||
"mrmime": "^2.0.0",
|
||||
"totalist": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/sisteransi": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
|
||||
|
|
@ -13610,6 +13897,20 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/stackback": {
|
||||
"version": "0.0.2",
|
||||
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
|
||||
"integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/std-env": {
|
||||
"version": "3.10.0",
|
||||
"resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz",
|
||||
"integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/stream-replace-string": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/stream-replace-string/-/stream-replace-string-2.0.0.tgz",
|
||||
|
|
@ -14024,6 +14325,13 @@
|
|||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tinybench": {
|
||||
"version": "2.9.0",
|
||||
"resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz",
|
||||
"integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tinyexec": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz",
|
||||
|
|
@ -14051,6 +14359,16 @@
|
|||
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||
}
|
||||
},
|
||||
"node_modules/tinyrainbow": {
|
||||
"version": "3.0.3",
|
||||
"resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz",
|
||||
"integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tmpl": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
|
||||
|
|
@ -14071,6 +14389,16 @@
|
|||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/totalist": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz",
|
||||
"integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/trim-lines": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
|
||||
|
|
@ -14727,7 +15055,6 @@
|
|||
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"dependencies": {
|
||||
"esbuild": "^0.25.0",
|
||||
"fdir": "^6.4.4",
|
||||
|
|
@ -14817,6 +15144,84 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/vitest": {
|
||||
"version": "4.0.16",
|
||||
"resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz",
|
||||
"integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/expect": "4.0.16",
|
||||
"@vitest/mocker": "4.0.16",
|
||||
"@vitest/pretty-format": "4.0.16",
|
||||
"@vitest/runner": "4.0.16",
|
||||
"@vitest/snapshot": "4.0.16",
|
||||
"@vitest/spy": "4.0.16",
|
||||
"@vitest/utils": "4.0.16",
|
||||
"es-module-lexer": "^1.7.0",
|
||||
"expect-type": "^1.2.2",
|
||||
"magic-string": "^0.30.21",
|
||||
"obug": "^2.1.1",
|
||||
"pathe": "^2.0.3",
|
||||
"picomatch": "^4.0.3",
|
||||
"std-env": "^3.10.0",
|
||||
"tinybench": "^2.9.0",
|
||||
"tinyexec": "^1.0.2",
|
||||
"tinyglobby": "^0.2.15",
|
||||
"tinyrainbow": "^3.0.3",
|
||||
"vite": "^6.0.0 || ^7.0.0",
|
||||
"why-is-node-running": "^2.3.0"
|
||||
},
|
||||
"bin": {
|
||||
"vitest": "vitest.mjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/vitest"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@edge-runtime/vm": "*",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0",
|
||||
"@vitest/browser-playwright": "4.0.16",
|
||||
"@vitest/browser-preview": "4.0.16",
|
||||
"@vitest/browser-webdriverio": "4.0.16",
|
||||
"@vitest/ui": "4.0.16",
|
||||
"happy-dom": "*",
|
||||
"jsdom": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@edge-runtime/vm": {
|
||||
"optional": true
|
||||
},
|
||||
"@opentelemetry/api": {
|
||||
"optional": true
|
||||
},
|
||||
"@types/node": {
|
||||
"optional": true
|
||||
},
|
||||
"@vitest/browser-playwright": {
|
||||
"optional": true
|
||||
},
|
||||
"@vitest/browser-preview": {
|
||||
"optional": true
|
||||
},
|
||||
"@vitest/browser-webdriverio": {
|
||||
"optional": true
|
||||
},
|
||||
"@vitest/ui": {
|
||||
"optional": true
|
||||
},
|
||||
"happy-dom": {
|
||||
"optional": true
|
||||
},
|
||||
"jsdom": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/walker": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz",
|
||||
|
|
@ -14872,6 +15277,23 @@
|
|||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/why-is-node-running": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
|
||||
"integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"siginfo": "^2.0.0",
|
||||
"stackback": "0.0.2"
|
||||
},
|
||||
"bin": {
|
||||
"why-is-node-running": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/widest-line": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz",
|
||||
|
|
@ -15001,7 +15423,6 @@
|
|||
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
|
||||
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
|
||||
"license": "ISC",
|
||||
"peer": true,
|
||||
"bin": {
|
||||
"yaml": "bin.mjs"
|
||||
},
|
||||
|
|
@ -15181,7 +15602,6 @@
|
|||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
|
|
|
|||
package.json (12 changes)

@@ -45,10 +45,15 @@
"release:minor": "gh workflow run \"Manual Release\" -f version_bump=minor",
"release:patch": "gh workflow run \"Manual Release\" -f version_bump=patch",
"release:watch": "gh run watch",
"test": "npm run test:schemas && npm run test:install && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
"test:coverage": "c8 --reporter=text --reporter=html npm run test:schemas",
"test": "npm run test:schemas && npm run test:install && npm run test:unit && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
"test:coverage": "vitest run --coverage",
"test:install": "node test/test-installation-components.js",
"test:integration": "vitest run test/integration",
"test:quick": "vitest run --changed",
"test:schemas": "node test/test-agent-schema.js",
"test:ui": "vitest --ui",
"test:unit": "vitest run",
"test:unit:watch": "vitest",
"validate:schemas": "node tools/validate-agent-schema.js"
},
"lint-staged": {

@@ -90,6 +95,8 @@
"@astrojs/sitemap": "^3.6.0",
"@astrojs/starlight": "^0.37.0",
"@eslint/js": "^9.33.0",
"@vitest/coverage-v8": "^4.0.16",
"@vitest/ui": "^4.0.16",
"archiver": "^7.0.1",
"astro": "^5.16.0",
"c8": "^10.1.3",

@@ -105,6 +112,7 @@
"prettier": "^3.7.4",
"prettier-plugin-packagejson": "^2.5.19",
"sharp": "^0.33.5",
"vitest": "^4.0.16",
"yaml-eslint-parser": "^1.2.3",
"yaml-lint": "^1.7.0"
},

|
|
@@ -3,7 +3,7 @@ name: 'step-01-document-discovery'
description: 'Discover and inventory all project documents, handling duplicates and organizing file structure'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-01-document-discovery.md'

@@ -3,7 +3,7 @@ name: 'step-02-prd-analysis'
description: 'Read and analyze PRD to extract all FRs and NFRs for coverage validation'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-02-prd-analysis.md'

@@ -3,7 +3,7 @@ name: 'step-03-epic-coverage-validation'
description: 'Validate that all PRD FRs are covered in epics and stories'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-03-epic-coverage-validation.md'

@@ -3,7 +3,7 @@ name: 'step-04-ux-alignment'
description: 'Check for UX document and validate alignment with PRD and Architecture'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-04-ux-alignment.md'

@@ -3,7 +3,7 @@ name: 'step-05-epic-quality-review'
description: 'Validate epics and stories against create-epics-and-stories best practices'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-05-epic-quality-review.md'

@@ -3,7 +3,7 @@ name: 'step-06-final-assessment'
description: 'Compile final assessment and polish the readiness report'

# Path Definitions
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/implementation-readiness'
workflow_path: '{project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness'

# File References
thisStepFile: './step-06-final-assessment.md'

|
@@ -88,11 +88,11 @@ Use holistic judgment, not mechanical keyword matching.

### No Escalation (simple request)

Display: "**Select:** [T] Plan first (tech-spec) [E] Execute directly"
Display: "**Select:** [P] Plan first (tech-spec) [E] Execute directly"

#### Menu Handling Logic:

- IF T: Direct user to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF P: Direct user to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF E: Ask for any additional guidance, then **NEXT:** Read fully and follow: `step-02-context-gathering.md`

#### EXECUTION RULES:

@@ -108,13 +108,13 @@ Present: "This looks like a focused feature with multiple components."

Display:

**[T] Create tech-spec first** (recommended)
**[P] Plan first (tech-spec)** (recommended)
**[W] Seems bigger than quick-dev** - Recommend the Full BMad Flow PRD Process
**[E] Execute directly**

#### Menu Handling Logic:

- IF T: Direct to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF P: Direct to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF W: Direct user to run the PRD workflow instead. **EXIT Quick Dev.**
- IF E: Ask for guidance, then **NEXT:** Read fully and follow: `step-02-context-gathering.md`

@@ -132,12 +132,12 @@ Present: "This sounds like platform/system work."
Display:

**[W] Start BMad Method** (recommended)
**[T] Create tech-spec** (lighter planning)
**[P] Plan first (tech-spec)** (lighter planning)
**[E] Execute directly** - feeling lucky

#### Menu Handling Logic:

- IF T: Direct to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF P: Direct to `{quick_spec_workflow}`. **EXIT Quick Dev.**
- IF W: Direct user to run the PRD workflow instead. **EXIT Quick Dev.**
- IF E: Ask for guidance, then **NEXT:** Read fully and follow: `step-02-context-gathering.md`

@@ -154,7 +154,7 @@ Display:

- Mode A (tech-spec): "**NEXT:** read fully and follow: `step-03-execute.md`"
- Mode B (direct, [E] selected): "**NEXT:** Read fully and follow: `step-02-context-gathering.md`"
- Escalation ([T] or [W]): "**EXITING Quick Dev.** Follow the directed workflow."
- Escalation ([P] or [W]): "**EXITING Quick Dev.** Follow the directed workflow."

---

|
@@ -29,15 +29,15 @@ Present: "How would you like to handle these findings?"

Display:

**[1] Walk through** - Discuss each finding individually
**[2] Auto-fix** - Automatically fix issues classified as "real"
**[3] Skip** - Acknowledge and proceed to commit
**[W] Walk through** - Discuss each finding individually
**[F] Fix automatically** - Automatically fix issues classified as "real"
**[S] Skip** - Acknowledge and proceed to commit

### Menu Handling Logic:

- IF 1: Execute OPTION 1 (Walk Through) below
- IF 2: Execute OPTION 2 (Auto-fix) below
- IF 3: Execute OPTION 3 (Skip) below
- IF W: Execute WALK THROUGH section below
- IF F: Execute FIX AUTOMATICALLY section below
- IF S: Execute SKIP section below

### EXECUTION RULES:

@@ -46,7 +46,7 @@ Display:

---

## OPTION 1: WALK THROUGH
## WALK THROUGH [W]

For each finding in order:

@@ -61,7 +61,7 @@ After all findings processed, summarize what was fixed/skipped.

---

## OPTION 2: AUTO-FIX
## FIX AUTOMATICALLY [F]

1. Filter findings to only those classified as "real"
2. Apply fixes for each real finding

@@ -78,7 +78,7 @@ Skipped (noise/uncertain): F2, F4

---

## OPTION 3: SKIP
## SKIP [S]

1. Acknowledge all findings were reviewed
2. Note that user chose to proceed without fixes

|
@@ -43,14 +43,14 @@ wipFile: '{implementation_artifacts}/tech-spec-wip.md'

**Present review menu:**

Display: "**Select:** [Y] Approve [C] Changes [Q] Questions [A] Advanced Elicitation [P] Party Mode"
Display: "**Select:** [C] Continue [E] Edit [Q] Questions [A] Advanced Elicitation [P] Party Mode"

**HALT and wait for user selection.**

#### Menu Handling Logic:

- IF Y: Proceed to Section 3 (Finalize the Spec)
- IF C: Proceed to Section 2 (Handle Review Feedback), then return here and redisplay menu
- IF C: Proceed to Section 3 (Finalize the Spec)
- IF E: Proceed to Section 2 (Handle Review Feedback), then return here and redisplay menu
- IF Q: Answer questions, then redisplay this menu
- IF A: Read fully and follow: `{advanced_elicitation}` with current spec content, process enhanced insights, ask user "Accept improvements? (y/n)", if yes update spec then redisplay menu, if no keep original then redisplay menu
- IF P: Read fully and follow: `{party_mode_exec}` with current spec content, process collaborative insights, ask user "Accept changes? (y/n)", if yes update spec then redisplay menu, if no keep original then redisplay menu

@@ -59,7 +59,7 @@ Display: "**Select:** [Y] Approve [C] Changes [Q] Questions [A] Advanced Elicita
#### EXECUTION RULES:

- ALWAYS halt and wait for user input after presenting menu
- ONLY proceed to finalize when user selects 'Y'
- ONLY proceed to finalize when user selects 'C'
- After other menu items execution, return to this menu

### 2. Handle Review Feedback

@@ -115,11 +115,11 @@ Saved to: {finalFile}

**Next Steps:**

[A] Advanced Elicitation - refine further
[R] Adversarial Review - critique of the spec (highly recommended)
[B] Begin Development - start implementing now (not recommended)
[D] Done - exit workflow
[B] Begin Development - start implementing now (not recommended)
[A] Advanced Elicitation - refine further
[P] Party Mode - get expert feedback before dev
[R] Adversarial Review again - critique of the spec (highly recommended)

---

@@ -138,9 +138,9 @@ b) **HALT and wait for user selection.**

#### Menu Handling Logic:

- IF A: Read fully and follow: `{advanced_elicitation}` with current spec content, process enhanced insights, ask user "Accept improvements? (y/n)", if yes update spec then redisplay menu, if no keep original then redisplay menu
- IF B: Load and execute `{quick_dev_workflow}` with the final spec file (warn: fresh context is better)
- IF D: Exit workflow - display final confirmation and path to spec
- IF B: Load and execute `{quick_dev_workflow}` with the final spec file (warn: fresh context is better)
- IF A: Read fully and follow: `{advanced_elicitation}` with current spec content, process enhanced insights, ask user "Accept improvements? (y/n)", if yes update spec then redisplay menu, if no keep original then redisplay menu
- IF P: Read fully and follow: `{party_mode_exec}` with current spec content, process collaborative insights, ask user "Accept changes? (y/n)", if yes update spec then redisplay menu, if no keep original then redisplay menu
- IF R: Execute Adversarial Review (see below)
- IF Any other comments or queries: respond helpfully then redisplay menu

|
@@ -0,0 +1,83 @@
import fs from 'fs-extra';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import yaml from 'yaml';
import xml2js from 'xml2js';

// Get the directory of this module
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

/**
 * Load a fixture file
 * @param {string} fixturePath - Relative path to fixture from test/fixtures/
 * @returns {Promise<string>} File content
 */
export async function loadFixture(fixturePath) {
  const fullPath = path.join(__dirname, '..', 'fixtures', fixturePath);
  return fs.readFile(fullPath, 'utf8');
}

/**
 * Load a YAML fixture
 * @param {string} fixturePath - Relative path to YAML fixture
 * @returns {Promise<Object>} Parsed YAML object
 */
export async function loadYamlFixture(fixturePath) {
  const content = await loadFixture(fixturePath);
  return yaml.parse(content);
}

/**
 * Load an XML fixture
 * @param {string} fixturePath - Relative path to XML fixture
 * @returns {Promise<Object>} Parsed XML object
 */
export async function loadXmlFixture(fixturePath) {
  const content = await loadFixture(fixturePath);
  return xml2js.parseStringPromise(content);
}

/**
 * Load a JSON fixture
 * @param {string} fixturePath - Relative path to JSON fixture
 * @returns {Promise<Object>} Parsed JSON object
 */
export async function loadJsonFixture(fixturePath) {
  const content = await loadFixture(fixturePath);
  return JSON.parse(content);
}

/**
 * Check if a fixture file exists
 * @param {string} fixturePath - Relative path to fixture
 * @returns {Promise<boolean>} True if fixture exists
 */
export async function fixtureExists(fixturePath) {
  const fullPath = path.join(__dirname, '..', 'fixtures', fixturePath);
  return fs.pathExists(fullPath);
}

/**
 * Get the full path to a fixture
 * @param {string} fixturePath - Relative path to fixture
 * @returns {string} Full path to fixture
 */
export function getFixturePath(fixturePath) {
  return path.join(__dirname, '..', 'fixtures', fixturePath);
}

/**
 * Create a test file in a temporary directory
 * (Re-exported from temp-dir for convenience)
 * @param {string} tmpDir - Temporary directory path
 * @param {string} relativePath - Relative path for the file
 * @param {string} content - File content
 * @returns {Promise<string>} Full path to the created file
 */
export async function createTestFile(tmpDir, relativePath, content) {
  const fullPath = path.join(tmpDir, relativePath);
  await fs.ensureDir(path.dirname(fullPath));
  await fs.writeFile(fullPath, content, 'utf8');
  return fullPath;
}

|
@@ -0,0 +1,82 @@
import fs from 'fs-extra';
import path from 'node:path';
import os from 'node:os';
import { randomUUID } from 'node:crypto';

/**
 * Create a temporary directory for testing
 * @param {string} prefix - Prefix for the directory name
 * @returns {Promise<string>} Path to the created temporary directory
 */
export async function createTempDir(prefix = 'bmad-test-') {
  const tmpDir = path.join(os.tmpdir(), `${prefix}${randomUUID()}`);
  await fs.ensureDir(tmpDir);
  return tmpDir;
}

/**
 * Clean up a temporary directory
 * @param {string} tmpDir - Path to the temporary directory
 * @returns {Promise<void>}
 */
export async function cleanupTempDir(tmpDir) {
  if (await fs.pathExists(tmpDir)) {
    await fs.remove(tmpDir);
  }
}

/**
 * Execute a test function with a temporary directory
 * Automatically creates and cleans up the directory
 * @param {Function} testFn - Test function that receives the temp directory path
 * @returns {Promise<void>}
 */
export async function withTempDir(testFn) {
  const tmpDir = await createTempDir();
  try {
    await testFn(tmpDir);
  } finally {
    await cleanupTempDir(tmpDir);
  }
}

/**
 * Create a test file in a temporary directory
 * @param {string} tmpDir - Temporary directory path
 * @param {string} relativePath - Relative path for the file
 * @param {string} content - File content
 * @returns {Promise<string>} Full path to the created file
 */
export async function createTestFile(tmpDir, relativePath, content) {
  const fullPath = path.join(tmpDir, relativePath);
  await fs.ensureDir(path.dirname(fullPath));
  await fs.writeFile(fullPath, content, 'utf8');
  return fullPath;
}

/**
 * Create multiple test files in a temporary directory
 * @param {string} tmpDir - Temporary directory path
 * @param {Object} files - Object mapping relative paths to content
 * @returns {Promise<string[]>} Array of created file paths
 */
export async function createTestFiles(tmpDir, files) {
  const paths = [];
  for (const [relativePath, content] of Object.entries(files)) {
    const fullPath = await createTestFile(tmpDir, relativePath, content);
    paths.push(fullPath);
  }
  return paths;
}

/**
 * Create a test directory structure
 * @param {string} tmpDir - Temporary directory path
 * @param {string[]} dirs - Array of relative directory paths
 * @returns {Promise<void>}
 */
export async function createTestDirs(tmpDir, dirs) {
  for (const dir of dirs) {
    await fs.ensureDir(path.join(tmpDir, dir));
  }
}

|
@@ -0,0 +1,26 @@
import { beforeEach, afterEach } from 'vitest';

// Global test setup
beforeEach(() => {
  // Reset environment variables to prevent test pollution
  // Store original env for restoration
  if (!globalThis.__originalEnv) {
    globalThis.__originalEnv = { ...process.env };
  }
});

afterEach(async () => {
  // Restore original environment variables
  if (globalThis.__originalEnv) {
    process.env = { ...globalThis.__originalEnv };
  }

  // Any global cleanup can go here
});

// Increase timeout for file system operations
// (Individual tests can override this if needed)
const DEFAULT_TIMEOUT = 10_000; // 10 seconds

// Make timeout available globally
globalThis.DEFAULT_TEST_TIMEOUT = DEFAULT_TIMEOUT;

|
@ -0,0 +1,428 @@
|
|||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { Config } from '../../../tools/cli/lib/config.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
import yaml from 'yaml';
|
||||
|
||||
describe('Config', () => {
|
||||
let tmpDir;
|
||||
let config;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
config = new Config();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('loadYaml()', () => {
|
||||
it('should load and parse YAML file', async () => {
|
||||
const yamlContent = {
|
||||
key1: 'value1',
|
||||
key2: { nested: 'value2' },
|
||||
array: [1, 2, 3],
|
||||
};
|
||||
|
||||
const configPath = path.join(tmpDir, 'config.yaml');
|
||||
await fs.writeFile(configPath, yaml.stringify(yamlContent));
|
||||
|
||||
const result = await config.loadYaml(configPath);
|
||||
|
||||
expect(result).toEqual(yamlContent);
|
||||
});
|
||||
|
||||
it('should throw error for non-existent file', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'missing.yaml');
|
||||
|
||||
await expect(config.loadYaml(nonExistent)).rejects.toThrow('Configuration file not found');
|
||||
});
|
||||
|
||||
it('should handle Unicode content', async () => {
|
||||
const yamlContent = {
|
||||
chinese: '测试',
|
||||
russian: 'Тест',
|
||||
japanese: 'テスト',
|
||||
};
|
||||
|
||||
const configPath = path.join(tmpDir, 'unicode.yaml');
|
||||
await fs.writeFile(configPath, yaml.stringify(yamlContent));
|
||||
|
||||
const result = await config.loadYaml(configPath);
|
||||
|
||||
expect(result.chinese).toBe('测试');
|
||||
expect(result.russian).toBe('Тест');
|
||||
expect(result.japanese).toBe('テスト');
|
||||
});
|
||||
});
|
||||
|
||||
// Note: saveYaml() is not tested because it uses yaml.dump() which doesn't exist
|
||||
// in yaml 2.7.0 (should use yaml.stringify). This method is never called in production
|
||||
// and represents dead code with a latent bug.
|
||||
|
||||
describe('processConfig()', () => {
|
||||
it('should replace {project-root} placeholder', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Root is {project-root}/bmad');
|
||||
|
||||
await config.processConfig(configPath, { root: '/home/user/project' });
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe('Root is /home/user/project/bmad');
|
||||
});
|
||||
|
||||
it('should replace {module} placeholder', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Module: {module}');
|
||||
|
||||
await config.processConfig(configPath, { module: 'bmm' });
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe('Module: bmm');
|
||||
});
|
||||
|
||||
it('should replace {version} placeholder with package version', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Version: {version}');
|
||||
|
||||
await config.processConfig(configPath);
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toMatch(/Version: \d+\.\d+\.\d+/); // Semver format
|
||||
});
|
||||
|
||||
it('should replace {date} placeholder with current date', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Date: {date}');
|
||||
|
||||
await config.processConfig(configPath);
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toMatch(/Date: \d{4}-\d{2}-\d{2}/); // YYYY-MM-DD
|
||||
});
|
||||
|
||||
it('should replace multiple placeholders', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Root: {project-root}, Module: {module}, Version: {version}');
|
||||
|
||||
await config.processConfig(configPath, {
|
||||
root: '/project',
|
||||
module: 'test',
|
||||
});
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toContain('Root: /project');
|
||||
expect(content).toContain('Module: test');
|
||||
expect(content).toMatch(/Version: \d+\.\d+/);
|
||||
});
|
||||
|
||||
it('should replace custom placeholders', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Custom: {custom-placeholder}');
|
||||
|
||||
await config.processConfig(configPath, { '{custom-placeholder}': 'custom-value' });
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe('Custom: custom-value');
|
||||
});
|
||||
|
||||
it('should escape regex special characters in placeholders', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Path: {project-root}/test');
|
||||
|
||||
// Test that {project-root} doesn't get interpreted as regex
|
||||
await config.processConfig(configPath, {
|
||||
root: '/path/with/special$chars^',
|
||||
});
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe('Path: /path/with/special$chars^/test');
|
||||
});
|
||||
|
||||
it('should handle placeholders with regex metacharacters in values', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, 'Value: {placeholder}');
|
||||
|
||||
await config.processConfig(configPath, {
|
||||
'{placeholder}': String.raw`value with $1 and \backslash`,
|
||||
});
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe(String.raw`Value: value with $1 and \backslash`);
|
||||
});
|
||||
|
||||
it('should replace all occurrences of placeholder', async () => {
|
||||
const configPath = path.join(tmpDir, 'config.txt');
|
||||
await fs.writeFile(configPath, '{module} is here and {module} is there and {module} everywhere');
|
||||
|
||||
await config.processConfig(configPath, { module: 'BMM' });
|
||||
|
||||
const content = await fs.readFile(configPath, 'utf8');
|
||||
expect(content).toBe('BMM is here and BMM is there and BMM everywhere');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deepMerge()', () => {
|
||||
it('should merge shallow objects', () => {
|
||||
const target = { a: 1, b: 2 };
|
||||
const source = { b: 3, c: 4 };
|
||||
|
||||
const result = config.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 3, c: 4 });
|
||||
});
|
||||
|
||||
it('should merge nested objects', () => {
|
||||
const target = { level1: { a: 1, b: 2 } };
|
||||
const source = { level1: { b: 3, c: 4 } };
|
||||
|
||||
const result = config.deepMerge(target, source);
|
||||
|
||||
expect(result.level1).toEqual({ a: 1, b: 3, c: 4 });
|
||||
});
|
||||
|
||||
it('should not merge arrays (just replace)', () => {
|
||||
const target = { items: [1, 2, 3] };
|
||||
const source = { items: [4, 5] };
|
||||
|
||||
const result = config.deepMerge(target, source);
|
||||
|
||||
expect(result.items).toEqual([4, 5]); // Replaced, not merged
|
||||
});
|
||||
|
||||
it('should handle null values', () => {
|
||||
const target = { a: 'value', b: null };
|
||||
const source = { a: null, c: 'new' };
|
||||
|
||||
const result = config.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ a: null, b: null, c: 'new' });
|
||||
});
|
||||
|
||||
it('should not mutate original objects', () => {
|
||||
const target = { a: 1 };
|
||||
const source = { b: 2 };
|
||||
|
||||
config.deepMerge(target, source);
|
||||
|
||||
expect(target).toEqual({ a: 1 });
|
||||
expect(source).toEqual({ b: 2 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeConfigs()', () => {
|
||||
it('should delegate to deepMerge', () => {
|
||||
const base = { setting1: 'base' };
|
||||
const override = { setting2: 'override' };
|
||||
|
||||
const result = config.mergeConfigs(base, override);
|
||||
|
||||
expect(result).toEqual({ setting1: 'base', setting2: 'override' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('isObject()', () => {
|
||||
it('should return true for plain objects', () => {
|
||||
expect(config.isObject({})).toBe(true);
|
||||
expect(config.isObject({ key: 'value' })).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for arrays', () => {
|
||||
expect(config.isObject([])).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for null', () => {
|
||||
expect(config.isObject(null)).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should return false for primitives', () => {
|
||||
expect(config.isObject('string')).toBe(false);
|
||||
expect(config.isObject(42)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getValue() and setValue()', () => {
|
||||
it('should get value by dot notation path', () => {
|
||||
const obj = {
|
||||
level1: {
|
||||
level2: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = config.getValue(obj, 'level1.level2.value');
|
||||
|
||||
expect(result).toBe('test');
|
||||
});
|
||||
|
||||
it('should set value by dot notation path', () => {
|
||||
const obj = {
|
||||
level1: {
|
||||
level2: {},
|
||||
},
|
||||
};
|
||||
|
||||
config.setValue(obj, 'level1.level2.value', 'new value');
|
||||
|
||||
expect(obj.level1.level2.value).toBe('new value');
|
||||
});
|
||||
|
||||
it('should return default value for non-existent path', () => {
|
||||
const obj = { a: { b: 'value' } };
|
||||
|
||||
const result = config.getValue(obj, 'a.c.d', 'default');
|
||||
|
||||
expect(result).toBe('default');
|
||||
});
|
||||
|
||||
it('should return null default when path not found', () => {
|
||||
const obj = { a: { b: 'value' } };
|
||||
|
||||
const result = config.getValue(obj, 'a.c.d');
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle simple (non-nested) paths', () => {
|
||||
const obj = { key: 'value' };
|
||||
|
||||
expect(config.getValue(obj, 'key')).toBe('value');
|
||||
|
||||
config.setValue(obj, 'newKey', 'newValue');
|
||||
expect(obj.newKey).toBe('newValue');
|
||||
});
|
||||
|
||||
it('should create intermediate objects when setting deep paths', () => {
|
||||
const obj = {};
|
||||
|
||||
config.setValue(obj, 'a.b.c.d', 'deep value');
|
||||
|
||||
expect(obj.a.b.c.d).toBe('deep value');
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateConfig()', () => {
|
||||
it('should validate required fields', () => {
|
||||
const cfg = { field1: 'value1' };
|
||||
const schema = {
|
||||
required: ['field1', 'field2'],
|
||||
};
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toContain('Missing required field: field2');
|
||||
});
|
||||
|
||||
it('should pass when all required fields present', () => {
|
||||
const cfg = { field1: 'value1', field2: 'value2' };
|
||||
const schema = {
|
||||
required: ['field1', 'field2'],
|
||||
};
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should validate field types', () => {
|
||||
const cfg = {
|
||||
stringField: 'text',
|
||||
numberField: '42', // Wrong type
|
||||
arrayField: [1, 2, 3],
|
||||
objectField: 'not-object', // Wrong type
|
||||
boolField: true,
|
||||
};
|
||||
|
||||
const schema = {
|
||||
properties: {
|
||||
stringField: { type: 'string' },
|
||||
numberField: { type: 'number' },
|
||||
arrayField: { type: 'array' },
|
||||
objectField: { type: 'object' },
|
||||
boolField: { type: 'boolean' },
|
||||
},
|
||||
};
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('numberField'))).toBe(true);
|
||||
expect(result.errors.some((e) => e.includes('objectField'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate enum values', () => {
|
||||
const cfg = { level: 'expert' };
|
||||
const schema = {
|
||||
properties: {
|
||||
level: { type: 'string', enum: ['beginner', 'intermediate', 'advanced'] },
|
||||
},
|
||||
};
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('must be one of'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should pass validation for valid enum value', () => {
|
||||
const cfg = { level: 'intermediate' };
|
||||
const schema = {
|
||||
properties: {
|
||||
level: { type: 'string', enum: ['beginner', 'intermediate', 'advanced'] },
|
||||
},
|
||||
};
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.valid).toBe(true);
|
||||
});
|
||||
|
||||
it('should return warnings array', () => {
|
||||
const cfg = { field: 'value' };
|
||||
const schema = { required: ['field'] };
|
||||
|
||||
const result = config.validateConfig(cfg, schema);
|
||||
|
||||
expect(result.warnings).toBeDefined();
|
||||
expect(Array.isArray(result.warnings)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty YAML file', async () => {
|
||||
const configPath = path.join(tmpDir, 'empty.yaml');
|
||||
await fs.writeFile(configPath, '');
|
||||
|
||||
const result = await config.loadYaml(configPath);
|
||||
|
||||
expect(result).toBeNull(); // Empty YAML parses to null
|
||||
});
|
||||
|
||||
it('should handle YAML with only comments', async () => {
|
||||
const configPath = path.join(tmpDir, 'comments.yaml');
|
||||
await fs.writeFile(configPath, '# Just a comment\n# Another comment\n');
|
||||
|
||||
const result = await config.loadYaml(configPath);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle very deep object nesting', () => {
|
||||
const deep = {
|
||||
l1: { l2: { l3: { l4: { l5: { l6: { l7: { l8: { value: 'deep' } } } } } } } },
|
||||
};
|
||||
const override = {
|
||||
l1: { l2: { l3: { l4: { l5: { l6: { l7: { l8: { value: 'updated' } } } } } } } },
|
||||
};
|
||||
|
||||
const result = config.deepMerge(deep, override);
|
||||
|
||||
expect(result.l1.l2.l3.l4.l5.l6.l7.l8.value).toBe('updated');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -0,0 +1,558 @@
|
|||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { DependencyResolver } from '../../../tools/cli/installers/lib/core/dependency-resolver.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
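// Covers advanced DependencyResolver scenarios: module paths, globs, template and bmad-path
// dependencies, cross-module command references, verbose output, web bundles, and deduplication.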
|
||||
|
||||
describe('DependencyResolver - Advanced Scenarios', () => {
|
||||
let tmpDir;
|
||||
let bmadDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
bmadDir = path.join(tmpDir, 'src');
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'agents'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'tasks'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'templates'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'agents'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'tasks'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'templates'));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('module path resolution', () => {
|
||||
it('should resolve bmad/bmm/tasks/task.md (module path)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/bmm/tasks/analyze.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/analyze.md', 'BMM Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('bmm'))).toBe(true);
|
||||
expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle glob in module path bmad/bmm/tasks/*.md', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/bmm/tasks/*.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/task1.md', 'Task 1');
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/task2.md', 'Task 2');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']); // Include bmm module
|
||||
|
||||
// Should resolve glob pattern
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should handle non-existent module path gracefully', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/nonexistent/tasks/task.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should not crash, just skip the missing dependency
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('relative glob patterns', () => {
|
||||
it('should resolve relative glob patterns ../tasks/*.md', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["../tasks/*.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
|
||||
await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
|
||||
});
|
||||
|
||||
it('should handle glob pattern with no matches', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["../tasks/nonexistent-*.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should handle gracefully - only the agent itself is resolved
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle glob in non-existent directory', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["../nonexistent/*.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should handle gracefully
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('template dependencies', () => {
|
||||
it('should resolve template with {project-root} prefix', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task.md',
|
||||
`---
|
||||
template: "{project-root}/bmad/core/templates/form.yaml"
|
||||
---
|
||||
Task content`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Template dependency should be resolved
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should resolve template from module path', async () => {
|
||||
await createTestFile(bmadDir, 'modules/bmm/agents/agent.md', '<agent>BMM Agent</agent>');
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'modules/bmm/tasks/task.md',
|
||||
`---
|
||||
template: "{project-root}/bmad/bmm/templates/prd-template.yaml"
|
||||
---
|
||||
Task`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/templates/prd-template.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
// Should resolve files from BMM module
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should handle missing template gracefully', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task.md',
|
||||
`---
|
||||
template: "../templates/missing.yaml"
|
||||
---
|
||||
Task`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should not crash
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('bmad-path type resolution', () => {
|
||||
it('should resolve bmad-path dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command exec="bmad/core/tasks/analyze" />
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should resolve bmad-path for module files', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command exec="bmad/bmm/tasks/create-prd" />
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/create-prd.md', 'PRD Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('create-prd.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle non-existent bmad-path gracefully', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command exec="bmad/core/tasks/missing" />
|
||||
</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should not crash
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('command resolution with modules', () => {
|
||||
it('should search multiple modules for @task-name', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
Use @task-custom-task
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/custom-task.md', 'Custom Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('custom-task.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should search multiple modules for @agent-name', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/main.md',
|
||||
`<agent>
|
||||
Use @agent-pm
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'modules/bmm/agents/pm.md', '<agent>PM</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('pm.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle bmad/ path with 4+ segments', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
Reference bmad/core/tasks/nested/deep/task
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/nested/deep/task.md', 'Deep task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Implementation may or may not support deeply nested paths in commands
|
||||
// Just verify it doesn't crash
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should handle a bmad path that already has the .md extension', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
Use bmad/core/tasks/task.md explicitly
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('task.md'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('verbose mode', () => {
|
||||
it('should include console output when verbose is true', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Test</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
// Mock console.log to capture output
|
||||
const logs = [];
|
||||
const originalLog = console.log;
|
||||
console.log = (...args) => logs.push(args.join(' '));
|
||||
|
||||
await resolver.resolve(bmadDir, [], { verbose: true });
|
||||
|
||||
console.log = originalLog;
|
||||
|
||||
// Should have logged something in verbose mode
|
||||
expect(logs.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should not log when verbose is false', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Test</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
const logs = [];
|
||||
const originalLog = console.log;
|
||||
console.log = (...args) => logs.push(args.join(' '));
|
||||
|
||||
await resolver.resolve(bmadDir, [], { verbose: false });
|
||||
|
||||
console.log = originalLog;
|
||||
|
||||
// Should not have logged in non-verbose mode
|
||||
// (there may be warnings, but no regular console.log output)
|
||||
expect(logs.length).toBe(0);
|
||||
});
|
||||
});
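// Note: the manual console.log swap used in the verbose-mode tests above could arguably be
// replaced with Vitest's spy API; a minimal sketch (assuming no other output interception is needed):
//   const spy = vi.spyOn(console, 'log').mockImplementation(() => {});
//   await resolver.resolve(bmadDir, [], { verbose: true });
//   expect(spy).toHaveBeenCalled();
//   spy.mockRestore();
// This would also require importing `vi` from 'vitest' at the top of the file.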
|
||||
|
||||
describe('createWebBundle()', () => {
|
||||
it('should create bundle with metadata', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const resolution = await resolver.resolve(bmadDir, []);
|
||||
|
||||
const bundle = await resolver.createWebBundle(resolution);
|
||||
|
||||
expect(bundle.metadata).toBeDefined();
|
||||
expect(bundle.metadata.modules).toContain('core');
|
||||
expect(bundle.metadata.totalFiles).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should organize bundle by file type', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const resolution = await resolver.resolve(bmadDir, []);
|
||||
|
||||
const bundle = await resolver.createWebBundle(resolution);
|
||||
|
||||
expect(bundle.agents).toBeDefined();
|
||||
expect(bundle.tasks).toBeDefined();
|
||||
expect(bundle.templates).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('single string dependency (not array)', () => {
|
||||
it('should handle single string dependency (converted to array)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: "{project-root}/bmad/core/tasks/task.md"
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Single string should be converted to array internally
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
|
||||
it('should handle single string template', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task.md',
|
||||
`---
|
||||
template: "../templates/form.yaml"
|
||||
---
|
||||
Task`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('form.yaml'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('missing dependency tracking', () => {
|
||||
it('should track missing relative file dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["../tasks/missing-file.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// result.missing should be present (the >= 0 length check is a smoke test; the main point is that resolution completes)
|
||||
expect(result.missing.length).toBeGreaterThanOrEqual(0);
|
||||
// Should not crash
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('reportResults()', () => {
|
||||
it('should report results with file counts', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent1.md', '<agent>1</agent>');
|
||||
await createTestFile(bmadDir, 'core/agents/agent2.md', '<agent>2</agent>');
|
||||
await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
|
||||
await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
|
||||
await createTestFile(bmadDir, 'core/templates/template.yaml', 'Template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
// Mock console.log
|
||||
const logs = [];
|
||||
const originalLog = console.log;
|
||||
console.log = (...args) => logs.push(args.join(' '));
|
||||
|
||||
const result = await resolver.resolve(bmadDir, [], { verbose: true });
|
||||
|
||||
console.log = originalLog;
|
||||
|
||||
// Should have reported module statistics
|
||||
expect(logs.some((log) => log.includes('CORE'))).toBe(true);
|
||||
expect(logs.some((log) => log.includes('Agents:'))).toBe(true);
|
||||
expect(logs.some((log) => log.includes('Tasks:'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should report missing dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["../tasks/missing.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
const logs = [];
|
||||
const originalLog = console.log;
|
||||
console.log = (...args) => logs.push(args.join(' '));
|
||||
|
||||
await resolver.resolve(bmadDir, [], { verbose: true });
|
||||
|
||||
console.log = originalLog;
|
||||
|
||||
// Verbose mode reports module statistics, and a warning about the missing dependency may also appear
|
||||
expect(logs.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('file without .md extension in command', () => {
|
||||
it('should add .md extension to bmad/ commands without extension', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
Use bmad/core/tasks/analyze without extension
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Analyze');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('module structure detection', () => {
|
||||
it('should detect source directory structure (src/)', async () => {
|
||||
// Default structure already uses src/
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Core</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should detect installed directory structure (no src/)', async () => {
|
||||
// Create installed structure
|
||||
const installedDir = path.join(tmpDir, 'installed');
|
||||
await fs.ensureDir(path.join(installedDir, 'core', 'agents'));
|
||||
await fs.ensureDir(path.join(installedDir, 'modules', 'bmm', 'agents'));
|
||||
await createTestFile(installedDir, 'core/agents/agent.md', '<agent>Core</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(installedDir, []);
|
||||
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('dependency deduplication', () => {
|
||||
it('should not include same file twice', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent1.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/shared.md"]
|
||||
---
|
||||
<agent>1</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent2.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/shared.md"]
|
||||
---
|
||||
<agent>2</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/shared.md', 'Shared');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should have 2 agents + 1 shared task = 3 unique files
|
||||
expect(result.allFiles).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -0,0 +1,796 @@
|
|||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { DependencyResolver } from '../../../tools/cli/installers/lib/core/dependency-resolver.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
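// Covers core DependencyResolver behavior: basic resolution, path and command-reference variants,
// multi-pass and circular dependency handling, module organization, and edge cases.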
|
||||
|
||||
describe('DependencyResolver', () => {
|
||||
let tmpDir;
|
||||
let bmadDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
// Create structure: tmpDir/src/core and tmpDir/src/modules/
|
||||
bmadDir = path.join(tmpDir, 'src');
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'agents'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'tasks'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'core', 'templates'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'agents'));
|
||||
await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'tasks'));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('basic resolution', () => {
|
||||
it('should resolve core agents with no dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/simple.md',
|
||||
`---
|
||||
name: simple
|
||||
---
|
||||
<agent>Simple agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
expect(result.primaryFiles[0].type).toBe('agent');
|
||||
expect(result.primaryFiles[0].module).toBe('core');
|
||||
expect(result.allFiles).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should resolve multiple agents from same module', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent1.md', '<agent>Agent 1</agent>');
|
||||
await createTestFile(bmadDir, 'core/agents/agent2.md', '<agent>Agent 2</agent>');
|
||||
await createTestFile(bmadDir, 'core/agents/agent3.md', '<agent>Agent 3</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles).toHaveLength(3);
|
||||
expect(result.allFiles).toHaveLength(3);
|
||||
});
|
||||
|
||||
it('should always include core module', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
// Core should be included even though only 'bmm' was requested
|
||||
expect(result.byModule.core).toBeDefined();
|
||||
});
|
||||
|
||||
it('should skip agents with localskip="true"', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/normal.md', '<agent>Normal agent</agent>');
|
||||
await createTestFile(bmadDir, 'core/agents/webonly.md', '<agent localskip="true">Web only agent</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
expect(result.primaryFiles[0].name).toBe('normal');
|
||||
});
|
||||
});
|
||||
|
||||
describe('path resolution variations', () => {
|
||||
it('should resolve {project-root}/bmad/core/tasks/foo.md dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task.md"]
|
||||
---
|
||||
<agent>Agent with task dependency</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task content');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(2);
|
||||
expect(result.dependencies.size).toBeGreaterThan(0);
|
||||
expect([...result.dependencies].some((d) => d.includes('task.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should resolve relative path dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
template: "../templates/template.yaml"
|
||||
---
|
||||
<agent>Agent with template</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/template.yaml', 'template: data');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(2);
|
||||
expect([...result.dependencies].some((d) => d.includes('template.yaml'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should resolve glob pattern dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/*.md"]
|
||||
---
|
||||
<agent>Agent with multiple tasks</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
|
||||
await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
|
||||
await createTestFile(bmadDir, 'core/tasks/task3.md', 'Task 3');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should find agent + 3 tasks
|
||||
expect(result.allFiles).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('should resolve array of dependencies', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies:
|
||||
- "{project-root}/bmad/core/tasks/task1.md"
|
||||
- "{project-root}/bmad/core/tasks/task2.md"
|
||||
- "../templates/template.yaml"
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
|
||||
await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
|
||||
await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(4); // agent + 2 tasks + template
|
||||
});
|
||||
});
|
||||
|
||||
describe('command reference resolution', () => {
|
||||
it('should resolve @task-name references', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
Use @task-analyze for analysis
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Analyze task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
|
||||
expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should resolve @agent-name references', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/main.md',
|
||||
`<agent>
|
||||
Reference @agent-helper for help
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/agents/helper.md', '<agent>Helper</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(2);
|
||||
expect([...result.allFiles].some((f) => f.includes('helper.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should resolve bmad/module/type/name references', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
See bmad/core/tasks/review
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/review.md', 'Review task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('review.md'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('exec and tmpl attribute parsing', () => {
|
||||
it('should parse exec attributes from command tags', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command exec="{project-root}/bmad/core/tasks/task.md" />
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('task.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should parse tmpl attributes from command tags', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command tmpl="../templates/form.yaml" />
|
||||
</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect([...result.allFiles].some((f) => f.includes('form.yaml'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should ignore exec="*" wildcard', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`<agent>
|
||||
<command exec="*" description="Dynamic" />
|
||||
</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should only have the agent itself
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('multi-pass dependency resolution', () => {
|
||||
it('should resolve single-level dependencies (A→B)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
|
||||
---
|
||||
<agent>Agent A</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task-b.md', 'Task B');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(2);
|
||||
// Primary files include both agents and tasks from the selected modules
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
expect(result.dependencies.size).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should resolve two-level dependencies (A→B→C)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
|
||||
---
|
||||
<agent>Agent A</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-b.md',
|
||||
`---
|
||||
template: "../templates/template-c.yaml"
|
||||
---
|
||||
Task B content`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/template-c.yaml', 'template: data');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(3);
|
||||
// Primary files include agents and tasks
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
// Direct plus transitive dependencies should be non-empty; how they split between the two sets is implementation-defined, so only a lower bound of 1 is asserted
|
||||
const totalDeps = result.dependencies.size + result.transitiveDependencies.size;
|
||||
expect(totalDeps).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should resolve three-level dependencies (A→B→C→D)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-b.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-c.md"]
|
||||
---
|
||||
Task B`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-c.md',
|
||||
`---
|
||||
template: "../templates/template-d.yaml"
|
||||
---
|
||||
Task C`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/template-d.yaml', 'Template D');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(4);
|
||||
});
|
||||
|
||||
it('should resolve multiple branches (A→B, A→C)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies:
|
||||
- "{project-root}/bmad/core/tasks/task-b.md"
|
||||
- "{project-root}/bmad/core/tasks/task-c.md"
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task-b.md', 'Task B');
|
||||
await createTestFile(bmadDir, 'core/tasks/task-c.md', 'Task C');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.allFiles).toHaveLength(3);
|
||||
expect(result.dependencies.size).toBe(2);
|
||||
});
|
||||
|
||||
it('should deduplicate diamond pattern (A→B,C; B,C→D)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies:
|
||||
- "{project-root}/bmad/core/tasks/task-b.md"
|
||||
- "{project-root}/bmad/core/tasks/task-c.md"
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-b.md',
|
||||
`---
|
||||
template: "../templates/shared.yaml"
|
||||
---
|
||||
Task B`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-c.md',
|
||||
`---
|
||||
template: "../templates/shared.yaml"
|
||||
---
|
||||
Task C`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/templates/shared.yaml', 'Shared template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// A + B + C + the shared template (D) = 4 unique files; D is referenced twice but should be deduplicated
|
||||
expect(result.allFiles).toHaveLength(4);
|
||||
});
|
||||
});
|
||||
|
||||
describe('circular dependency detection', () => {
|
||||
it('should detect direct circular dependency (A→B→A)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-b.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
|
||||
---
|
||||
Task B`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
// Should not hang or crash
|
||||
const resultPromise = resolver.resolve(bmadDir, []);
|
||||
await expect(resultPromise).resolves.toBeDefined();
|
||||
|
||||
const result = await resultPromise;
|
||||
// Should process both files without an infinite loop
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
|
||||
}, 5000); // 5 second timeout to ensure no infinite loop
|
||||
|
||||
it('should detect indirect circular dependency (A→B→C→A)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-b.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/task-c.md"]
|
||||
---
|
||||
Task B`,
|
||||
);
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/tasks/task-c.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
|
||||
---
|
||||
Task C`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const resultPromise = resolver.resolve(bmadDir, []);
|
||||
|
||||
await expect(resultPromise).resolves.toBeDefined();
|
||||
const result = await resultPromise;
|
||||
|
||||
// Should include all 3 files without duplicates
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
|
||||
}, 5000);
|
||||
|
||||
it('should handle self-reference (A→A)', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent-a.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
|
||||
---
|
||||
<agent>A</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Should include the file exactly once rather than recursing indefinitely
|
||||
expect(result.allFiles).toHaveLength(1);
|
||||
}, 5000);
|
||||
});
|
||||
|
||||
describe('command reference parsing', () => {
|
||||
describe('parseCommandReferences()', () => {
|
||||
it('should extract @task- references', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = 'Use @task-analyze for analysis\nThen @task-review';
|
||||
|
||||
const refs = resolver.parseCommandReferences(content);
|
||||
|
||||
expect(refs).toContain('@task-analyze');
|
||||
expect(refs).toContain('@task-review');
|
||||
});
|
||||
|
||||
it('should extract @agent- references', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = 'Call @agent-architect then @agent-developer';
|
||||
|
||||
const refs = resolver.parseCommandReferences(content);
|
||||
|
||||
expect(refs).toContain('@agent-architect');
|
||||
expect(refs).toContain('@agent-developer');
|
||||
});
|
||||
|
||||
it('should extract bmad/ path references', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = 'See bmad/core/agents/analyst and bmad/bmm/tasks/review';
|
||||
|
||||
const refs = resolver.parseCommandReferences(content);
|
||||
|
||||
expect(refs).toContain('bmad/core/agents/analyst');
|
||||
expect(refs).toContain('bmad/bmm/tasks/review');
|
||||
});
|
||||
|
||||
it('should extract @bmad- references', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = 'Use @bmad-master command';
|
||||
|
||||
const refs = resolver.parseCommandReferences(content);
|
||||
|
||||
expect(refs).toContain('@bmad-master');
|
||||
});
|
||||
|
||||
it('should handle multiple reference types in same content', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = `
|
||||
Use @task-analyze for analysis
|
||||
Then run @agent-architect
|
||||
Finally check bmad/core/tasks/review
|
||||
`;
|
||||
|
||||
const refs = resolver.parseCommandReferences(content);
|
||||
|
||||
expect(refs.length).toBeGreaterThanOrEqual(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseFileReferences()', () => {
|
||||
it('should extract exec attribute paths', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = '<command exec="{project-root}/bmad/core/tasks/foo.md" />';
|
||||
|
||||
const refs = resolver.parseFileReferences(content);
|
||||
|
||||
expect(refs).toContain('/bmad/core/tasks/foo.md');
|
||||
});
|
||||
|
||||
it('should extract tmpl attribute paths', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = '<command tmpl="../templates/bar.yaml" />';
|
||||
|
||||
const refs = resolver.parseFileReferences(content);
|
||||
|
||||
expect(refs).toContain('../templates/bar.yaml');
|
||||
});
|
||||
|
||||
it('should extract relative file paths', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = 'Load "./data/config.json" and "../templates/form.yaml"';
|
||||
|
||||
const refs = resolver.parseFileReferences(content);
|
||||
|
||||
expect(refs).toContain('./data/config.json');
|
||||
expect(refs).toContain('../templates/form.yaml');
|
||||
});
|
||||
|
||||
it('should skip exec="*" wildcards', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const content = '<command exec="*" description="Dynamic" />';
|
||||
|
||||
const refs = resolver.parseFileReferences(content);
|
||||
|
||||
// Should not include "*"
|
||||
expect(refs).not.toContain('*');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('module organization', () => {
|
||||
it('should organize files by module correctly', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||
await createTestFile(bmadDir, 'modules/bmm/agents/bmm-agent.md', '<agent>BMM</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
expect(result.byModule.core).toBeDefined();
|
||||
expect(result.byModule.bmm).toBeDefined();
|
||||
expect(result.byModule.core.agents).toHaveLength(1);
|
||||
expect(result.byModule.bmm.agents).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should categorize files by type', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const files = [
|
||||
path.join(bmadDir, 'core/agents/agent.md'),
|
||||
path.join(bmadDir, 'core/tasks/task.md'),
|
||||
path.join(bmadDir, 'core/templates/template.yaml'),
|
||||
];
|
||||
|
||||
const organized = resolver.organizeByModule(bmadDir, new Set(files));
|
||||
|
||||
expect(organized.core.agents).toHaveLength(1);
|
||||
expect(organized.core.tasks).toHaveLength(1);
|
||||
expect(organized.core.templates).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should treat brain-tech as data, not tasks', async () => {
|
||||
await createTestFile(bmadDir, 'core/tasks/brain-tech/data.csv', 'col1,col2\nval1,val2');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const files = [path.join(bmadDir, 'core/tasks/brain-tech/data.csv')];
|
||||
|
||||
const organized = resolver.organizeByModule(bmadDir, new Set(files));
|
||||
|
||||
expect(organized.core.data).toHaveLength(1);
|
||||
expect(organized.core.tasks).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getModuleFromPath()', () => {
|
||||
it('should extract module from src/core path', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const filePath = path.join(bmadDir, 'core/agents/agent.md');
|
||||
|
||||
const module = resolver.getModuleFromPath(bmadDir, filePath);
|
||||
|
||||
expect(module).toBe('core');
|
||||
});
|
||||
|
||||
it('should extract module from src/modules/bmm path', () => {
|
||||
const resolver = new DependencyResolver();
|
||||
const filePath = path.join(bmadDir, 'modules/bmm/agents/pm.md');
|
||||
|
||||
const module = resolver.getModuleFromPath(bmadDir, filePath);
|
||||
|
||||
expect(module).toBe('bmm');
|
||||
});
|
||||
|
||||
it('should handle installed directory structure', async () => {
|
||||
// Create installed structure (no src/ prefix)
|
||||
const installedDir = path.join(tmpDir, 'installed');
|
||||
await fs.ensureDir(path.join(installedDir, 'core/agents'));
|
||||
await fs.ensureDir(path.join(installedDir, 'modules/bmm/agents'));
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
const coreFile = path.join(installedDir, 'core/agents/agent.md');
|
||||
const moduleFile = path.join(installedDir, 'modules/bmm/agents/pm.md');
|
||||
|
||||
expect(resolver.getModuleFromPath(installedDir, coreFile)).toBe('core');
|
||||
expect(resolver.getModuleFromPath(installedDir, moduleFile)).toBe('bmm');
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle malformed YAML frontmatter', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/bad-yaml.md',
|
||||
`---
|
||||
dependencies: [invalid: yaml: here
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
|
||||
// Should not crash, just warn and continue
|
||||
await expect(resolver.resolve(bmadDir, [])).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle backticks in YAML values', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/backticks.md',
|
||||
`---
|
||||
name: \`test\`
|
||||
dependencies: [\`{project-root}/bmad/core/tasks/task.md\`]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
// Backticks should be pre-processed
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
|
||||
});
|
||||
|
||||
it('should handle missing dependencies gracefully', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/missing.md"]
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
// Don't create missing.md
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
|
||||
// Implementation may or may not track missing dependencies
|
||||
// Just verify it doesn't crash
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should handle empty dependencies array', async () => {
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'core/agents/agent.md',
|
||||
`---
|
||||
dependencies: []
|
||||
---
|
||||
<agent>Agent</agent>`,
|
||||
);
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
expect(result.allFiles).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle missing frontmatter', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/no-frontmatter.md', '<agent>Agent</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, []);
|
||||
|
||||
expect(result.primaryFiles).toHaveLength(1);
|
||||
expect(result.allFiles).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle non-existent module directory', async () => {
|
||||
// Create at least one core file so the core module appears
|
||||
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['nonexistent']);
|
||||
|
||||
// Should include core even though the 'nonexistent' module was not found
|
||||
expect(result.byModule.core).toBeDefined();
|
||||
expect(result.byModule.nonexistent).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('cross-module dependencies', () => {
|
||||
it('should resolve dependencies across modules', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||
await createTestFile(
|
||||
bmadDir,
|
||||
'modules/bmm/agents/bmm-agent.md',
|
||||
`---
|
||||
dependencies: ["{project-root}/bmad/core/tasks/shared-task.md"]
|
||||
---
|
||||
<agent>BMM Agent</agent>`,
|
||||
);
|
||||
await createTestFile(bmadDir, 'core/tasks/shared-task.md', 'Shared task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
// Should include: core agent + bmm agent + shared task
|
||||
expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
|
||||
expect(result.byModule.core).toBeDefined();
|
||||
expect(result.byModule.bmm).toBeDefined();
|
||||
});
|
||||
|
||||
it('should resolve module tasks', async () => {
|
||||
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||
await createTestFile(bmadDir, 'modules/bmm/agents/pm.md', '<agent>PM</agent>');
|
||||
await createTestFile(bmadDir, 'modules/bmm/tasks/create-prd.md', 'Create PRD task');
|
||||
|
||||
const resolver = new DependencyResolver();
|
||||
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||
|
||||
expect(result.byModule.bmm.agents).toHaveLength(1);
|
||||
expect(result.byModule.bmm.tasks).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@@ -0,0 +1,243 @@
|
|||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
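// Covers FileOps.copyDirectory(): basic copying, overwrite behavior, ignore filters,
// edge cases, and content integrity.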
|
||||
|
||||
describe('FileOps', () => {
|
||||
describe('copyDirectory()', () => {
|
||||
const fileOps = new FileOps();
|
||||
let tmpDir;
|
||||
let sourceDir;
|
||||
let destDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
sourceDir = path.join(tmpDir, 'source');
|
||||
destDir = path.join(tmpDir, 'dest');
|
||||
await fs.ensureDir(sourceDir);
|
||||
await fs.ensureDir(destDir);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('basic copying', () => {
|
||||
it('should copy a single file', async () => {
|
||||
await createTestFile(sourceDir, 'test.txt', 'content');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const destFile = path.join(destDir, 'test.txt');
|
||||
expect(await fs.pathExists(destFile)).toBe(true);
|
||||
expect(await fs.readFile(destFile, 'utf8')).toBe('content');
|
||||
});
|
||||
|
||||
it('should copy multiple files', async () => {
|
||||
await createTestFile(sourceDir, 'file1.txt', 'content1');
|
||||
await createTestFile(sourceDir, 'file2.md', 'content2');
|
||||
await createTestFile(sourceDir, 'file3.json', '{}');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file2.md'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file3.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should copy nested directory structure', async () => {
|
||||
await createTestFile(sourceDir, 'root.txt', 'root');
|
||||
await createTestFile(sourceDir, 'level1/file.txt', 'level1');
|
||||
await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'root.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'file.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should create destination directory if it does not exist', async () => {
|
||||
const newDest = path.join(tmpDir, 'new-dest');
|
||||
await createTestFile(sourceDir, 'test.txt', 'content');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, newDest);
|
||||
|
||||
expect(await fs.pathExists(newDest)).toBe(true);
|
||||
expect(await fs.pathExists(path.join(newDest, 'test.txt'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('overwrite behavior', () => {
|
||||
it('should overwrite existing files by default', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'new content');
|
||||
await createTestFile(destDir, 'file.txt', 'old content');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const content = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
|
||||
expect(content).toBe('new content');
|
||||
});
|
||||
|
||||
it('should preserve file content when overwriting', async () => {
|
||||
await createTestFile(sourceDir, 'data.json', '{"new": true}');
|
||||
await createTestFile(destDir, 'data.json', '{"old": true}');
|
||||
await createTestFile(destDir, 'keep.txt', 'preserve this');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.readFile(path.join(destDir, 'data.json'), 'utf8')).toBe('{"new": true}');
|
||||
// Files not in source should be preserved
|
||||
expect(await fs.pathExists(path.join(destDir, 'keep.txt'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('filtering with shouldIgnore', () => {
|
||||
it('should filter out .git directories', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'content');
|
||||
await createTestFile(sourceDir, '.git/config', 'git config');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, '.git'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should filter out node_modules directories', async () => {
|
||||
await createTestFile(sourceDir, 'package.json', '{}');
|
||||
await createTestFile(sourceDir, 'node_modules/lib/code.js', 'code');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'package.json'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'node_modules'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should filter out *.swp and *.tmp files', async () => {
|
||||
await createTestFile(sourceDir, 'document.txt', 'content');
|
||||
await createTestFile(sourceDir, 'document.txt.swp', 'vim swap');
|
||||
await createTestFile(sourceDir, 'temp.tmp', 'temporary');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'document.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'document.txt.swp'))).toBe(false);
|
||||
expect(await fs.pathExists(path.join(destDir, 'temp.tmp'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should filter out .DS_Store files', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'content');
|
||||
await createTestFile(sourceDir, '.DS_Store', 'mac metadata');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, '.DS_Store'))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty source directory', async () => {
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const files = await fs.readdir(destDir);
|
||||
expect(files).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should handle Unicode filenames', async () => {
|
||||
await createTestFile(sourceDir, '测试.txt', 'chinese');
|
||||
await createTestFile(sourceDir, 'файл.json', 'russian');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, '测试.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'файл.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle filenames with special characters', async () => {
|
||||
await createTestFile(sourceDir, 'file with spaces.txt', 'content');
|
||||
await createTestFile(sourceDir, 'special-chars!@#.md', 'content');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file with spaces.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'special-chars!@#.md'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle very deep directory nesting', async () => {
|
||||
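// Builds 'level0/level1/.../level9' (10 nested directories)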
const deepPath = Array.from({ length: 10 }, (_, i) => `level${i}`).join('/');
|
||||
await createTestFile(sourceDir, `${deepPath}/deep.txt`, 'very deep');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, ...deepPath.split('/'), 'deep.txt'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should preserve file permissions', async () => {
|
||||
const execFile = path.join(sourceDir, 'script.sh');
|
||||
await fs.writeFile(execFile, '#!/bin/bash\necho "test"');
|
||||
await fs.chmod(execFile, 0o755); // Make executable
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const destFile = path.join(destDir, 'script.sh');
|
||||
const stats = await fs.stat(destFile);
|
||||
// Check if file is executable (user execute bit)
|
||||
expect((stats.mode & 0o100) !== 0).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle large number of files', async () => {
|
||||
// Create 50 files
|
||||
const promises = Array.from({ length: 50 }, (_, i) => createTestFile(sourceDir, `file${i}.txt`, `content ${i}`));
|
||||
await Promise.all(promises);
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const destFiles = await fs.readdir(destDir);
|
||||
expect(destFiles).toHaveLength(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe('content integrity', () => {
|
||||
it('should preserve file content exactly', async () => {
|
||||
const content = 'Line 1\nLine 2\nLine 3\n';
|
||||
await createTestFile(sourceDir, 'file.txt', content);
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const copiedContent = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
|
||||
expect(copiedContent).toBe(content);
|
||||
});
|
||||
|
||||
it('should preserve binary file content', async () => {
|
||||
const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
|
||||
await fs.writeFile(path.join(sourceDir, 'binary.dat'), buffer);
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const copiedBuffer = await fs.readFile(path.join(destDir, 'binary.dat'));
|
||||
expect(copiedBuffer).toEqual(buffer);
|
||||
});
|
||||
|
||||
it('should preserve UTF-8 content', async () => {
|
||||
const utf8Content = 'Hello 世界 🌍';
|
||||
await createTestFile(sourceDir, 'utf8.txt', utf8Content);
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const copied = await fs.readFile(path.join(destDir, 'utf8.txt'), 'utf8');
|
||||
expect(copied).toBe(utf8Content);
|
||||
});
|
||||
|
||||
it('should preserve empty files', async () => {
|
||||
await createTestFile(sourceDir, 'empty.txt', '');
|
||||
|
||||
await fileOps.copyDirectory(sourceDir, destDir);
|
||||
|
||||
const content = await fs.readFile(path.join(destDir, 'empty.txt'), 'utf8');
|
||||
expect(content).toBe('');
|
||||
});
|
||||
});
|
||||
});
|
||||
});

@@ -0,0 +1,211 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
|
||||
describe('FileOps', () => {
|
||||
describe('getFileHash()', () => {
|
||||
const fileOps = new FileOps();
|
||||
let tmpDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('basic hashing', () => {
|
||||
it('should return SHA256 hash for a simple file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'hello');
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
// SHA256 of 'hello' is known
|
||||
expect(hash).toBe('2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824');
|
||||
expect(hash).toHaveLength(64); // SHA256 is 64 hex characters
|
||||
});
|
||||
|
||||
it('should return consistent hash for same content', async () => {
|
||||
const content = 'test content for hashing';
|
||||
const file1 = await createTestFile(tmpDir, 'file1.txt', content);
|
||||
const file2 = await createTestFile(tmpDir, 'file2.txt', content);
|
||||
|
||||
const hash1 = await fileOps.getFileHash(file1);
|
||||
const hash2 = await fileOps.getFileHash(file2);
|
||||
|
||||
expect(hash1).toBe(hash2);
|
||||
});
|
||||
|
||||
it('should return different hash for different content', async () => {
|
||||
const file1 = await createTestFile(tmpDir, 'file1.txt', 'content A');
|
||||
const file2 = await createTestFile(tmpDir, 'file2.txt', 'content B');
|
||||
|
||||
const hash1 = await fileOps.getFileHash(file1);
|
||||
const hash2 = await fileOps.getFileHash(file2);
|
||||
|
||||
expect(hash1).not.toBe(hash2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('file size handling', () => {
|
||||
it('should handle empty file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'empty.txt', '');
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
// SHA256 of empty string
|
||||
expect(hash).toBe('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855');
|
||||
});
|
||||
|
||||
it('should handle small file (<4KB)', async () => {
|
||||
const content = 'a'.repeat(1000); // 1KB
|
||||
const filePath = await createTestFile(tmpDir, 'small.txt', content);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
});
|
||||
|
||||
it('should handle medium file (~1MB)', async () => {
|
||||
const content = 'x'.repeat(1024 * 1024); // 1MB
|
||||
const filePath = await createTestFile(tmpDir, 'medium.txt', content);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
});
|
||||
|
||||
it('should handle large file (~10MB) via streaming', async () => {
|
||||
// Create a 10MB file
|
||||
const chunkSize = 1024 * 1024; // 1MB chunks
|
||||
const chunks = Array.from({ length: 10 }, () => 'y'.repeat(chunkSize));
|
||||
const content = chunks.join('');
|
||||
|
||||
const filePath = await createTestFile(tmpDir, 'large.txt', content);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
}, 15_000); // 15 second timeout for large file
|
||||
});
|
||||
|
||||
describe('content type handling', () => {
|
||||
it('should handle binary content', async () => {
|
||||
// Create a buffer with binary data
|
||||
const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
|
||||
const filePath = await createTestFile(tmpDir, 'binary.dat', buffer.toString('binary'));
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
});
|
||||
|
||||
it('should handle UTF-8 content correctly', async () => {
|
||||
const content = 'Hello 世界 🌍';
|
||||
const filePath = await createTestFile(tmpDir, 'utf8.txt', content);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
// Hash should be consistent for UTF-8 content
|
||||
const hash2 = await fileOps.getFileHash(filePath);
|
||||
expect(hash).toBe(hash2);
|
||||
expect(hash).toHaveLength(64);
|
||||
});
|
||||
|
||||
it('should handle newline characters', async () => {
|
||||
const contentLF = 'line1\nline2\nline3';
|
||||
const contentCRLF = 'line1\r\nline2\r\nline3';
|
||||
|
||||
const fileLF = await createTestFile(tmpDir, 'lf.txt', contentLF);
|
||||
const fileCRLF = await createTestFile(tmpDir, 'crlf.txt', contentCRLF);
|
||||
|
||||
const hashLF = await fileOps.getFileHash(fileLF);
|
||||
const hashCRLF = await fileOps.getFileHash(fileCRLF);
|
||||
|
||||
// Different line endings should produce different hashes
|
||||
expect(hashLF).not.toBe(hashCRLF);
|
||||
});
|
||||
|
||||
it('should handle JSON content', async () => {
|
||||
const json = JSON.stringify({ key: 'value', nested: { array: [1, 2, 3] } }, null, 2);
|
||||
const filePath = await createTestFile(tmpDir, 'data.json', json);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle file with special characters in name', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'file with spaces & special-chars.txt', 'content');
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
});
|
||||
|
||||
it('should handle concurrent hash calculations', async () => {
|
||||
const files = await Promise.all([
|
||||
createTestFile(tmpDir, 'file1.txt', 'content 1'),
|
||||
createTestFile(tmpDir, 'file2.txt', 'content 2'),
|
||||
createTestFile(tmpDir, 'file3.txt', 'content 3'),
|
||||
]);
|
||||
|
||||
// Calculate hashes concurrently
|
||||
const hashes = await Promise.all(files.map((file) => fileOps.getFileHash(file)));
|
||||
|
||||
// All hashes should be valid
|
||||
expect(hashes).toHaveLength(3);
|
||||
for (const hash of hashes) {
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
}
|
||||
|
||||
// Hashes should be different
|
||||
expect(hashes[0]).not.toBe(hashes[1]);
|
||||
expect(hashes[1]).not.toBe(hashes[2]);
|
||||
expect(hashes[0]).not.toBe(hashes[2]);
|
||||
});
|
||||
|
||||
it('should handle file with only whitespace', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'whitespace.txt', ' ');
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
// Should be different from empty file
|
||||
expect(hash).not.toBe('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855');
|
||||
});
|
||||
|
||||
it('should handle very long single line', async () => {
|
||||
const longLine = 'x'.repeat(100_000); // 100KB single line
|
||||
const filePath = await createTestFile(tmpDir, 'longline.txt', longLine);
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
});
|
||||
});
|
||||
|
||||
describe('error handling', () => {
|
||||
it('should reject for non-existent file', async () => {
|
||||
const nonExistentPath = `${tmpDir}/does-not-exist.txt`;
|
||||
|
||||
await expect(fileOps.getFileHash(nonExistentPath)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should reject for directory instead of file', async () => {
|
||||
await expect(fileOps.getFileHash(tmpDir)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('streaming behavior', () => {
|
||||
it('should use streaming for efficiency (test implementation detail)', async () => {
|
||||
// This test verifies that the implementation uses streams
|
||||
// by checking that large files can be processed without loading entirely into memory
|
||||
const largeContent = 'z'.repeat(5 * 1024 * 1024); // 5MB
|
||||
const filePath = await createTestFile(tmpDir, 'stream.txt', largeContent);
|
||||
|
||||
// If this completes without memory issues, streaming is working
|
||||
const hash = await fileOps.getFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(64);
|
||||
expect(hash).toMatch(/^[a-f0-9]{64}$/);
|
||||
}, 10_000);
|
||||
});
|
||||
});
|
||||
});
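The FileOps.getFileHash() implementation itself is not part of this diff. As a rough sketch of what the tests above pin down (streaming SHA-256, 64-character lowercase hex output, rejection for missing paths and directories), a helper along these lines would satisfy them; only the function name is taken from the tests, the rest is an assumption.

```js
// Hypothetical sketch only -- not the code under test.
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';

export async function getFileHash(filePath) {
  const hash = createHash('sha256');
  // Stream the file chunk by chunk so multi-MB files never sit fully in memory.
  for await (const chunk of createReadStream(filePath)) {
    hash.update(chunk);
  }
  return hash.digest('hex'); // 64 lowercase hex characters
}
```

ENOENT and EISDIR from the read stream surface as rejected promises, which is all the error-handling tests require.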

@@ -0,0 +1,283 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile, createTestDirs } from '../../helpers/temp-dir.js';
|
||||
import path from 'node:path';
|
||||
|
||||
describe('FileOps', () => {
|
||||
describe('getFileList()', () => {
|
||||
const fileOps = new FileOps();
|
||||
let tmpDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('basic functionality', () => {
|
||||
it('should return empty array for empty directory', async () => {
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
expect(files).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return single file in directory', async () => {
|
||||
await createTestFile(tmpDir, 'test.txt', 'content');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe('test.txt');
|
||||
});
|
||||
|
||||
it('should return multiple files in directory', async () => {
|
||||
await createTestFile(tmpDir, 'file1.txt', 'content1');
|
||||
await createTestFile(tmpDir, 'file2.md', 'content2');
|
||||
await createTestFile(tmpDir, 'file3.json', 'content3');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files).toContain('file1.txt');
|
||||
expect(files).toContain('file2.md');
|
||||
expect(files).toContain('file3.json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('recursive directory walking', () => {
|
||||
it('should recursively find files in nested directories', async () => {
|
||||
await createTestFile(tmpDir, 'root.txt', 'root');
|
||||
await createTestFile(tmpDir, 'level1/file1.txt', 'level1');
|
||||
await createTestFile(tmpDir, 'level1/level2/file2.txt', 'level2');
|
||||
await createTestFile(tmpDir, 'level1/level2/level3/file3.txt', 'level3');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(4);
|
||||
expect(files).toContain('root.txt');
|
||||
expect(files).toContain(path.join('level1', 'file1.txt'));
|
||||
expect(files).toContain(path.join('level1', 'level2', 'file2.txt'));
|
||||
expect(files).toContain(path.join('level1', 'level2', 'level3', 'file3.txt'));
|
||||
});
|
||||
|
||||
it('should handle multiple subdirectories at same level', async () => {
|
||||
await createTestFile(tmpDir, 'dir1/file1.txt', 'content');
|
||||
await createTestFile(tmpDir, 'dir2/file2.txt', 'content');
|
||||
await createTestFile(tmpDir, 'dir3/file3.txt', 'content');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files).toContain(path.join('dir1', 'file1.txt'));
|
||||
expect(files).toContain(path.join('dir2', 'file2.txt'));
|
||||
expect(files).toContain(path.join('dir3', 'file3.txt'));
|
||||
});
|
||||
|
||||
it('should not include empty directories in results', async () => {
|
||||
await createTestDirs(tmpDir, ['empty1', 'empty2', 'has-file']);
|
||||
await createTestFile(tmpDir, 'has-file/file.txt', 'content');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe(path.join('has-file', 'file.txt'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('ignore filtering', () => {
|
||||
it('should ignore .git directories', async () => {
|
||||
await createTestFile(tmpDir, 'normal.txt', 'content');
|
||||
await createTestFile(tmpDir, '.git/config', 'git config');
|
||||
await createTestFile(tmpDir, '.git/hooks/pre-commit', 'hook');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe('normal.txt');
|
||||
});
|
||||
|
||||
it('should ignore node_modules directories', async () => {
|
||||
await createTestFile(tmpDir, 'package.json', '{}');
|
||||
await createTestFile(tmpDir, 'node_modules/package/index.js', 'code');
|
||||
await createTestFile(tmpDir, 'node_modules/package/lib/util.js', 'util');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe('package.json');
|
||||
});
|
||||
|
||||
it('should ignore .DS_Store files', async () => {
|
||||
await createTestFile(tmpDir, 'file.txt', 'content');
|
||||
await createTestFile(tmpDir, '.DS_Store', 'mac metadata');
|
||||
await createTestFile(tmpDir, 'subdir/.DS_Store', 'mac metadata');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe('file.txt');
|
||||
});
|
||||
|
||||
it('should ignore *.swp and *.tmp files', async () => {
|
||||
await createTestFile(tmpDir, 'document.txt', 'content');
|
||||
await createTestFile(tmpDir, 'document.txt.swp', 'vim swap');
|
||||
await createTestFile(tmpDir, 'temp.tmp', 'temporary');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe('document.txt');
|
||||
});
|
||||
|
||||
it('should ignore multiple ignored patterns together', async () => {
|
||||
await createTestFile(tmpDir, 'src/index.js', 'source code');
|
||||
await createTestFile(tmpDir, 'node_modules/lib/code.js', 'dependency');
|
||||
await createTestFile(tmpDir, '.git/config', 'git config');
|
||||
await createTestFile(tmpDir, '.DS_Store', 'mac file');
|
||||
await createTestFile(tmpDir, 'file.swp', 'swap file');
|
||||
await createTestFile(tmpDir, '.idea/workspace.xml', 'ide');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe(path.join('src', 'index.js'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('relative path handling', () => {
|
||||
it('should return paths relative to base directory', async () => {
|
||||
await createTestFile(tmpDir, 'a/b/c/deep.txt', 'deep');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files[0]).toBe(path.join('a', 'b', 'c', 'deep.txt'));
|
||||
expect(path.isAbsolute(files[0])).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle subdirectory as base', async () => {
|
||||
await createTestFile(tmpDir, 'root.txt', 'root');
|
||||
await createTestFile(tmpDir, 'sub/file1.txt', 'sub1');
|
||||
await createTestFile(tmpDir, 'sub/file2.txt', 'sub2');
|
||||
|
||||
const subDir = path.join(tmpDir, 'sub');
|
||||
const files = await fileOps.getFileList(subDir);
|
||||
|
||||
expect(files).toHaveLength(2);
|
||||
expect(files).toContain('file1.txt');
|
||||
expect(files).toContain('file2.txt');
|
||||
// Should not include root.txt
|
||||
expect(files).not.toContain('root.txt');
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle directory with special characters', async () => {
|
||||
await createTestFile(tmpDir, 'folder with spaces/file.txt', 'content');
|
||||
await createTestFile(tmpDir, 'special-chars!@#/data.json', 'data');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(2);
|
||||
expect(files).toContain(path.join('folder with spaces', 'file.txt'));
|
||||
expect(files).toContain(path.join('special-chars!@#', 'data.json'));
|
||||
});
|
||||
|
||||
it('should handle Unicode filenames', async () => {
|
||||
await createTestFile(tmpDir, '文档/测试.txt', 'chinese');
|
||||
await createTestFile(tmpDir, 'файл/данные.json', 'russian');
|
||||
await createTestFile(tmpDir, 'ファイル/データ.yaml', 'japanese');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files.some((f) => f.includes('测试.txt'))).toBe(true);
|
||||
expect(files.some((f) => f.includes('данные.json'))).toBe(true);
|
||||
expect(files.some((f) => f.includes('データ.yaml'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should return empty array for non-existent directory', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||
|
||||
const files = await fileOps.getFileList(nonExistent);
|
||||
|
||||
expect(files).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle very deep directory nesting', async () => {
|
||||
// Create a deeply nested structure (10 levels)
|
||||
const deepPath = Array.from({ length: 10 }, (_, i) => `level${i}`).join('/');
|
||||
await createTestFile(tmpDir, `${deepPath}/deep.txt`, 'very deep');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(1);
|
||||
expect(files[0]).toBe(path.join(...deepPath.split('/'), 'deep.txt'));
|
||||
});
|
||||
|
||||
it('should handle directory with many files', async () => {
|
||||
// Create 100 files
|
||||
const promises = Array.from({ length: 100 }, (_, i) => createTestFile(tmpDir, `file${i}.txt`, `content ${i}`));
|
||||
await Promise.all(promises);
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(100);
|
||||
expect(files.every((f) => f.startsWith('file') && f.endsWith('.txt'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle mixed ignored and non-ignored files', async () => {
|
||||
await createTestFile(tmpDir, 'src/main.js', 'code');
|
||||
await createTestFile(tmpDir, 'src/main.js.swp', 'swap');
|
||||
await createTestFile(tmpDir, 'lib/utils.js', 'utils');
|
||||
await createTestFile(tmpDir, 'node_modules/dep/index.js', 'dep');
|
||||
await createTestFile(tmpDir, 'test/test.js', 'test');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files).toContain(path.join('src', 'main.js'));
|
||||
expect(files).toContain(path.join('lib', 'utils.js'));
|
||||
expect(files).toContain(path.join('test', 'test.js'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('file types', () => {
|
||||
it('should include files with no extension', async () => {
|
||||
await createTestFile(tmpDir, 'README', 'readme content');
|
||||
await createTestFile(tmpDir, 'LICENSE', 'license text');
|
||||
await createTestFile(tmpDir, 'Makefile', 'make commands');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files).toContain('README');
|
||||
expect(files).toContain('LICENSE');
|
||||
expect(files).toContain('Makefile');
|
||||
});
|
||||
|
||||
it('should include dotfiles (except ignored ones)', async () => {
|
||||
await createTestFile(tmpDir, '.gitignore', 'ignore patterns');
|
||||
await createTestFile(tmpDir, '.env', 'environment');
|
||||
await createTestFile(tmpDir, '.eslintrc', 'eslint config');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
expect(files).toContain('.gitignore');
|
||||
expect(files).toContain('.env');
|
||||
expect(files).toContain('.eslintrc');
|
||||
});
|
||||
|
||||
it('should include files with multiple extensions', async () => {
|
||||
await createTestFile(tmpDir, 'archive.tar.gz', 'archive');
|
||||
await createTestFile(tmpDir, 'backup.sql.bak', 'backup');
|
||||
await createTestFile(tmpDir, 'config.yaml.sample', 'sample config');
|
||||
|
||||
const files = await fileOps.getFileList(tmpDir);
|
||||
|
||||
expect(files).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
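getFileList() is likewise not shown in this diff. A minimal sketch consistent with the expectations above (recursive walk, ignore filtering, paths relative to the base directory, empty array for a missing directory) might look like the following; the shouldIgnore() predicate it leans on is the one whose tests appear later in this changeset, and the default parameter here is purely illustrative.

```js
// Hypothetical sketch only -- not the code under test.
import fs from 'fs-extra';
import path from 'node:path';

export async function getFileList(baseDir, currentDir = baseDir, shouldIgnore = () => false) {
  if (!(await fs.pathExists(currentDir))) return []; // missing directory -> empty list
  const files = [];
  for (const entry of await fs.readdir(currentDir, { withFileTypes: true })) {
    const fullPath = path.join(currentDir, entry.name);
    if (shouldIgnore(fullPath)) continue; // drop .git, node_modules, *.swp, ...
    if (entry.isDirectory()) {
      files.push(...(await getFileList(baseDir, fullPath, shouldIgnore)));
    } else {
      files.push(path.relative(baseDir, fullPath)); // relative to the base directory
    }
  }
  return files;
}
```

Because paths are built with path.join/path.relative, the results use platform separators, matching the path.join() comparisons in the tests, and empty directories simply contribute nothing.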

@@ -0,0 +1,177 @@
import { describe, it, expect } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';

describe('FileOps', () => {
  describe('shouldIgnore()', () => {
    const fileOps = new FileOps();

    describe('exact matches', () => {
      it('should ignore .git directory', () => {
        expect(fileOps.shouldIgnore('.git')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.git')).toBe(true);
        // Note: basename of '/project/.git/hooks' is 'hooks', not '.git'
        expect(fileOps.shouldIgnore('/project/.git/hooks')).toBe(false);
      });

      it('should ignore .DS_Store files', () => {
        expect(fileOps.shouldIgnore('.DS_Store')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.DS_Store')).toBe(true);
      });

      it('should ignore node_modules directory', () => {
        expect(fileOps.shouldIgnore('node_modules')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/node_modules')).toBe(true);
        // Note: basename of '/project/node_modules/package' is 'package', not 'node_modules'
        expect(fileOps.shouldIgnore('/project/node_modules/package')).toBe(false);
      });

      it('should ignore .idea directory', () => {
        expect(fileOps.shouldIgnore('.idea')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.idea')).toBe(true);
      });

      it('should ignore .vscode directory', () => {
        expect(fileOps.shouldIgnore('.vscode')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.vscode')).toBe(true);
      });

      it('should ignore __pycache__ directory', () => {
        expect(fileOps.shouldIgnore('__pycache__')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/__pycache__')).toBe(true);
      });
    });

    describe('glob pattern matches', () => {
      it('should ignore *.swp files (Vim swap files)', () => {
        expect(fileOps.shouldIgnore('file.swp')).toBe(true);
        expect(fileOps.shouldIgnore('.config.yaml.swp')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/document.txt.swp')).toBe(true);
      });

      it('should ignore *.tmp files (temporary files)', () => {
        expect(fileOps.shouldIgnore('file.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('temp_data.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/cache.tmp')).toBe(true);
      });

      it('should ignore *.pyc files (Python compiled)', () => {
        expect(fileOps.shouldIgnore('module.pyc')).toBe(true);
        expect(fileOps.shouldIgnore('__init__.pyc')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/script.pyc')).toBe(true);
      });
    });

    describe('files that should NOT be ignored', () => {
      it('should not ignore normal files', () => {
        expect(fileOps.shouldIgnore('README.md')).toBe(false);
        expect(fileOps.shouldIgnore('package.json')).toBe(false);
        expect(fileOps.shouldIgnore('index.js')).toBe(false);
      });

      it('should not ignore .gitignore itself', () => {
        expect(fileOps.shouldIgnore('.gitignore')).toBe(false);
        expect(fileOps.shouldIgnore('/path/to/.gitignore')).toBe(false);
      });

      it('should not ignore files with similar but different names', () => {
        expect(fileOps.shouldIgnore('git-file.txt')).toBe(false);
        expect(fileOps.shouldIgnore('node_modules.backup')).toBe(false);
        expect(fileOps.shouldIgnore('swap-file.txt')).toBe(false);
      });

      it('should not ignore files with ignored patterns in parent directory', () => {
        // The pattern matches basename, not full path
        expect(fileOps.shouldIgnore('/project/src/utils.js')).toBe(false);
        expect(fileOps.shouldIgnore('/code/main.py')).toBe(false);
      });

      it('should not ignore directories with dot prefix (except specific ones)', () => {
        expect(fileOps.shouldIgnore('.github')).toBe(false);
        expect(fileOps.shouldIgnore('.husky')).toBe(false);
        expect(fileOps.shouldIgnore('.npmrc')).toBe(false);
      });
    });

    describe('edge cases', () => {
      it('should handle empty string', () => {
        expect(fileOps.shouldIgnore('')).toBe(false);
      });

      it('should handle paths with multiple segments', () => {
        // basename of '/very/deep/path/to/node_modules/package' is 'package'
        expect(fileOps.shouldIgnore('/very/deep/path/to/node_modules/package')).toBe(false);
        expect(fileOps.shouldIgnore('/very/deep/path/to/file.swp')).toBe(true);
        expect(fileOps.shouldIgnore('/very/deep/path/to/normal.js')).toBe(false);
        // But the directory itself would be ignored
        expect(fileOps.shouldIgnore('/very/deep/path/to/node_modules')).toBe(true);
      });

      it('should handle Windows-style paths', () => {
        // Note: path.basename() on Unix doesn't recognize backslashes
        // On Unix: basename('C:\\project\\file.tmp') = 'C:\\project\\file.tmp'
        // So we test cross-platform path handling
        expect(fileOps.shouldIgnore(String.raw`C:\project\file.tmp`)).toBe(true); // .tmp matches
        expect(fileOps.shouldIgnore(String.raw`test\file.swp`)).toBe(true); // .swp matches
        // These won't be ignored because they don't match the patterns on Unix
        expect(fileOps.shouldIgnore(String.raw`C:\project\node_modules\pkg`)).toBe(false);
        expect(fileOps.shouldIgnore(String.raw`C:\project\src\main.js`)).toBe(false);
      });

      it('should handle relative paths', () => {
        // basename of './node_modules/package' is 'package'
        expect(fileOps.shouldIgnore('./node_modules/package')).toBe(false);
        // basename of '../.git/hooks' is 'hooks'
        expect(fileOps.shouldIgnore('../.git/hooks')).toBe(false);
        expect(fileOps.shouldIgnore('./src/index.js')).toBe(false);
        // But the directories themselves would be ignored
        expect(fileOps.shouldIgnore('./node_modules')).toBe(true);
        expect(fileOps.shouldIgnore('../.git')).toBe(true);
      });

      it('should handle files with multiple extensions', () => {
        expect(fileOps.shouldIgnore('file.tar.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('backup.sql.swp')).toBe(true);
        expect(fileOps.shouldIgnore('data.json.gz')).toBe(false);
      });

      it('should be case-sensitive for exact matches', () => {
        expect(fileOps.shouldIgnore('Node_Modules')).toBe(false);
        expect(fileOps.shouldIgnore('NODE_MODULES')).toBe(false);
        expect(fileOps.shouldIgnore('node_modules')).toBe(true);
      });

      it('should handle files starting with ignored patterns', () => {
        expect(fileOps.shouldIgnore('.git-credentials')).toBe(false);
        expect(fileOps.shouldIgnore('.gitattributes')).toBe(false);
        expect(fileOps.shouldIgnore('.git')).toBe(true);
      });

      it('should handle Unicode filenames', () => {
        expect(fileOps.shouldIgnore('文档.swp')).toBe(true);
        expect(fileOps.shouldIgnore('файл.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('ドキュメント.txt')).toBe(false);
      });
    });

    describe('pattern matching behavior', () => {
      it('should match patterns based on basename only', () => {
        // shouldIgnore uses path.basename(), so only the last segment matters
        expect(fileOps.shouldIgnore('/home/user/.git/config')).toBe(false); // basename is 'config'
        expect(fileOps.shouldIgnore('/home/user/project/node_modules')).toBe(true); // basename is 'node_modules'
      });

      it('should handle trailing slashes', () => {
        // path.basename() returns the directory name, not empty string for trailing slash
        expect(fileOps.shouldIgnore('node_modules/')).toBe(true);
        expect(fileOps.shouldIgnore('.git/')).toBe(true);
      });

      it('should treat patterns as partial regex matches', () => {
        // The *.swp pattern becomes /.*\.swp/ regex
        expect(fileOps.shouldIgnore('test.swp')).toBe(true);
        expect(fileOps.shouldIgnore('swp')).toBe(false); // doesn't match .*\.swp
        expect(fileOps.shouldIgnore('.swp')).toBe(true); // matches .*\.swp (. before swp)
      });
    });
  });
});
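Taken together, these expectations describe a basename-only check against a short list of exact names plus a few glob patterns. A sketch that would satisfy them could look like the following; the pattern list and function name are assumptions derived purely from the tests, not the shipped implementation.

```js
// Hypothetical sketch only -- not the code under test.
import path from 'node:path';

const IGNORE_PATTERNS = ['.git', '.DS_Store', 'node_modules', '.idea', '.vscode', '__pycache__', '*.swp', '*.tmp', '*.pyc'];

export function shouldIgnore(filePath) {
  // Only the last path segment is considered, which is why '/project/.git/hooks' is not ignored.
  const base = path.basename(filePath);
  return IGNORE_PATTERNS.some((pattern) =>
    pattern.includes('*')
      ? new RegExp(pattern.replace('.', String.raw`\.`).replace('*', '.*')).test(base) // '*.swp' -> /.*\.swp/
      : base === pattern,
  );
}
```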

@@ -0,0 +1,316 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
describe('FileOps', () => {
|
||||
describe('syncDirectory()', () => {
|
||||
const fileOps = new FileOps();
|
||||
let tmpDir;
|
||||
let sourceDir;
|
||||
let destDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
sourceDir = path.join(tmpDir, 'source');
|
||||
destDir = path.join(tmpDir, 'dest');
|
||||
await fs.ensureDir(sourceDir);
|
||||
await fs.ensureDir(destDir);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('hash-based selective update', () => {
|
||||
it('should update file when hashes are identical (safe update)', async () => {
|
||||
const content = 'identical content';
|
||||
await createTestFile(sourceDir, 'file.txt', content);
|
||||
await createTestFile(destDir, 'file.txt', content);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// File should be updated (copied over) since hashes match
|
||||
const destContent = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
|
||||
expect(destContent).toBe(content);
|
||||
});
|
||||
|
||||
it('should preserve modified file when dest is newer', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'source content');
|
||||
await createTestFile(destDir, 'file.txt', 'modified by user');
|
||||
|
||||
// Make dest file newer
|
||||
const destFile = path.join(destDir, 'file.txt');
|
||||
const futureTime = new Date(Date.now() + 10_000);
|
||||
await fs.utimes(destFile, futureTime, futureTime);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// User modification should be preserved
|
||||
const destContent = await fs.readFile(destFile, 'utf8');
|
||||
expect(destContent).toBe('modified by user');
|
||||
});
|
||||
|
||||
it('should update file when source is newer than modified dest', async () => {
|
||||
// Create both files first
|
||||
await createTestFile(sourceDir, 'file.txt', 'new source content');
|
||||
await createTestFile(destDir, 'file.txt', 'old modified content');
|
||||
|
||||
// Make dest older and source newer with explicit times
|
||||
const destFile = path.join(destDir, 'file.txt');
|
||||
const sourceFile = path.join(sourceDir, 'file.txt');
|
||||
|
||||
const pastTime = new Date(Date.now() - 10_000);
|
||||
const futureTime = new Date(Date.now() + 10_000);
|
||||
|
||||
await fs.utimes(destFile, pastTime, pastTime);
|
||||
await fs.utimes(sourceFile, futureTime, futureTime);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// Should update to source content since source is newer
|
||||
const destContent = await fs.readFile(destFile, 'utf8');
|
||||
expect(destContent).toBe('new source content');
|
||||
});
|
||||
});
|
||||
|
||||
describe('new file handling', () => {
|
||||
it('should copy new files from source', async () => {
|
||||
await createTestFile(sourceDir, 'new-file.txt', 'new content');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'new-file.txt'))).toBe(true);
|
||||
expect(await fs.readFile(path.join(destDir, 'new-file.txt'), 'utf8')).toBe('new content');
|
||||
});
|
||||
|
||||
it('should copy multiple new files', async () => {
|
||||
await createTestFile(sourceDir, 'file1.txt', 'content1');
|
||||
await createTestFile(sourceDir, 'file2.md', 'content2');
|
||||
await createTestFile(sourceDir, 'file3.json', 'content3');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file2.md'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file3.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should create nested directories for new files', async () => {
|
||||
await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep content');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('orphaned file removal', () => {
|
||||
it('should remove files that no longer exist in source', async () => {
|
||||
await createTestFile(sourceDir, 'keep.txt', 'keep this');
|
||||
await createTestFile(destDir, 'keep.txt', 'keep this');
|
||||
await createTestFile(destDir, 'remove.txt', 'delete this');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'keep.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'remove.txt'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should remove multiple orphaned files', async () => {
|
||||
await createTestFile(sourceDir, 'current.txt', 'current');
|
||||
await createTestFile(destDir, 'current.txt', 'current');
|
||||
await createTestFile(destDir, 'old1.txt', 'orphan 1');
|
||||
await createTestFile(destDir, 'old2.txt', 'orphan 2');
|
||||
await createTestFile(destDir, 'old3.txt', 'orphan 3');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'current.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'old1.txt'))).toBe(false);
|
||||
expect(await fs.pathExists(path.join(destDir, 'old2.txt'))).toBe(false);
|
||||
expect(await fs.pathExists(path.join(destDir, 'old3.txt'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should remove orphaned directories', async () => {
|
||||
await createTestFile(sourceDir, 'keep/file.txt', 'keep');
|
||||
await createTestFile(destDir, 'keep/file.txt', 'keep');
|
||||
await createTestFile(destDir, 'remove/orphan.txt', 'orphan');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'keep'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'remove', 'orphan.txt'))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('complex scenarios', () => {
|
||||
it('should handle mixed operations in single sync', async () => {
|
||||
const now = Date.now();
const pastTime = new Date(now - 100_000); // 100 seconds ago
const futureTime = new Date(now + 100_000); // 100 seconds from now
|
||||
|
||||
// Identical file (update)
|
||||
await createTestFile(sourceDir, 'identical.txt', 'same');
|
||||
await createTestFile(destDir, 'identical.txt', 'same');
|
||||
|
||||
// Modified file with newer dest (preserve)
|
||||
await createTestFile(sourceDir, 'modified.txt', 'original');
|
||||
await createTestFile(destDir, 'modified.txt', 'user modified');
|
||||
const modifiedFile = path.join(destDir, 'modified.txt');
|
||||
await fs.utimes(modifiedFile, futureTime, futureTime);
|
||||
|
||||
// New file (copy)
|
||||
await createTestFile(sourceDir, 'new.txt', 'new content');
|
||||
|
||||
// Orphaned file (remove)
|
||||
await createTestFile(destDir, 'orphan.txt', 'delete me');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// Verify operations
|
||||
expect(await fs.pathExists(path.join(destDir, 'identical.txt'))).toBe(true);
|
||||
|
||||
expect(await fs.readFile(modifiedFile, 'utf8')).toBe('user modified');
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'new.txt'))).toBe(true);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'orphan.txt'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle nested directory changes', async () => {
|
||||
// Create nested structure in source
|
||||
await createTestFile(sourceDir, 'level1/keep.txt', 'keep');
|
||||
await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep');
|
||||
|
||||
// Create different nested structure in dest
|
||||
await createTestFile(destDir, 'level1/keep.txt', 'keep');
|
||||
await createTestFile(destDir, 'level1/remove.txt', 'orphan');
|
||||
await createTestFile(destDir, 'old-level/file.txt', 'old');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'keep.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'level1', 'remove.txt'))).toBe(false);
|
||||
expect(await fs.pathExists(path.join(destDir, 'old-level', 'file.txt'))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty source directory', async () => {
|
||||
await createTestFile(destDir, 'file.txt', 'content');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// All files should be removed
|
||||
expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle empty destination directory', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'content');
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle Unicode filenames', async () => {
|
||||
await createTestFile(sourceDir, '测试.txt', 'chinese');
|
||||
await createTestFile(destDir, '测试.txt', 'modified chinese');
|
||||
|
||||
// Make dest newer
|
||||
const futureTime = new Date(Date.now() + 10_000);
await fs.utimes(path.join(destDir, '测试.txt'), futureTime, futureTime);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// Should preserve user modification
|
||||
expect(await fs.readFile(path.join(destDir, '测试.txt'), 'utf8')).toBe('modified chinese');
|
||||
});
|
||||
|
||||
it('should handle large number of files', async () => {
|
||||
// Create 50 files in source
|
||||
for (let i = 0; i < 50; i++) {
|
||||
await createTestFile(sourceDir, `file${i}.txt`, `content ${i}`);
|
||||
}
|
||||
|
||||
// Create 25 matching files and 25 orphaned files in dest
|
||||
for (let i = 0; i < 25; i++) {
|
||||
await createTestFile(destDir, `file${i}.txt`, `content ${i}`);
|
||||
await createTestFile(destDir, `orphan${i}.txt`, `orphan ${i}`);
|
||||
}
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// All 50 source files should exist
|
||||
for (let i = 0; i < 50; i++) {
|
||||
expect(await fs.pathExists(path.join(destDir, `file${i}.txt`))).toBe(true);
|
||||
}
|
||||
|
||||
// All 25 orphaned files should be removed
|
||||
for (let i = 0; i < 25; i++) {
|
||||
expect(await fs.pathExists(path.join(destDir, `orphan${i}.txt`))).toBe(false);
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle binary files correctly', async () => {
|
||||
const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47]);
|
||||
await fs.writeFile(path.join(sourceDir, 'binary.dat'), buffer);
|
||||
await fs.writeFile(path.join(destDir, 'binary.dat'), buffer);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
const destBuffer = await fs.readFile(path.join(destDir, 'binary.dat'));
|
||||
expect(destBuffer).toEqual(buffer);
|
||||
});
|
||||
});
|
||||
|
||||
describe('timestamp precision', () => {
|
||||
it('should handle files with very close modification times', async () => {
|
||||
await createTestFile(sourceDir, 'file.txt', 'source');
|
||||
await createTestFile(destDir, 'file.txt', 'dest modified');
|
||||
|
||||
// Make dest just slightly newer (100ms)
|
||||
const destFile = path.join(destDir, 'file.txt');
|
||||
const slightlyNewer = new Date(Date.now() + 100);
await fs.utimes(destFile, slightlyNewer, slightlyNewer);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// Should preserve user modification even with small time difference
|
||||
expect(await fs.readFile(destFile, 'utf8')).toBe('dest modified');
|
||||
});
|
||||
});
|
||||
|
||||
describe('data integrity', () => {
|
||||
it('should not corrupt files during sync', async () => {
|
||||
const content = 'Important data\nLine 2\nLine 3\n';
|
||||
await createTestFile(sourceDir, 'data.txt', content);
|
||||
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
expect(await fs.readFile(path.join(destDir, 'data.txt'), 'utf8')).toBe(content);
|
||||
});
|
||||
|
||||
it('should handle sync interruption gracefully', async () => {
|
||||
// This test verifies that partial syncs don't leave inconsistent state
|
||||
await createTestFile(sourceDir, 'file1.txt', 'content1');
|
||||
await createTestFile(sourceDir, 'file2.txt', 'content2');
|
||||
|
||||
// First sync
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// Modify source
|
||||
await createTestFile(sourceDir, 'file3.txt', 'content3');
|
||||
|
||||
// Second sync
|
||||
await fileOps.syncDirectory(sourceDir, destDir);
|
||||
|
||||
// All files should be present and correct
|
||||
expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file2.txt'))).toBe(true);
|
||||
expect(await fs.pathExists(path.join(destDir, 'file3.txt'))).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
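The sync rule these tests encode is roughly: copy files that are new, refresh files whose hashes match, keep the destination when its content differs and it is newer, overwrite when the source is newer, and remove destination files that have no source counterpart. A per-file sketch of that decision (the orphan sweep would be a separate pass over the destination) -- hypothetical, not the shipped code:

```js
// Hypothetical sketch only -- not the code under test.
import fs from 'fs-extra';
import path from 'node:path';

async function syncOneFile(fileOps, sourceDir, destDir, relPath) {
  const src = path.join(sourceDir, relPath);
  const dest = path.join(destDir, relPath);
  if (!(await fs.pathExists(dest))) {
    return fs.copy(src, dest); // new file: copy, creating parent directories
  }
  const [srcHash, destHash] = await Promise.all([fileOps.getFileHash(src), fileOps.getFileHash(dest)]);
  if (srcHash === destHash) {
    return fs.copy(src, dest); // identical content: safe to refresh
  }
  const [srcStat, destStat] = await Promise.all([fs.stat(src), fs.stat(dest)]);
  if (destStat.mtimeMs > srcStat.mtimeMs) {
    return; // destination was modified more recently: preserve the user's edit
  }
  return fs.copy(src, dest); // source is newer: take the source version
}
```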

@@ -0,0 +1,214 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
|
||||
describe('FileOps', () => {
|
||||
const fileOps = new FileOps();
|
||||
let tmpDir;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('ensureDir()', () => {
|
||||
it('should create directory if it does not exist', async () => {
|
||||
const newDir = path.join(tmpDir, 'new-directory');
|
||||
|
||||
await fileOps.ensureDir(newDir);
|
||||
|
||||
expect(await fs.pathExists(newDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should not fail if directory already exists', async () => {
|
||||
const existingDir = path.join(tmpDir, 'existing');
|
||||
await fs.ensureDir(existingDir);
|
||||
|
||||
await expect(fileOps.ensureDir(existingDir)).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('should create nested directories', async () => {
|
||||
const nestedDir = path.join(tmpDir, 'level1', 'level2', 'level3');
|
||||
|
||||
await fileOps.ensureDir(nestedDir);
|
||||
|
||||
expect(await fs.pathExists(nestedDir)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('remove()', () => {
|
||||
it('should remove a file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||
|
||||
await fileOps.remove(filePath);
|
||||
|
||||
expect(await fs.pathExists(filePath)).toBe(false);
|
||||
});
|
||||
|
||||
it('should remove a directory', async () => {
|
||||
const dirPath = path.join(tmpDir, 'test-dir');
|
||||
await fs.ensureDir(dirPath);
|
||||
await createTestFile(dirPath, 'file.txt', 'content');
|
||||
|
||||
await fileOps.remove(dirPath);
|
||||
|
||||
expect(await fs.pathExists(dirPath)).toBe(false);
|
||||
});
|
||||
|
||||
it('should not fail if path does not exist', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||
|
||||
await expect(fileOps.remove(nonExistent)).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
it('should remove nested directories', async () => {
|
||||
const nested = path.join(tmpDir, 'a', 'b', 'c');
|
||||
await fs.ensureDir(nested);
|
||||
await createTestFile(nested, 'file.txt', 'content');
|
||||
|
||||
await fileOps.remove(path.join(tmpDir, 'a'));
|
||||
|
||||
expect(await fs.pathExists(path.join(tmpDir, 'a'))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('readFile()', () => {
|
||||
it('should read file content', async () => {
|
||||
const content = 'test content';
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', content);
|
||||
|
||||
const result = await fileOps.readFile(filePath);
|
||||
|
||||
expect(result).toBe(content);
|
||||
});
|
||||
|
||||
it('should read UTF-8 content', async () => {
|
||||
const content = 'Hello 世界 🌍';
|
||||
const filePath = await createTestFile(tmpDir, 'utf8.txt', content);
|
||||
|
||||
const result = await fileOps.readFile(filePath);
|
||||
|
||||
expect(result).toBe(content);
|
||||
});
|
||||
|
||||
it('should read empty file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'empty.txt', '');
|
||||
|
||||
const result = await fileOps.readFile(filePath);
|
||||
|
||||
expect(result).toBe('');
|
||||
});
|
||||
|
||||
it('should reject for non-existent file', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'does-not-exist.txt');
|
||||
|
||||
await expect(fileOps.readFile(nonExistent)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('writeFile()', () => {
|
||||
it('should write file content', async () => {
|
||||
const filePath = path.join(tmpDir, 'new-file.txt');
|
||||
const content = 'test content';
|
||||
|
||||
await fileOps.writeFile(filePath, content);
|
||||
|
||||
expect(await fs.readFile(filePath, 'utf8')).toBe(content);
|
||||
});
|
||||
|
||||
it('should create parent directories if they do not exist', async () => {
|
||||
const filePath = path.join(tmpDir, 'level1', 'level2', 'file.txt');
|
||||
|
||||
await fileOps.writeFile(filePath, 'content');
|
||||
|
||||
expect(await fs.pathExists(filePath)).toBe(true);
|
||||
expect(await fs.readFile(filePath, 'utf8')).toBe('content');
|
||||
});
|
||||
|
||||
it('should overwrite existing file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'old content');
|
||||
|
||||
await fileOps.writeFile(filePath, 'new content');
|
||||
|
||||
expect(await fs.readFile(filePath, 'utf8')).toBe('new content');
|
||||
});
|
||||
|
||||
it('should handle UTF-8 content', async () => {
|
||||
const content = '测试 Тест 🎉';
|
||||
const filePath = path.join(tmpDir, 'unicode.txt');
|
||||
|
||||
await fileOps.writeFile(filePath, content);
|
||||
|
||||
expect(await fs.readFile(filePath, 'utf8')).toBe(content);
|
||||
});
|
||||
});
|
||||
|
||||
describe('exists()', () => {
|
||||
it('should return true for existing file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||
|
||||
const result = await fileOps.exists(filePath);
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for existing directory', async () => {
|
||||
const dirPath = path.join(tmpDir, 'test-dir');
|
||||
await fs.ensureDir(dirPath);
|
||||
|
||||
const result = await fileOps.exists(dirPath);
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for non-existent path', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||
|
||||
const result = await fileOps.exists(nonExistent);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('stat()', () => {
|
||||
it('should return stats for file', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||
|
||||
const stats = await fileOps.stat(filePath);
|
||||
|
||||
expect(stats.isFile()).toBe(true);
|
||||
expect(stats.isDirectory()).toBe(false);
|
||||
expect(stats.size).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should return stats for directory', async () => {
|
||||
const dirPath = path.join(tmpDir, 'test-dir');
|
||||
await fs.ensureDir(dirPath);
|
||||
|
||||
const stats = await fileOps.stat(dirPath);
|
||||
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
expect(stats.isFile()).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject for non-existent path', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||
|
||||
await expect(fileOps.stat(nonExistent)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should return modification time', async () => {
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||
|
||||
const stats = await fileOps.stat(filePath);
|
||||
|
||||
expect(stats.mtime).toBeInstanceOf(Date);
|
||||
expect(stats.mtime.getTime()).toBeLessThanOrEqual(Date.now());
|
||||
});
|
||||
});
|
||||
});
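These last helpers read like thin wrappers over fs-extra. A plausible sketch, assuming that is the case (the method names mirror the tests, the class shape is a guess):

```js
// Hypothetical sketch only -- not the code under test.
import fs from 'fs-extra';

export class FileOpsBasics {
  ensureDir(dirPath) { return fs.ensureDir(dirPath); } // mkdir -p semantics
  remove(targetPath) { return fs.remove(targetPath); } // rm -rf; no error if missing
  readFile(filePath) { return fs.readFile(filePath, 'utf8'); }
  writeFile(filePath, content) { return fs.outputFile(filePath, content, 'utf8'); } // creates parent dirs
  exists(targetPath) { return fs.pathExists(targetPath); }
  stat(targetPath) { return fs.stat(targetPath); }
}
```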

@@ -0,0 +1,335 @@
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||
|
||||
describe('YamlXmlBuilder - buildCommandsXml()', () => {
|
||||
let builder;
|
||||
|
||||
beforeEach(() => {
|
||||
builder = new YamlXmlBuilder();
|
||||
});
|
||||
|
||||
describe('menu injection', () => {
|
||||
it('should always inject *menu item first', () => {
|
||||
const xml = builder.buildCommandsXml([]);
|
||||
|
||||
expect(xml).toContain('<item cmd="*menu">[M] Redisplay Menu Options</item>');
|
||||
});
|
||||
|
||||
it('should always inject *dismiss item last', () => {
|
||||
const xml = builder.buildCommandsXml([]);
|
||||
|
||||
expect(xml).toContain('<item cmd="*dismiss">[D] Dismiss Agent</item>');
|
||||
// Should be at the end before </menu>
|
||||
expect(xml).toMatch(/\*dismiss.*<\/menu>/s);
|
||||
});
|
||||
|
||||
it('should place user items between *menu and *dismiss', () => {
|
||||
const menuItems = [{ trigger: 'help', description: 'Show help', action: 'show_help' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
const menuIndex = xml.indexOf('*menu');
|
||||
const helpIndex = xml.indexOf('*help');
|
||||
const dismissIndex = xml.indexOf('*dismiss');
|
||||
|
||||
expect(menuIndex).toBeLessThan(helpIndex);
|
||||
expect(helpIndex).toBeLessThan(dismissIndex);
|
||||
});
|
||||
});
|
||||
|
||||
describe('legacy format items', () => {
|
||||
it('should add * prefix to triggers', () => {
|
||||
const menuItems = [{ trigger: 'help', description: 'Help', action: 'show_help' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('cmd="*help"');
|
||||
expect(xml).not.toContain('cmd="help"'); // Should not have unprefixed version
|
||||
});
|
||||
|
||||
it('should preserve * prefix if already present', () => {
|
||||
const menuItems = [{ trigger: '*custom', description: 'Custom', action: 'custom_action' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('cmd="*custom"');
|
||||
expect(xml).not.toContain('cmd="**custom"'); // Should not double-prefix
|
||||
});
|
||||
|
||||
it('should include description as item content', () => {
|
||||
const menuItems = [{ trigger: 'analyze', description: '[A] Analyze code', action: 'analyze' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('>[A] Analyze code</item>');
|
||||
});
|
||||
|
||||
it('should escape XML special characters in description', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
trigger: 'test',
|
||||
description: 'Test <brackets> & "quotes"',
|
||||
action: 'test',
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('<brackets> & "quotes"');
|
||||
});
|
||||
});
|
||||
|
||||
describe('handler attributes', () => {
|
||||
it('should include workflow attribute', () => {
|
||||
const menuItems = [{ trigger: 'start', description: 'Start workflow', workflow: 'main-workflow' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('workflow="main-workflow"');
|
||||
});
|
||||
|
||||
it('should include exec attribute', () => {
|
||||
const menuItems = [{ trigger: 'run', description: 'Run task', exec: 'path/to/task.md' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('exec="path/to/task.md"');
|
||||
});
|
||||
|
||||
it('should include action attribute', () => {
|
||||
const menuItems = [{ trigger: 'help', description: 'Help', action: 'show_help' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('action="show_help"');
|
||||
});
|
||||
|
||||
it('should include tmpl attribute', () => {
|
||||
const menuItems = [{ trigger: 'form', description: 'Form', tmpl: 'templates/form.yaml' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('tmpl="templates/form.yaml"');
|
||||
});
|
||||
|
||||
it('should include data attribute', () => {
|
||||
const menuItems = [{ trigger: 'load', description: 'Load', data: 'data/config.json' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('data="data/config.json"');
|
||||
});
|
||||
|
||||
it('should include validate-workflow attribute', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
trigger: 'validate',
|
||||
description: 'Validate',
|
||||
'validate-workflow': 'validation-flow',
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('validate-workflow="validation-flow"');
|
||||
});
|
||||
|
||||
it('should prioritize workflow-install over workflow', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
trigger: 'start',
|
||||
description: 'Start',
|
||||
workflow: 'original',
|
||||
'workflow-install': 'installed-location',
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('workflow="installed-location"');
|
||||
expect(xml).not.toContain('workflow="original"');
|
||||
});
|
||||
|
||||
it('should handle multiple attributes on same item', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
trigger: 'complex',
|
||||
description: 'Complex command',
|
||||
workflow: 'flow',
|
||||
data: 'data.json',
|
||||
action: 'custom',
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('workflow="flow"');
|
||||
expect(xml).toContain('data="data.json"');
|
||||
expect(xml).toContain('action="custom"');
|
||||
});
|
||||
});
|
||||
|
||||
describe('IDE and web filtering', () => {
|
||||
it('should include ide-only items for IDE installation', () => {
|
||||
const menuItems = [
|
||||
{ trigger: 'local', description: 'Local only', action: 'local', 'ide-only': true },
|
||||
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems, false);
|
||||
|
||||
expect(xml).toContain('*local');
|
||||
expect(xml).toContain('*normal');
|
||||
});
|
||||
|
||||
it('should skip ide-only items for web bundle', () => {
|
||||
const menuItems = [
|
||||
{ trigger: 'local', description: 'Local only', action: 'local', 'ide-only': true },
|
||||
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems, true);
|
||||
|
||||
expect(xml).not.toContain('*local');
|
||||
expect(xml).toContain('*normal');
|
||||
});
|
||||
|
||||
it('should include web-only items for web bundle', () => {
|
||||
const menuItems = [
|
||||
{ trigger: 'web', description: 'Web only', action: 'web', 'web-only': true },
|
||||
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems, true);
|
||||
|
||||
expect(xml).toContain('*web');
|
||||
expect(xml).toContain('*normal');
|
||||
});
|
||||
|
||||
it('should skip web-only items for IDE installation', () => {
|
||||
const menuItems = [
|
||||
{ trigger: 'web', description: 'Web only', action: 'web', 'web-only': true },
|
||||
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems, false);
|
||||
|
||||
expect(xml).not.toContain('*web');
|
||||
expect(xml).toContain('*normal');
|
||||
});
|
||||
});
|
||||
|
||||
describe('multi format with nested handlers', () => {
|
||||
it('should build multi format items with nested handlers', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
multi: '[TS] Technical Specification',
|
||||
triggers: [
|
||||
{
|
||||
'tech-spec': [{ input: 'Create technical specification' }, { route: 'workflows/tech-spec.yaml' }],
|
||||
},
|
||||
{
|
||||
TS: [{ input: 'Create technical specification' }, { route: 'workflows/tech-spec.yaml' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('<item type="multi">');
|
||||
expect(xml).toContain('[TS] Technical Specification');
|
||||
expect(xml).toContain('<handler');
|
||||
expect(xml).toContain('match="Create technical specification"');
|
||||
expect(xml).toContain('</item>');
|
||||
});
|
||||
|
||||
it('should escape XML in multi description', () => {
|
||||
const menuItems = [
|
||||
{
|
||||
multi: '[A] Analyze <code>',
|
||||
triggers: [
|
||||
{
|
||||
analyze: [{ input: 'Analyze', route: 'task.md' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('&lt;code&gt;');
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty menu items array', () => {
|
||||
const xml = builder.buildCommandsXml([]);
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
expect(xml).toContain('</menu>');
|
||||
expect(xml).toContain('*menu');
|
||||
expect(xml).toContain('*dismiss');
|
||||
});
|
||||
|
||||
it('should handle null menu items', () => {
|
||||
const xml = builder.buildCommandsXml(null);
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
expect(xml).toContain('*menu');
|
||||
expect(xml).toContain('*dismiss');
|
||||
});
|
||||
|
||||
it('should handle undefined menu items', () => {
|
||||
const xml = builder.buildCommandsXml();
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
});
|
||||
|
||||
it('should handle empty description', () => {
|
||||
const menuItems = [{ trigger: 'test', description: '', action: 'test' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('cmd="*test"');
|
||||
expect(xml).toContain('></item>'); // Empty content between tags
|
||||
});
|
||||
|
||||
it('should handle missing trigger (edge case)', () => {
|
||||
const menuItems = [{ description: 'No trigger', action: 'test' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
// Should handle gracefully - might skip or add * prefix to empty
|
||||
expect(xml).toContain('<menu>');
|
||||
});
|
||||
|
||||
it('should handle Unicode in descriptions', () => {
|
||||
const menuItems = [{ trigger: 'test', description: '[测试] Test 日本語', action: 'test' }];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
expect(xml).toContain('测试');
|
||||
expect(xml).toContain('日本語');
|
||||
});
|
||||
});
|
||||
|
||||
describe('multiple menu items', () => {
|
||||
it('should process all menu items in order', () => {
|
||||
const menuItems = [
|
||||
{ trigger: 'first', description: 'First', action: 'first' },
|
||||
{ trigger: 'second', description: 'Second', action: 'second' },
|
||||
{ trigger: 'third', description: 'Third', action: 'third' },
|
||||
];
|
||||
|
||||
const xml = builder.buildCommandsXml(menuItems);
|
||||
|
||||
const firstIndex = xml.indexOf('*first');
|
||||
const secondIndex = xml.indexOf('*second');
|
||||
const thirdIndex = xml.indexOf('*third');
|
||||
|
||||
expect(firstIndex).toBeLessThan(secondIndex);
|
||||
expect(secondIndex).toBeLessThan(thirdIndex);
|
||||
});
|
||||
});
|
||||
});
@ -0,0 +1,605 @@
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||
|
||||
describe('YamlXmlBuilder - convertToXml()', () => {
|
||||
let builder;
|
||||
|
||||
beforeEach(() => {
|
||||
builder = new YamlXmlBuilder();
|
||||
});
|
||||
|
||||
describe('basic XML generation', () => {
|
||||
it('should generate XML with agent tag and attributes', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
id: 'test-agent',
|
||||
name: 'Test Agent',
|
||||
title: 'Test Agent Title',
|
||||
icon: '🔧',
|
||||
},
|
||||
persona: {
|
||||
role: 'Test Role',
|
||||
identity: 'Test Identity',
|
||||
communication_style: 'Professional',
|
||||
principles: ['Principle 1'],
|
||||
},
|
||||
menu: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<agent id="test-agent"');
|
||||
expect(xml).toContain('name="Test Agent"');
|
||||
expect(xml).toContain('title="Test Agent Title"');
|
||||
expect(xml).toContain('icon="🔧"');
|
||||
expect(xml).toContain('</agent>');
|
||||
});
|
||||
|
||||
it('should include persona section', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Developer',
|
||||
identity: 'Helpful assistant',
|
||||
communication_style: 'Professional',
|
||||
principles: ['Clear', 'Concise'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<persona>');
|
||||
expect(xml).toContain('<role>Developer</role>');
|
||||
expect(xml).toContain('<identity>Helpful assistant</identity>');
|
||||
expect(xml).toContain('<communication_style>Professional</communication_style>');
|
||||
expect(xml).toContain('<principles>Clear Concise</principles>');
|
||||
});
|
||||
|
||||
it('should include memories section if present', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
memories: ['Memory 1', 'Memory 2'],
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<memories>');
|
||||
expect(xml).toContain('<memory>Memory 1</memory>');
|
||||
expect(xml).toContain('<memory>Memory 2</memory>');
|
||||
});
|
||||
|
||||
it('should include prompts section if present', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
prompts: [{ id: 'p1', content: 'Prompt content' }],
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<prompts>');
|
||||
expect(xml).toContain('<prompt id="p1">');
|
||||
expect(xml).toContain('Prompt content');
|
||||
});
|
||||
|
||||
it('should include menu section', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [
|
||||
{ trigger: 'help', description: 'Show help', action: 'show_help' },
|
||||
{ trigger: 'start', description: 'Start workflow', workflow: 'main' },
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
expect(xml).toContain('</menu>');
|
||||
// Menu always includes injected *menu item
|
||||
expect(xml).toContain('*menu');
|
||||
});
|
||||
});
|
||||
|
||||
describe('XML escaping', () => {
|
||||
it('should escape special characters in all fields', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
id: 'test',
|
||||
name: 'Test',
|
||||
title: 'Test Agent',
|
||||
icon: '🔧',
|
||||
},
|
||||
persona: {
|
||||
role: 'Role with <brackets>',
|
||||
identity: 'Identity with & ampersand',
|
||||
communication_style: 'Style with "quotes"',
|
||||
principles: ["Principle with ' apostrophe"],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
// Metadata in attributes might not be escaped - focus on content
expect(xml).toContain('&lt;brackets&gt;');
expect(xml).toContain('&amp; ampersand');
expect(xml).toContain('&quot;quotes&quot;');
expect(xml).toContain('&apos; apostrophe');
|
||||
});
|
||||
|
||||
it('should preserve Unicode characters', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
id: 'unicode',
|
||||
name: '测试代理',
|
||||
title: 'Тестовый агент',
|
||||
icon: '🔧',
|
||||
},
|
||||
persona: {
|
||||
role: '開発者',
|
||||
identity: 'مساعد مفيد',
|
||||
communication_style: 'Profesional',
|
||||
principles: ['原则'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('测试代理');
|
||||
expect(xml).toContain('Тестовый агент');
|
||||
expect(xml).toContain('開発者');
|
||||
expect(xml).toContain('مساعد مفيد');
|
||||
expect(xml).toContain('原则');
|
||||
});
|
||||
});
|
||||
|
||||
describe('module detection', () => {
|
||||
it('should handle module in buildMetadata', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
module: 'bmm',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Module is stored in metadata but may not be rendered as attribute
|
||||
expect(xml).toContain('<agent');
|
||||
expect(xml).toBeDefined();
|
||||
});
|
||||
|
||||
it('should not include module attribute for core agents', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
// No module attribute for core
|
||||
expect(xml).not.toContain('module=');
|
||||
});
|
||||
});
|
||||
|
||||
describe('output format variations', () => {
|
||||
it('should generate installation format with YAML frontmatter', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
sourceFile: 'test-agent.yaml',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Installation format has YAML frontmatter
|
||||
expect(xml).toMatch(/^---\n/);
|
||||
expect(xml).toContain('name: "test agent"'); // Derived from filename
|
||||
expect(xml).toContain('description: "Test Agent"');
|
||||
expect(xml).toContain('---');
|
||||
});
|
||||
|
||||
it('should generate web bundle format without frontmatter', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
forWebBundle: true,
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Web bundle format has comment header
|
||||
expect(xml).toContain('<!-- Powered by BMAD-CORE™ -->');
|
||||
expect(xml).toContain('# Test Agent');
|
||||
expect(xml).not.toMatch(/^---\n/);
|
||||
});
|
||||
|
||||
it('should derive name from filename (remove .agent suffix)', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'pm', name: 'PM', title: 'Product Manager', icon: '📋' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
sourceFile: 'pm.agent.yaml',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Should convert pm.agent.yaml → "pm"
|
||||
expect(xml).toContain('name: "pm"');
|
||||
});
|
||||
|
||||
it('should convert hyphens to spaces in filename', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'cli', name: 'CLI', title: 'CLI Chief', icon: '⚙️' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
sourceFile: 'cli-chief.yaml',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Should convert cli-chief.yaml → "cli chief"
|
||||
expect(xml).toContain('name: "cli chief"');
|
||||
});
|
||||
});
|
||||
|
||||
describe('localskip attribute', () => {
|
||||
it('should add localskip="true" when metadata has localskip', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
id: 'web-only',
|
||||
name: 'Web Only',
|
||||
title: 'Web Only Agent',
|
||||
icon: '🌐',
|
||||
localskip: true,
|
||||
},
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('localskip="true"');
|
||||
});
|
||||
|
||||
it('should not add localskip when false or missing', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).not.toContain('localskip=');
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty menu array', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
expect(xml).toContain('</menu>');
|
||||
// Should still have injected *menu item
|
||||
expect(xml).toContain('*menu');
|
||||
});
|
||||
|
||||
it('should handle missing memories', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).not.toContain('<memories>');
|
||||
});
|
||||
|
||||
it('should handle missing prompts', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).not.toContain('<prompts>');
|
||||
});
|
||||
|
||||
it('should wrap XML in markdown code fence', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('```xml');
|
||||
expect(xml).toContain('```\n');
|
||||
});
|
||||
|
||||
it('should include activation instruction for installation format', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
sourceFile: 'test.yaml',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
expect(xml).toContain('You must fully embody this agent');
|
||||
expect(xml).toContain('NEVER break character');
|
||||
});
|
||||
|
||||
it('should not include activation instruction for web bundle', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
forWebBundle: true,
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
expect(xml).not.toContain('You must fully embody');
|
||||
expect(xml).toContain('<!-- Powered by BMAD-CORE™ -->');
|
||||
});
|
||||
});
|
||||
|
||||
describe('legacy commands field support', () => {
|
||||
it('should handle legacy "commands" field (renamed to menu)', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
commands: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
expect(xml).toContain('<menu>');
|
||||
// Should process commands as menu items
|
||||
});
|
||||
|
||||
it('should prioritize menu over commands when both exist', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P'],
|
||||
},
|
||||
menu: [{ trigger: 'new', description: 'New', action: 'new_action' }],
|
||||
commands: [{ trigger: 'old', description: 'Old', action: 'old_action' }],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||
|
||||
// Should use menu, not commands
|
||||
expect(xml).toContain('<menu>');
|
||||
});
|
||||
});
|
||||
|
||||
describe('complete agent transformation', () => {
|
||||
it('should transform a complete agent with all fields', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
id: 'full-agent',
|
||||
name: 'Full Agent',
|
||||
title: 'Complete Test Agent',
|
||||
icon: '🤖',
|
||||
},
|
||||
persona: {
|
||||
role: 'Full Stack Developer',
|
||||
identity: 'Experienced software engineer',
|
||||
communication_style: 'Clear and professional',
|
||||
principles: ['Quality', 'Performance', 'Maintainability'],
|
||||
},
|
||||
memories: ['Remember project context', 'Track user preferences'],
|
||||
prompts: [
|
||||
{ id: 'init', content: 'Initialize the agent' },
|
||||
{ id: 'task', content: 'Process the task' },
|
||||
],
|
||||
critical_actions: ['Never delete data', 'Always backup'],
|
||||
menu: [
|
||||
{ trigger: 'help', description: '[H] Show help', action: 'show_help' },
|
||||
{ trigger: 'start', description: '[S] Start workflow', workflow: 'main' },
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const xml = await builder.convertToXml(agentYaml, {
|
||||
sourceFile: 'full-agent.yaml',
|
||||
module: 'bmm',
|
||||
skipActivation: true,
|
||||
});
|
||||
|
||||
// Verify all sections are present
|
||||
expect(xml).toContain('```xml');
|
||||
expect(xml).toContain('<agent id="full-agent"');
|
||||
expect(xml).toContain('<persona>');
|
||||
expect(xml).toContain('<memories>');
|
||||
expect(xml).toContain('<prompts>');
|
||||
expect(xml).toContain('<menu>');
|
||||
expect(xml).toContain('</agent>');
|
||||
expect(xml).toContain('```');
|
||||
// Verify persona content
|
||||
expect(xml).toContain('Full Stack Developer');
|
||||
// Verify memories
|
||||
expect(xml).toContain('Remember project context');
|
||||
// Verify prompts
|
||||
expect(xml).toContain('Initialize the agent');
|
||||
});
|
||||
});
|
||||
});
@ -0,0 +1,636 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'node:path';
|
||||
import yaml from 'yaml';
|
||||
|
||||
describe('YamlXmlBuilder', () => {
|
||||
let tmpDir;
|
||||
let builder;
|
||||
|
||||
beforeEach(async () => {
|
||||
tmpDir = await createTempDir();
|
||||
builder = new YamlXmlBuilder();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupTempDir(tmpDir);
|
||||
});
|
||||
|
||||
describe('deepMerge()', () => {
|
||||
it('should merge shallow objects', () => {
|
||||
const target = { a: 1, b: 2 };
|
||||
const source = { b: 3, c: 4 };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 3, c: 4 });
|
||||
});
|
||||
|
||||
it('should merge nested objects', () => {
|
||||
const target = { level1: { a: 1, b: 2 } };
|
||||
const source = { level1: { b: 3, c: 4 } };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ level1: { a: 1, b: 3, c: 4 } });
|
||||
});
|
||||
|
||||
it('should merge deeply nested objects', () => {
|
||||
const target = { l1: { l2: { l3: { value: 'old' } } } };
|
||||
const source = { l1: { l2: { l3: { value: 'new', extra: 'data' } } } };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ l1: { l2: { l3: { value: 'new', extra: 'data' } } } });
|
||||
});
|
||||
|
||||
it('should append arrays instead of replacing', () => {
|
||||
const target = { items: [1, 2, 3] };
|
||||
const source = { items: [4, 5, 6] };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result.items).toEqual([1, 2, 3, 4, 5, 6]);
|
||||
});
|
||||
|
||||
it('should handle arrays in nested objects', () => {
|
||||
const target = { config: { values: ['a', 'b'] } };
|
||||
const source = { config: { values: ['c', 'd'] } };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result.config.values).toEqual(['a', 'b', 'c', 'd']);
|
||||
});
|
||||
|
||||
it('should replace arrays if target is not an array', () => {
|
||||
const target = { items: 'string' };
|
||||
const source = { items: ['a', 'b'] };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result.items).toEqual(['a', 'b']);
|
||||
});
|
||||
|
||||
it('should handle null values', () => {
|
||||
const target = { a: null, b: 2 };
|
||||
const source = { a: 1, c: null };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 2, c: null });
|
||||
});
|
||||
|
||||
it('should preserve target values when source has no override', () => {
|
||||
const target = { a: 1, b: 2, c: 3 };
|
||||
const source = { d: 4 };
|
||||
|
||||
const result = builder.deepMerge(target, source);
|
||||
|
||||
expect(result).toEqual({ a: 1, b: 2, c: 3, d: 4 });
|
||||
});
|
||||
|
||||
it('should not mutate original objects', () => {
|
||||
const target = { a: 1 };
|
||||
const source = { b: 2 };
|
||||
|
||||
builder.deepMerge(target, source);
|
||||
|
||||
expect(target).toEqual({ a: 1 }); // Unchanged
|
||||
expect(source).toEqual({ b: 2 }); // Unchanged
|
||||
});
|
||||
});
|
||||
|
||||
describe('isObject()', () => {
|
||||
it('should return true for plain objects', () => {
|
||||
expect(builder.isObject({})).toBe(true);
|
||||
expect(builder.isObject({ key: 'value' })).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for arrays', () => {
|
||||
expect(builder.isObject([])).toBe(false);
|
||||
expect(builder.isObject([1, 2, 3])).toBe(false);
|
||||
});
|
||||
|
||||
it('should return falsy for null', () => {
|
||||
expect(builder.isObject(null)).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should return falsy for primitives', () => {
|
||||
expect(builder.isObject('string')).toBeFalsy();
|
||||
expect(builder.isObject(42)).toBeFalsy();
|
||||
expect(builder.isObject(true)).toBeFalsy();
|
||||
expect(builder.isObject()).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadAndMergeAgent()', () => {
|
||||
it('should load agent YAML without customization', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Test Role',
|
||||
identity: 'Test Identity',
|
||||
communication_style: 'Professional',
|
||||
principles: ['Principle 1'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const agentPath = path.join(tmpDir, 'agent.yaml');
|
||||
await fs.writeFile(agentPath, yaml.stringify(agentYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(agentPath);
|
||||
|
||||
expect(result.agent.metadata.id).toBe('test');
|
||||
expect(result.agent.persona.role).toBe('Test Role');
|
||||
});
|
||||
|
||||
it('should preserve base persona when customize has empty strings', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Base Role',
|
||||
identity: 'Base Identity',
|
||||
communication_style: 'Base Style',
|
||||
principles: ['Base Principle'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
persona: {
|
||||
role: 'Custom Role',
|
||||
identity: '', // Empty - should NOT override
|
||||
communication_style: 'Custom Style',
|
||||
// principles omitted
|
||||
},
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.persona.role).toBe('Custom Role'); // Overridden
|
||||
expect(result.agent.persona.identity).toBe('Base Identity'); // Preserved
|
||||
expect(result.agent.persona.communication_style).toBe('Custom Style'); // Overridden
|
||||
expect(result.agent.persona.principles).toEqual(['Base Principle']); // Preserved
|
||||
});
|
||||
|
||||
it('should preserve base persona when customize has null values', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Base Role',
|
||||
identity: 'Base Identity',
|
||||
communication_style: 'Base Style',
|
||||
principles: ['Base'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
persona: {
|
||||
role: null,
|
||||
identity: 'Custom Identity',
|
||||
},
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.persona.role).toBe('Base Role'); // Preserved (null skipped)
|
||||
expect(result.agent.persona.identity).toBe('Custom Identity'); // Overridden
|
||||
});
|
||||
|
||||
it('should preserve base persona when customize has empty arrays', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: {
|
||||
role: 'Base Role',
|
||||
identity: 'Base Identity',
|
||||
communication_style: 'Base Style',
|
||||
principles: ['Principle 1', 'Principle 2'],
|
||||
},
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
persona: {
|
||||
principles: [], // Empty array - should NOT override
|
||||
},
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.persona.principles).toEqual(['Principle 1', 'Principle 2']);
|
||||
});
|
||||
|
||||
it('should append menu items from customize', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
menu: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
menu: [{ trigger: 'custom', description: 'Custom', action: 'custom_action' }],
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.menu).toHaveLength(2);
|
||||
expect(result.agent.menu[0].trigger).toBe('help');
|
||||
expect(result.agent.menu[1].trigger).toBe('custom');
|
||||
});
|
||||
|
||||
it('should append critical_actions from customize', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
critical_actions: ['Action 1'],
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
critical_actions: ['Action 2', 'Action 3'],
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.critical_actions).toHaveLength(3);
|
||||
expect(result.agent.critical_actions).toEqual(['Action 1', 'Action 2', 'Action 3']);
|
||||
});
|
||||
|
||||
it('should append prompts from customize', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
prompts: [{ id: 'p1', content: 'Prompt 1' }],
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
prompts: [{ id: 'p2', content: 'Prompt 2' }],
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.prompts).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should handle missing customization file', async () => {
|
||||
const agentYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const agentPath = path.join(tmpDir, 'agent.yaml');
|
||||
await fs.writeFile(agentPath, yaml.stringify(agentYaml));
|
||||
|
||||
const nonExistent = path.join(tmpDir, 'nonexistent.yaml');
|
||||
const result = await builder.loadAndMergeAgent(agentPath, nonExistent);
|
||||
|
||||
expect(result.agent.metadata.id).toBe('test');
|
||||
});
|
||||
|
||||
it('should handle legacy commands field (renamed to menu)', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
commands: [{ trigger: 'old', description: 'Old', action: 'old_action' }],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
commands: [{ trigger: 'new', description: 'New', action: 'new_action' }],
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.commands).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('should override metadata with non-empty values', async () => {
|
||||
const baseYaml = {
|
||||
agent: {
|
||||
metadata: { id: 'base', name: 'Base Name', title: 'Base Title', icon: '🔧' },
|
||||
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||
menu: [],
|
||||
},
|
||||
};
|
||||
|
||||
const customizeYaml = {
|
||||
agent: {
|
||||
metadata: {
|
||||
name: 'Custom Name',
|
||||
title: '', // Empty - should be skipped
|
||||
icon: '🎯',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const basePath = path.join(tmpDir, 'base.yaml');
|
||||
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||
|
||||
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||
|
||||
expect(result.agent.metadata.name).toBe('Custom Name');
|
||||
expect(result.agent.metadata.title).toBe('Base Title'); // Preserved
|
||||
expect(result.agent.metadata.icon).toBe('🎯');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildPersonaXml()', () => {
|
||||
it('should build complete persona XML', () => {
|
||||
const persona = {
|
||||
role: 'Test Role',
|
||||
identity: 'Test Identity',
|
||||
communication_style: 'Professional',
|
||||
principles: ['Principle 1', 'Principle 2', 'Principle 3'],
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('<persona>');
|
||||
expect(xml).toContain('</persona>');
|
||||
expect(xml).toContain('<role>Test Role</role>');
|
||||
expect(xml).toContain('<identity>Test Identity</identity>');
|
||||
expect(xml).toContain('<communication_style>Professional</communication_style>');
|
||||
expect(xml).toContain('<principles>Principle 1 Principle 2 Principle 3</principles>');
|
||||
});
|
||||
|
||||
it('should escape XML special characters in persona', () => {
|
||||
const persona = {
|
||||
role: 'Role with <tags> & "quotes"',
|
||||
identity: "O'Reilly's Identity",
|
||||
communication_style: 'Use <code> tags',
|
||||
principles: ['Principle with & ampersand'],
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('&lt;tags&gt; &amp; &quot;quotes&quot;');
expect(xml).toContain('O&apos;Reilly&apos;s Identity');
expect(xml).toContain('&lt;code&gt; tags');
expect(xml).toContain('&amp; ampersand');
|
||||
});
|
||||
|
||||
it('should handle principles as array', () => {
|
||||
const persona = {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: ['P1', 'P2', 'P3'],
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('<principles>P1 P2 P3</principles>');
|
||||
});
|
||||
|
||||
it('should handle principles as string', () => {
|
||||
const persona = {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
principles: 'Single principle string',
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('<principles>Single principle string</principles>');
|
||||
});
|
||||
|
||||
it('should preserve Unicode in persona fields', () => {
|
||||
const persona = {
|
||||
role: 'Тестовая роль',
|
||||
identity: '日本語のアイデンティティ',
|
||||
communication_style: 'Estilo profesional',
|
||||
principles: ['原则一', 'Принцип два'],
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('Тестовая роль');
|
||||
expect(xml).toContain('日本語のアイデンティティ');
|
||||
expect(xml).toContain('Estilo profesional');
|
||||
expect(xml).toContain('原则一 Принцип два');
|
||||
});
|
||||
|
||||
it('should handle missing persona gracefully', () => {
|
||||
const xml = builder.buildPersonaXml(null);
|
||||
|
||||
expect(xml).toBe('');
|
||||
});
|
||||
|
||||
it('should handle partial persona (missing optional fields)', () => {
|
||||
const persona = {
|
||||
role: 'Role',
|
||||
identity: 'ID',
|
||||
communication_style: 'Style',
|
||||
// principles missing
|
||||
};
|
||||
|
||||
const xml = builder.buildPersonaXml(persona);
|
||||
|
||||
expect(xml).toContain('<role>Role</role>');
|
||||
expect(xml).toContain('<identity>ID</identity>');
|
||||
expect(xml).toContain('<communication_style>Style</communication_style>');
|
||||
expect(xml).not.toContain('<principles>');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildMemoriesXml()', () => {
|
||||
it('should build memories XML from array', () => {
|
||||
const memories = ['Memory 1', 'Memory 2', 'Memory 3'];
|
||||
|
||||
const xml = builder.buildMemoriesXml(memories);
|
||||
|
||||
expect(xml).toContain('<memories>');
|
||||
expect(xml).toContain('</memories>');
|
||||
expect(xml).toContain('<memory>Memory 1</memory>');
|
||||
expect(xml).toContain('<memory>Memory 2</memory>');
|
||||
expect(xml).toContain('<memory>Memory 3</memory>');
|
||||
});
|
||||
|
||||
it('should escape XML special characters in memories', () => {
|
||||
const memories = ['Memory with <tags>', 'Memory with & ampersand', 'Memory with "quotes"'];
|
||||
|
||||
const xml = builder.buildMemoriesXml(memories);
|
||||
|
||||
expect(xml).toContain('&lt;tags&gt;');
expect(xml).toContain('&amp; ampersand');
expect(xml).toContain('&quot;quotes&quot;');
|
||||
});
|
||||
|
||||
it('should return empty string for null memories', () => {
|
||||
expect(builder.buildMemoriesXml(null)).toBe('');
|
||||
});
|
||||
|
||||
it('should return empty string for empty array', () => {
|
||||
expect(builder.buildMemoriesXml([])).toBe('');
|
||||
});
|
||||
|
||||
it('should handle Unicode in memories', () => {
|
||||
const memories = ['记忆 1', 'Память 2', '記憶 3'];
|
||||
|
||||
const xml = builder.buildMemoriesXml(memories);
|
||||
|
||||
expect(xml).toContain('记忆 1');
|
||||
expect(xml).toContain('Память 2');
|
||||
expect(xml).toContain('記憶 3');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildPromptsXml()', () => {
|
||||
it('should build prompts XML from array format', () => {
|
||||
const prompts = [
|
||||
{ id: 'p1', content: 'Prompt 1 content' },
|
||||
{ id: 'p2', content: 'Prompt 2 content' },
|
||||
];
|
||||
|
||||
const xml = builder.buildPromptsXml(prompts);
|
||||
|
||||
expect(xml).toContain('<prompts>');
|
||||
expect(xml).toContain('</prompts>');
|
||||
expect(xml).toContain('<prompt id="p1">');
|
||||
expect(xml).toContain('<content>');
|
||||
expect(xml).toContain('Prompt 1 content');
|
||||
expect(xml).toContain('<prompt id="p2">');
|
||||
expect(xml).toContain('Prompt 2 content');
|
||||
});
|
||||
|
||||
it('should escape XML special characters in prompts', () => {
|
||||
const prompts = [{ id: 'test', content: 'Content with <tags> & "quotes"' }];
|
||||
|
||||
const xml = builder.buildPromptsXml(prompts);
|
||||
|
||||
expect(xml).toContain('<content>');
|
||||
expect(xml).toContain('&lt;tags&gt; &amp; &quot;quotes&quot;');
|
||||
});
|
||||
|
||||
it('should return empty string for null prompts', () => {
|
||||
expect(builder.buildPromptsXml(null)).toBe('');
|
||||
});
|
||||
|
||||
it('should handle Unicode in prompts', () => {
|
||||
const prompts = [{ id: 'unicode', content: 'Test 测试 тест テスト' }];
|
||||
|
||||
const xml = builder.buildPromptsXml(prompts);
|
||||
|
||||
expect(xml).toContain('<content>');
|
||||
expect(xml).toContain('测试 тест テスト');
|
||||
});
|
||||
|
||||
it('should handle object/dictionary format prompts', () => {
|
||||
const prompts = {
|
||||
p1: 'Prompt 1 content',
|
||||
p2: 'Prompt 2 content',
|
||||
};
|
||||
|
||||
const xml = builder.buildPromptsXml(prompts);
|
||||
|
||||
expect(xml).toContain('<prompts>');
|
||||
expect(xml).toContain('<prompt id="p1">');
|
||||
expect(xml).toContain('Prompt 1 content');
|
||||
expect(xml).toContain('<prompt id="p2">');
|
||||
expect(xml).toContain('Prompt 2 content');
|
||||
});
|
||||
|
||||
it('should return empty string for empty array', () => {
|
||||
expect(builder.buildPromptsXml([])).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateFileHash()', () => {
|
||||
it('should calculate MD5 hash of file content', async () => {
|
||||
const content = 'test content for hashing';
|
||||
const filePath = await createTestFile(tmpDir, 'test.txt', content);
|
||||
|
||||
const hash = await builder.calculateFileHash(filePath);
|
||||
|
||||
expect(hash).toHaveLength(8); // MD5 truncated to 8 chars
|
||||
expect(hash).toMatch(/^[a-f0-9]{8}$/);
|
||||
});
|
||||
|
||||
it('should return consistent hash for same content', async () => {
|
||||
const file1 = await createTestFile(tmpDir, 'file1.txt', 'content');
|
||||
const file2 = await createTestFile(tmpDir, 'file2.txt', 'content');
|
||||
|
||||
const hash1 = await builder.calculateFileHash(file1);
|
||||
const hash2 = await builder.calculateFileHash(file2);
|
||||
|
||||
expect(hash1).toBe(hash2);
|
||||
});
|
||||
|
||||
it('should return null for non-existent file', async () => {
|
||||
const nonExistent = path.join(tmpDir, 'missing.txt');
|
||||
|
||||
const hash = await builder.calculateFileHash(nonExistent);
|
||||
|
||||
expect(hash).toBeNull();
|
||||
});
|
||||
|
||||
it('should handle empty file', async () => {
|
||||
const file = await createTestFile(tmpDir, 'empty.txt', '');
|
||||
|
||||
const hash = await builder.calculateFileHash(file);
|
||||
|
||||
expect(hash).toHaveLength(8);
|
||||
});
|
||||
});
|
||||
});
@ -0,0 +1,84 @@
import { describe, it, expect } from 'vitest';
|
||||
import { escapeXml } from '../../../tools/lib/xml-utils.js';
|
||||
|
||||
describe('xml-utils', () => {
|
||||
describe('escapeXml()', () => {
|
||||
it('should escape ampersand (&) to &amp;', () => {
expect(escapeXml('Tom & Jerry')).toBe('Tom &amp; Jerry');
});

it('should escape less than (<) to &lt;', () => {
expect(escapeXml('5 < 10')).toBe('5 &lt; 10');
});

it('should escape greater than (>) to &gt;', () => {
expect(escapeXml('10 > 5')).toBe('10 &gt; 5');
});

it('should escape double quote (") to &quot;', () => {
expect(escapeXml('He said "hello"')).toBe('He said &quot;hello&quot;');
});
|
||||
|
||||
it("should escape single quote (') to '", () => {
|
||||
expect(escapeXml("It's working")).toBe('It's working');
|
||||
});
|
||||
|
||||
it('should preserve Unicode characters', () => {
|
||||
expect(escapeXml('Hello 世界 🌍')).toBe('Hello 世界 🌍');
|
||||
});
|
||||
|
||||
it('should escape multiple special characters in sequence', () => {
expect(escapeXml('<tag attr="value">')).toBe('&lt;tag attr=&quot;value&quot;&gt;');
});

it('should escape all five special characters together', () => {
expect(escapeXml(`&<>"'`)).toBe('&amp;&lt;&gt;&quot;&apos;');
});
|
||||
|
||||
it('should handle empty string', () => {
|
||||
expect(escapeXml('')).toBe('');
|
||||
});
|
||||
|
||||
it('should handle null', () => {
|
||||
expect(escapeXml(null)).toBe('');
|
||||
});
|
||||
|
||||
it('should handle undefined', () => {
|
||||
expect(escapeXml()).toBe('');
|
||||
});
|
||||
|
||||
it('should handle text with no special characters', () => {
|
||||
expect(escapeXml('Hello World')).toBe('Hello World');
|
||||
});
|
||||
|
||||
it('should handle text that is only special characters', () => {
expect(escapeXml('&&&')).toBe('&amp;&amp;&amp;');
});

it('should not double-escape already escaped entities', () => {
// Note: This is expected behavior - the function WILL double-escape
// This test documents the actual behavior
expect(escapeXml('&amp;')).toBe('&amp;amp;');
});

it('should escape special characters in XML content', () => {
const xmlContent = '<persona role="Developer & Architect">Use <code> tags</persona>';
const expected = '&lt;persona role=&quot;Developer &amp; Architect&quot;&gt;Use &lt;code&gt; tags&lt;/persona&gt;';
expect(escapeXml(xmlContent)).toBe(expected);
});

it('should handle mixed Unicode and special characters', () => {
expect(escapeXml('测试 <tag> & "quotes"')).toBe('测试 &lt;tag&gt; &amp; &quot;quotes&quot;');
});

it('should handle newlines and special characters', () => {
const multiline = 'Line 1 & text\n<Line 2>\n"Line 3"';
const expected = 'Line 1 &amp; text\n&lt;Line 2&gt;\n&quot;Line 3&quot;';
expect(escapeXml(multiline)).toBe(expected);
});
|
||||
|
||||
it('should handle string with only whitespace', () => {
|
||||
expect(escapeXml(' ')).toBe(' ');
|
||||
});
|
||||
});
|
||||
});
@ -167,50 +167,31 @@ class Installer {
if (newlySelectedIdes.length > 0) {
|
||||
console.log('\n'); // Add spacing before IDE questions
|
||||
|
||||
// Use IdeManager to get handlers (supports both config-driven and custom IDEs)
|
||||
await this.ideManager.ensureInitialized();
|
||||
|
||||
for (const ide of newlySelectedIdes) {
|
||||
// List of IDEs that have interactive prompts
|
||||
//TODO: Why is this here, hardcoding this list here is bad, fix me!
|
||||
const needsPrompts = ['claude-code', 'github-copilot', 'roo', 'cline', 'auggie', 'codex', 'qwen', 'gemini', 'rovo-dev'].includes(
|
||||
ide,
|
||||
);
|
||||
try {
|
||||
const handler = this.ideManager.handlers.get(ide);
|
||||
|
||||
if (needsPrompts) {
|
||||
// Get IDE handler and collect configuration
|
||||
try {
|
||||
// Dynamically load the IDE setup module
|
||||
const ideModule = require(`../ide/${ide}`);
|
||||
|
||||
// Get the setup class (handle different export formats)
|
||||
let SetupClass;
|
||||
const className =
|
||||
ide
|
||||
.split('-')
|
||||
.map((part) => part.charAt(0).toUpperCase() + part.slice(1))
|
||||
.join('') + 'Setup';
|
||||
|
||||
if (ideModule[className]) {
|
||||
SetupClass = ideModule[className];
|
||||
} else if (ideModule.default) {
|
||||
SetupClass = ideModule.default;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
const ideSetup = new SetupClass();
|
||||
|
||||
// Check if this IDE has a collectConfiguration method
|
||||
if (typeof ideSetup.collectConfiguration === 'function') {
|
||||
console.log(chalk.cyan(`\nConfiguring ${ide}...`));
|
||||
ideConfigurations[ide] = await ideSetup.collectConfiguration({
|
||||
selectedModules: selectedModules || [],
|
||||
projectDir,
|
||||
bmadDir,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// IDE doesn't have a setup file or collectConfiguration method
|
||||
console.warn(chalk.yellow(`Warning: Could not load configuration for ${ide}`));
|
||||
if (!handler) {
|
||||
// IDE not recognized - skip silently
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this IDE handler has a collectConfiguration method
|
||||
if (typeof handler.collectConfiguration === 'function') {
|
||||
console.log(chalk.cyan(`\nConfiguring ${ide}...`));
|
||||
ideConfigurations[ide] = await handler.collectConfiguration({
|
||||
selectedModules: selectedModules || [],
|
||||
projectDir,
|
||||
bmadDir,
|
||||
});
|
||||
}
|
||||
// Most config-driven IDEs don't need configuration - silently skip
|
||||
} catch {
|
||||
// IDE doesn't have collectConfiguration or had an error - skip
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
@ -1,208 +0,0 @@
# IDE Installer Standardization Plan
|
||||
|
||||
## Overview
|
||||
|
||||
Standardize IDE installers to use **flat file naming** with **underscores** (Windows-compatible) and centralize duplicated code in shared utilities.
|
||||
|
||||
**Key Rule: All IDEs use underscore format for Windows compatibility (colons don't work on Windows).**
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
### File Structure Patterns
|
||||
|
||||
| IDE | Current Pattern | Path Format |
|
||||
|-----|-----------------|-------------|
|
||||
| **claude-code** | Hierarchical | `.claude/commands/bmad/{module}/agents/{name}.md` |
|
||||
| **cursor** | Hierarchical | `.cursor/commands/bmad/{module}/agents/{name}.md` |
|
||||
| **crush** | Hierarchical | `.crush/commands/bmad/{module}/agents/{name}.md` |
|
||||
| **antigravity** | Flattened (underscores) | `.agent/workflows/bmad_module_agents_name.md` |
|
||||
| **codex** | Flattened (underscores) | `~/.codex/prompts/bmad_module_agents_name.md` |
|
||||
| **cline** | Flattened (underscores) | `.clinerules/workflows/bmad_module_type_name.md` |
|
||||
| **roo** | Flattened (underscores) | `.roo/commands/bmad_module_agent_name.md` |
|
||||
| **auggie** | Hybrid | `.augment/commands/bmad/agents/{module}-{name}.md` |
|
||||
| **iflow** | Hybrid | `.iflow/commands/bmad/agents/{module}-{name}.md` |
|
||||
| **trae** | Different (rules) | `.trae/rules/bmad-agent-{module}-{name}.md` |
|
||||
| **github-copilot** | Different (agents) | `.github/agents/bmd-custom-{module}-{name}.agent.md` |
|
||||
|
||||
### Shared Generators (in `/shared`)
|
||||
|
||||
1. `agent-command-generator.js` - generates agent launchers
|
||||
2. `task-tool-command-generator.js` - generates task/tool commands
|
||||
3. `workflow-command-generator.js` - generates workflow commands
|
||||
|
||||
All currently create artifacts with **nested relative paths** like `{module}/agents/{name}.md`
|
||||
|
||||
### Code Duplication Issues
|
||||
|
||||
1. **Flattening logic** duplicated in multiple IDEs
|
||||
2. **Agent launcher content creation** duplicated
|
||||
3. **Path transformation** duplicated
|
||||
|
||||
## Target Standardization
|
||||
|
||||
### For All IDEs (underscore format - Windows-compatible)
|
||||
|
||||
**IDEs affected:** claude-code, cursor, crush, antigravity, codex, cline, roo
|
||||
|
||||
```
|
||||
Format: bmad_{module}_{type}_{name}.md
|
||||
|
||||
Examples:
|
||||
- Agent: bmad_bmm_agents_pm.md
|
||||
- Agent: bmad_core_agents_dev.md
|
||||
- Workflow: bmad_bmm_workflows_correct-course.md
|
||||
- Task: bmad_bmm_tasks_bmad-help.md
|
||||
- Tool: bmad_core_tools_code-review.md
|
||||
- Custom: bmad_custom_agents_fred-commit-poet.md
|
||||
```
|
||||
|
||||
**Note:** Type segments (agents, workflows, tasks, tools) are filtered out of the final names, so the flattened filenames are shorter than the `bmad_{module}_{type}_{name}.md` pattern shown above:
|
||||
- `bmm/agents/pm.md` → `bmad_bmm_pm.md` (not `bmad_bmm_agents_pm.md`)
|
||||
|
||||
### For Hybrid IDEs (keep as-is)
|
||||
|
||||
**IDEs affected:** auggie, iflow
|
||||
|
||||
These use `{module}-{name}.md` format within subdirectories - keep as-is.
|
||||
|
||||
### Skip (drastically different)
|
||||
|
||||
**IDEs affected:** trae, github-copilot
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
### Phase 1: Create Shared Utility
|
||||
|
||||
**File:** `shared/path-utils.js`
|
||||
|
||||
```javascript
|
||||
// Type segments that are filtered out of flattened names
const TYPE_SEGMENTS = ['agents', 'workflows', 'tasks', 'tools'];

/**
|
||||
* Convert hierarchical path to flat underscore-separated name (Windows-compatible)
|
||||
* @param {string} module - Module name (e.g., 'bmm', 'core')
|
||||
* @param {string} type - Artifact type ('agents', 'workflows', 'tasks', 'tools') - filtered out
|
||||
* @param {string} name - Artifact name (e.g., 'pm', 'correct-course')
|
||||
* @returns {string} Flat filename like 'bmad_bmm_pm.md'
|
||||
*/
|
||||
function toUnderscoreName(module, type, name) {
|
||||
return `bmad_${module}_${name}.md`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert relative path to flat underscore-separated name (Windows-compatible)
|
||||
* @param {string} relativePath - Path like 'bmm/agents/pm.md'
|
||||
* @returns {string} Flat filename like 'bmad_bmm_pm.md'
|
||||
*/
|
||||
function toUnderscorePath(relativePath) {
|
||||
const withoutExt = relativePath.replace('.md', '');
|
||||
const parts = withoutExt.split(/[\/\\]/);
|
||||
// Filter out type segments (agents, workflows, tasks, tools)
|
||||
const filtered = parts.filter((p) => !TYPE_SEGMENTS.includes(p));
|
||||
return `bmad_${filtered.join('_')}.md`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create custom agent underscore name
|
||||
* @param {string} agentName - Custom agent name
|
||||
* @returns {string} Flat filename like 'bmad_custom_fred-commit-poet.md'
|
||||
*/
|
||||
function customAgentUnderscoreName(agentName) {
|
||||
return `bmad_custom_${agentName}.md`;
|
||||
}
|
||||
|
||||
// Backward compatibility aliases
|
||||
const toColonName = toUnderscoreName;
|
||||
const toColonPath = toUnderscorePath;
|
||||
const toDashPath = toUnderscorePath;
|
||||
const customAgentColonName = customAgentUnderscoreName;
|
||||
const customAgentDashName = customAgentUnderscoreName;
|
||||
|
||||
module.exports = {
|
||||
toUnderscoreName,
|
||||
toUnderscorePath,
|
||||
customAgentUnderscoreName,
|
||||
// Backward compatibility
|
||||
toColonName,
|
||||
toColonPath,
|
||||
toDashPath,
|
||||
customAgentColonName,
|
||||
customAgentDashName,
|
||||
};
|
||||
```
|
||||
|
||||
### Phase 2: Update Shared Generators
|
||||
|
||||
**Files to modify:**
|
||||
- `shared/agent-command-generator.js`
|
||||
- `shared/task-tool-command-generator.js`
|
||||
- `shared/workflow-command-generator.js`
|
||||
|
||||
**Changes:**
|
||||
1. Import path utilities
|
||||
2. Change `relativePath` to use flat format
|
||||
3. Add method `writeColonArtifacts()` for folder-based IDEs (uses underscore)
|
||||
4. Add method `writeDashArtifacts()` for flat IDEs (uses underscore)
|
||||
|
||||
### Phase 3: Update All IDEs
|
||||
|
||||
**Files to modify:**
|
||||
- `claude-code.js`
|
||||
- `cursor.js`
|
||||
- `crush.js`
|
||||
- `antigravity.js`
|
||||
- `codex.js`
|
||||
- `cline.js`
|
||||
- `roo.js`
|
||||
|
||||
**Changes:**
|
||||
1. Import utilities from path-utils
|
||||
2. Change from hierarchical to flat underscore naming
|
||||
3. Update cleanup to handle flat structure (`startsWith('bmad')`)
|
||||
|
||||
### Phase 4: Update Base Class
|
||||
|
||||
**File:** `_base-ide.js`
|
||||
|
||||
**Changes:**
|
||||
1. Mark `flattenFilename()` as `@deprecated`
|
||||
2. Add comment pointing to new path-utils
|
||||
|
||||
## Migration Checklist
|
||||
|
||||
### New Files
|
||||
- [x] Create `shared/path-utils.js`
|
||||
|
||||
### All IDEs (convert to underscore format)
|
||||
- [x] Update `shared/agent-command-generator.js` - update for underscore
|
||||
- [x] Update `shared/task-tool-command-generator.js` - update for underscore
|
||||
- [x] Update `shared/workflow-command-generator.js` - update for underscore
|
||||
- [x] Update `claude-code.js` - convert to underscore format
|
||||
- [x] Update `cursor.js` - convert to underscore format
|
||||
- [x] Update `crush.js` - convert to underscore format
|
||||
- [ ] Update `antigravity.js` - use underscore format
|
||||
- [ ] Update `codex.js` - use underscore format
|
||||
- [ ] Update `cline.js` - use underscore format
|
||||
- [ ] Update `roo.js` - use underscore format
|
||||
|
||||
### CSV Command Files
|
||||
- [x] Update `src/core/module-help.csv` - change colons to underscores
|
||||
- [x] Update `src/bmm/module-help.csv` - change colons to underscores
|
||||
|
||||
### Base Class
|
||||
- [ ] Update `_base-ide.js` - add deprecation notice
|
||||
|
||||
### Testing
|
||||
- [ ] Test claude-code installation
|
||||
- [ ] Test cursor installation
|
||||
- [ ] Test crush installation
|
||||
- [ ] Test antigravity installation
|
||||
- [ ] Test codex installation
|
||||
- [ ] Test cline installation
|
||||
- [ ] Test roo installation
|
||||
|
||||
## Notes
|
||||
|
||||
1. **Filter type segments**: agents, workflows, tasks, tools are filtered out from flat names
|
||||
2. **Underscore format**: Universal underscore format for Windows compatibility
|
||||
3. **Custom agents**: Follow the same pattern as regular agents
|
||||
4. **Backward compatibility**: Old function names kept as aliases
|
||||
5. **Cleanup**: Will remove old `bmad:` format files on next install
|
||||
|
|
@ -0,0 +1,446 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const chalk = require('chalk');
|
||||
const yaml = require('yaml');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const { UnifiedInstaller } = require('./shared/unified-installer');
|
||||
const { toSuffixBasedName, getArtifactSuffix, customAgentSuffixName } = require('./shared/path-utils');
|
||||
|
||||
/**
|
||||
* Load platform codes configuration from platform-codes.yaml
|
||||
* @returns {Object} Platform configuration object
|
||||
*/
|
||||
async function loadPlatformCodes() {
|
||||
const platformCodesPath = path.join(__dirname, 'platform-codes.yaml');
|
||||
|
||||
if (!(await fs.pathExists(platformCodesPath))) {
|
||||
console.warn(chalk.yellow('Warning: platform-codes.yaml not found'));
|
||||
return { platforms: {} };
|
||||
}
|
||||
|
||||
const content = await fs.readFile(platformCodesPath, 'utf8');
|
||||
const config = yaml.parse(content);
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Config-driven IDE setup handler
|
||||
*
|
||||
* Reads installer configuration from platform-codes.yaml and uses
|
||||
* UnifiedInstaller to perform the actual installation.
|
||||
*
|
||||
* This eliminates the need for separate installer files for most IDEs.
|
||||
*/
|
||||
class ConfigDrivenIdeSetup extends BaseIdeSetup {
|
||||
/**
|
||||
* @param {string} platformCode - Platform code (e.g., 'claude-code', 'cursor')
|
||||
* @param {Object} platformConfig - Platform configuration from platform-codes.yaml
|
||||
*/
|
||||
constructor(platformCode, platformConfig) {
|
||||
super(platformCode, platformConfig.name, platformConfig.preferred);
|
||||
this.platformConfig = platformConfig;
|
||||
this.installerConfig = platformConfig.installer || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup IDE configuration using config-driven approach
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
* @returns {Promise<Object>} Setup result
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
if (!this.installerConfig) {
|
||||
console.warn(chalk.yellow(`No installer configuration found for ${this.name}`));
|
||||
return { success: false, reason: 'no-config' };
|
||||
}
|
||||
|
||||
// Handle multi-target installations (like github-copilot, opencode)
|
||||
if (this.installerConfig.targets) {
|
||||
return this.installToMultipleTargets(projectDir, bmadDir, this.installerConfig.targets, options);
|
||||
}
|
||||
|
||||
// Handle single-target installations
|
||||
if (this.installerConfig.target_dir) {
|
||||
return this.installToTarget(projectDir, bmadDir, this.installerConfig, options);
|
||||
}
|
||||
|
||||
console.warn(chalk.yellow(`Invalid installer configuration for ${this.name}`));
|
||||
return { success: false, reason: 'invalid-config' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Install artifacts to a single target directory
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} targetConfig - Target configuration
|
||||
* @param {Object} options - Setup options
|
||||
* @returns {Promise<Object>} Setup result
|
||||
*/
|
||||
async installToTarget(projectDir, bmadDir, targetConfig, options) {
|
||||
const targetDir = path.join(projectDir, targetConfig.dir || targetConfig.target_dir);
|
||||
|
||||
// Clean up old BMAD installation first
|
||||
await this.cleanupTarget(targetDir, targetConfig.file_extension || '.md');
|
||||
|
||||
// Ensure target directory exists
|
||||
await this.ensureDir(targetDir);
|
||||
|
||||
// Get frontmatter template from config (defaults to common-yaml.md)
|
||||
const frontmatterTemplate = targetConfig.frontmatter_template || 'common-yaml.md';
|
||||
|
||||
// Use the unified installer
|
||||
const installer = new UnifiedInstaller(this.bmadFolderName);
|
||||
const counts = await installer.install(
|
||||
projectDir,
|
||||
bmadDir,
|
||||
{
|
||||
targetDir,
|
||||
namingStyle: 'suffix-based',
|
||||
frontmatterTemplate,
|
||||
fileExtension: targetConfig.file_extension || '.md',
|
||||
skipExisting: targetConfig.skip_existing || false,
|
||||
artifactTypes: targetConfig.artifact_types,
|
||||
},
|
||||
options.selectedModules || [],
|
||||
);
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${counts.agents} agents installed`));
|
||||
if (counts.workflows > 0) {
|
||||
console.log(chalk.dim(` - ${counts.workflows} workflow commands generated`));
|
||||
}
|
||||
if (counts.tasks + counts.tools > 0) {
|
||||
console.log(
|
||||
chalk.dim(` - ${counts.tasks + counts.tools} task/tool commands generated (${counts.tasks} tasks, ${counts.tools} tools)`),
|
||||
);
|
||||
}
|
||||
console.log(chalk.dim(` - Target directory: ${path.relative(projectDir, targetDir)}`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: counts.agents,
|
||||
tasks: counts.tasks,
|
||||
tools: counts.tools,
|
||||
workflows: counts.workflows,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Install artifacts to multiple target directories
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Array} targets - Array of target configurations
|
||||
* @param {Object} options - Setup options
|
||||
* @returns {Promise<Object>} Setup result
|
||||
*/
|
||||
async installToMultipleTargets(projectDir, bmadDir, targets, options) {
|
||||
const totalCounts = {
|
||||
agents: 0,
|
||||
workflows: 0,
|
||||
tasks: 0,
|
||||
tools: 0,
|
||||
total: 0,
|
||||
};
|
||||
|
||||
const targetNames = [];
|
||||
|
||||
for (const targetConfig of targets) {
|
||||
const targetDir = path.join(projectDir, targetConfig.dir);
|
||||
|
||||
// Clean up old BMAD installation first
|
||||
await this.cleanupTarget(targetDir, targetConfig.file_extension || '.md');
|
||||
|
||||
// Ensure target directory exists
|
||||
await this.ensureDir(targetDir);
|
||||
|
||||
// Get frontmatter template from config (defaults to common-yaml.md)
|
||||
const frontmatterTemplate = targetConfig.frontmatter_template || 'common-yaml.md';
|
||||
|
||||
// Use the unified installer for this target
|
||||
const installer = new UnifiedInstaller(this.bmadFolderName);
|
||||
const counts = await installer.install(
|
||||
projectDir,
|
||||
bmadDir,
|
||||
{
|
||||
targetDir,
|
||||
namingStyle: 'suffix-based',
|
||||
frontmatterTemplate,
|
||||
fileExtension: targetConfig.file_extension || '.md',
|
||||
skipExisting: targetConfig.skip_existing || false,
|
||||
artifactTypes: targetConfig.artifact_types,
|
||||
},
|
||||
options.selectedModules || [],
|
||||
);
|
||||
|
||||
// Accumulate counts
|
||||
totalCounts.agents += counts.agents;
|
||||
totalCounts.workflows += counts.workflows;
|
||||
totalCounts.tasks += counts.tasks;
|
||||
totalCounts.tools += counts.tools;
|
||||
|
||||
targetNames.push(path.relative(projectDir, targetDir));
|
||||
}
|
||||
|
||||
totalCounts.total = totalCounts.agents + totalCounts.workflows + totalCounts.tasks + totalCounts.tools;
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${totalCounts.agents} agents installed`));
|
||||
if (totalCounts.workflows > 0) {
|
||||
console.log(chalk.dim(` - ${totalCounts.workflows} workflow commands generated`));
|
||||
}
|
||||
if (totalCounts.tasks + totalCounts.tools > 0) {
|
||||
console.log(
|
||||
chalk.dim(
|
||||
` - ${totalCounts.tasks + totalCounts.tools} task/tool commands generated (${totalCounts.tasks} tasks, ${totalCounts.tools} tools)`,
|
||||
),
|
||||
);
|
||||
}
|
||||
console.log(chalk.dim(` - Target directories: ${targetNames.join(', ')}`));
|
||||
|
||||
// Handle VS Code settings if needed (for github-copilot)
|
||||
if (this.installerConfig.has_vscode_settings) {
|
||||
await this.configureVsCodeSettings(projectDir, options);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
...totalCounts,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure VS Code settings for GitHub Copilot
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async configureVsCodeSettings(projectDir, options) {
|
||||
const vscodeDir = path.join(projectDir, '.vscode');
|
||||
const settingsPath = path.join(vscodeDir, 'settings.json');
|
||||
|
||||
await this.ensureDir(vscodeDir);
|
||||
|
||||
// Read existing settings
|
||||
let existingSettings = {};
|
||||
if (await fs.pathExists(settingsPath)) {
|
||||
try {
|
||||
const content = await fs.readFile(settingsPath, 'utf8');
|
||||
existingSettings = JSON.parse(content);
|
||||
} catch {
|
||||
console.warn(chalk.yellow(' Could not parse settings.json, creating new'));
|
||||
}
|
||||
}
|
||||
|
||||
// BMAD VS Code settings
|
||||
const bmadSettings = {
|
||||
'chat.agent.enabled': true,
|
||||
'chat.agent.maxRequests': 15,
|
||||
'github.copilot.chat.agent.runTasks': true,
|
||||
'chat.mcp.discovery.enabled': true,
|
||||
'github.copilot.chat.agent.autoFix': true,
|
||||
'chat.tools.autoApprove': false,
|
||||
};
|
||||
|
||||
// Merge settings (existing take precedence)
|
||||
const mergedSettings = { ...bmadSettings, ...existingSettings };
|
||||
|
||||
// Write settings
|
||||
await fs.writeFile(settingsPath, JSON.stringify(mergedSettings, null, 2));
|
||||
console.log(chalk.dim(` - VS Code settings configured`));
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up a specific target directory
|
||||
* @param {string} targetDir - Target directory to clean
|
||||
* @param {string} [fileExtension='.md'] - File extension to match
|
||||
*/
|
||||
async cleanupTarget(targetDir, fileExtension = '.md') {
|
||||
if (!(await fs.pathExists(targetDir))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const entries = await fs.readdir(targetDir);
|
||||
let removed = 0;
|
||||
|
||||
for (const entry of entries) {
|
||||
// Remove bmad* files with the matching extension
|
||||
if (entry.startsWith('bmad') && entry.endsWith(fileExtension)) {
|
||||
await fs.remove(path.join(targetDir, entry));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} existing BMAD files`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup IDE configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
if (!this.installerConfig) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle multi-target cleanup
|
||||
if (this.installerConfig.targets) {
|
||||
for (const targetConfig of this.installerConfig.targets) {
|
||||
const targetDir = path.join(projectDir, targetConfig.dir);
|
||||
await this.cleanupTarget(targetDir, targetConfig.file_extension || '.md');
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle single-target cleanup
|
||||
if (this.installerConfig.target_dir) {
|
||||
const targetDir = path.join(projectDir, this.installerConfig.target_dir);
|
||||
await this.cleanupTarget(targetDir, this.installerConfig.file_extension || '.md');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for this IDE
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object|null} Info about created command
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
if (!this.installerConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Determine target directory for agents
|
||||
let targetDir;
|
||||
let fileExtension = '.md';
|
||||
let frontmatterTemplate = 'common-yaml.md';
|
||||
|
||||
if (this.installerConfig.targets) {
|
||||
// For multi-target IDEs like github-copilot, find the agents target
|
||||
const agentsTarget = this.installerConfig.targets.find((t) => t.artifact_types && t.artifact_types.includes('agents'));
|
||||
if (!agentsTarget) {
|
||||
return null; // No agents target found
|
||||
}
|
||||
targetDir = path.join(projectDir, agentsTarget.dir);
|
||||
fileExtension = agentsTarget.file_extension || '.md';
|
||||
frontmatterTemplate = agentsTarget.frontmatter_template || 'common-yaml.md';
|
||||
} else if (this.installerConfig.target_dir) {
|
||||
targetDir = path.join(projectDir, this.installerConfig.target_dir);
|
||||
fileExtension = this.installerConfig.file_extension || '.md';
|
||||
frontmatterTemplate = this.installerConfig.frontmatter_template || 'common-yaml.md';
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!(await this.exists(targetDir))) {
|
||||
return null;
|
||||
}
|
||||
|
||||
await this.ensureDir(targetDir);
|
||||
|
||||
// Create launcher content using frontmatter template
|
||||
const launcherContent = await this.createLauncherContent(agentName, agentPath, metadata, frontmatterTemplate);
|
||||
|
||||
// Use suffix-based naming for custom agents
|
||||
const fileName = customAgentSuffixName(agentName, fileExtension);
|
||||
const launcherPath = path.join(targetDir, fileName);
|
||||
await this.writeFile(launcherPath, launcherContent);
|
||||
|
||||
return {
|
||||
path: launcherPath,
|
||||
command: fileName.replace(fileExtension, ''),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create launcher content using frontmatter template
|
||||
* @param {string} agentName - Agent name
|
||||
* @param {string} agentPath - Path to agent file
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @param {string} frontmatterTemplate - Template filename
|
||||
* @returns {Promise<string>} Launcher content
|
||||
*/
|
||||
async createLauncherContent(agentName, agentPath, metadata, frontmatterTemplate) {
|
||||
const title = metadata.title || this.formatTitle(agentName);
|
||||
|
||||
// Base activation content
|
||||
const activationContent = `You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @${agentPath}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. FOLLOW every step in the <activation> section precisely
|
||||
4. DISPLAY the welcome/greeting as instructed
|
||||
5. PRESENT the numbered menu
|
||||
6. WAIT for user input before proceeding
|
||||
</agent-activation>
|
||||
`;
|
||||
|
||||
// Load frontmatter template
|
||||
const { UnifiedInstaller } = require('./shared/unified-installer');
|
||||
const installer = new UnifiedInstaller(this.bmadFolderName);
|
||||
const templateContent = await installer.loadFrontmatterTemplate(frontmatterTemplate);
|
||||
|
||||
if (!templateContent) {
|
||||
// Fallback to basic YAML
|
||||
return `---
|
||||
name: '${agentName}'
|
||||
description: '${title} agent'
|
||||
---
|
||||
|
||||
${activationContent}`;
|
||||
}
|
||||
|
||||
// Apply template variables
|
||||
const variables = {
|
||||
name: agentName,
|
||||
title,
|
||||
displayName: agentName,
|
||||
description: `Activates the ${title} agent persona.`,
|
||||
icon: '🤖',
|
||||
content: activationContent,
|
||||
tools: JSON.stringify([
|
||||
'changes',
|
||||
'edit',
|
||||
'fetch',
|
||||
'githubRepo',
|
||||
'problems',
|
||||
'runCommands',
|
||||
'runTasks',
|
||||
'runTests',
|
||||
'search',
|
||||
'runSubagent',
|
||||
'testFailure',
|
||||
'todos',
|
||||
'usages',
|
||||
]),
|
||||
};
|
||||
|
||||
let result = templateContent;
|
||||
for (const [key, value] of Object.entries(variables)) {
|
||||
result = result.replaceAll(`{{${key}}}`, value);
|
||||
}
|
||||
|
||||
// Handle TOML templates specially
|
||||
if (frontmatterTemplate.includes('toml')) {
|
||||
const escapedContent = activationContent.replaceAll('"""', String.raw`\"\"\"`);
|
||||
result = result.replace(
|
||||
/prompt = """/,
|
||||
`prompt = """\n**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!\n\n${escapedContent}`,
|
||||
);
|
||||
return result;
|
||||
}
|
||||
|
||||
return result + activationContent;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
ConfigDrivenIdeSetup,
|
||||
loadPlatformCodes,
|
||||
};
|
||||
|
|
@ -1,474 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { getProjectRoot, getSourcePath, getModulePath } = require('../../../lib/project-root');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const {
|
||||
loadModuleInjectionConfig,
|
||||
shouldApplyInjection,
|
||||
filterAgentInstructions,
|
||||
resolveSubagentFiles,
|
||||
} = require('./shared/module-injections');
|
||||
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
||||
const { toDashPath, customAgentDashName } = require('./shared/path-utils');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
||||
/**
|
||||
* Google Antigravity IDE setup handler
|
||||
*
|
||||
* Uses .agent/workflows/ directory for slash commands
|
||||
*/
|
||||
class AntigravitySetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('antigravity', 'Google Antigravity', true);
|
||||
this.configDir = '.agent';
|
||||
this.workflowsDir = 'workflows';
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompt for subagent installation location
|
||||
* @returns {Promise<string>} Selected location ('project' or 'user')
|
||||
*/
|
||||
async _promptInstallLocation() {
|
||||
return prompts.select({
|
||||
message: 'Where would you like to install Antigravity subagents?',
|
||||
choices: [
|
||||
{ name: 'Project level (.agent/agents/)', value: 'project' },
|
||||
{ name: 'User level (~/.agent/agents/)', value: 'user' },
|
||||
],
|
||||
default: 'project',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect configuration choices before installation
|
||||
* @param {Object} options - Configuration options
|
||||
* @returns {Object} Collected configuration
|
||||
*/
|
||||
async collectConfiguration(options = {}) {
|
||||
// const config = {
|
||||
// subagentChoices: null,
|
||||
// installLocation: null,
|
||||
// };
|
||||
|
||||
// const sourceModulesPath = getSourcePath('modules');
|
||||
// const modules = options.selectedModules || [];
|
||||
|
||||
// for (const moduleName of modules) {
|
||||
// // Check for Antigravity sub-module injection config in SOURCE directory
|
||||
// const injectionConfigPath = path.join(sourceModulesPath, moduleName, 'sub-modules', 'antigravity', 'injections.yaml');
|
||||
|
||||
// if (await this.exists(injectionConfigPath)) {
|
||||
// const yaml = require('yaml');
|
||||
|
||||
// try {
|
||||
// // Load injection configuration
|
||||
// const configContent = await fs.readFile(injectionConfigPath, 'utf8');
|
||||
// const injectionConfig = yaml.parse(configContent);
|
||||
|
||||
// // Ask about subagents if they exist and we haven't asked yet
|
||||
// if (injectionConfig.subagents && !config.subagentChoices) {
|
||||
// config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
||||
|
||||
// if (config.subagentChoices.install !== 'none') {
|
||||
// config.installLocation = await this._promptInstallLocation();
|
||||
// }
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.log(chalk.yellow(` Warning: Failed to process ${moduleName} features: ${error.message}`));
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup old BMAD installation before reinstalling
|
||||
* @param {string} projectDir - Project directory
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const bmadWorkflowsDir = path.join(projectDir, this.configDir, this.workflowsDir, 'bmad');
|
||||
|
||||
if (await fs.pathExists(bmadWorkflowsDir)) {
|
||||
await fs.remove(bmadWorkflowsDir);
|
||||
console.log(chalk.dim(` Removed old BMAD workflows from ${this.name}`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Antigravity IDE configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
// Store project directory for use in processContent
|
||||
this.projectDir = projectDir;
|
||||
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Clean up old BMAD installation first
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Create .agent/workflows directory structure
|
||||
const agentDir = path.join(projectDir, this.configDir);
|
||||
const workflowsDir = path.join(agentDir, this.workflowsDir);
|
||||
const bmadWorkflowsDir = path.join(workflowsDir, 'bmad');
|
||||
|
||||
await this.ensureDir(bmadWorkflowsDir);
|
||||
|
||||
// Generate agent launchers using AgentCommandGenerator
|
||||
// This creates small launcher files that reference the actual agents in _bmad/
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts, counts: agentCounts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Write agent launcher files with FLATTENED naming using shared utility
|
||||
// Antigravity ignores directory structure, so we flatten to: bmad_module_name.md
|
||||
// This creates slash commands like /bmad_bmm_dev instead of /dev
|
||||
const agentCount = await agentGen.writeDashArtifacts(bmadWorkflowsDir, agentArtifacts);
|
||||
|
||||
// Process Antigravity specific injections for installed modules
|
||||
// Use pre-collected configuration if available, or skip if already configured
|
||||
if (options.preCollectedConfig && options.preCollectedConfig._alreadyConfigured) {
|
||||
// IDE is already configured from previous installation, skip prompting
|
||||
// Just process with default/existing configuration
|
||||
await this.processModuleInjectionsWithConfig(projectDir, bmadDir, options, {});
|
||||
} else if (options.preCollectedConfig) {
|
||||
await this.processModuleInjectionsWithConfig(projectDir, bmadDir, options, options.preCollectedConfig);
|
||||
} else {
|
||||
await this.processModuleInjections(projectDir, bmadDir, options);
|
||||
}
|
||||
|
||||
// Generate workflow commands from manifest (if it exists)
|
||||
const workflowGen = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts } = await workflowGen.collectWorkflowArtifacts(bmadDir);
|
||||
|
||||
// Write workflow-command artifacts with FLATTENED naming using shared utility
|
||||
const workflowCommandCount = await workflowGen.writeDashArtifacts(bmadWorkflowsDir, workflowArtifacts);
|
||||
|
||||
// Generate task and tool commands from manifests (if they exist)
|
||||
const taskToolGen = new TaskToolCommandGenerator();
|
||||
const taskToolResult = await taskToolGen.generateTaskToolCommands(projectDir, bmadDir);
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agents installed`));
|
||||
if (workflowCommandCount > 0) {
|
||||
console.log(chalk.dim(` - ${workflowCommandCount} workflow commands generated`));
|
||||
}
|
||||
if (taskToolResult.generated > 0) {
|
||||
console.log(
|
||||
chalk.dim(
|
||||
` - ${taskToolResult.generated} task/tool commands generated (${taskToolResult.tasks} tasks, ${taskToolResult.tools} tools)`,
|
||||
),
|
||||
);
|
||||
}
|
||||
console.log(chalk.dim(` - Workflows directory: ${path.relative(projectDir, bmadWorkflowsDir)}`));
|
||||
console.log(chalk.yellow(`\n Note: Antigravity uses flattened slash commands (e.g., /bmad_module_agents_name)`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Read and process file content
|
||||
*/
|
||||
async readAndProcess(filePath, metadata) {
|
||||
const content = await fs.readFile(filePath, 'utf8');
|
||||
return this.processContent(content, metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* Override processContent to keep {project-root} placeholder
|
||||
*/
|
||||
processContent(content, metadata = {}) {
|
||||
// Use the base class method WITHOUT projectDir to preserve {project-root} placeholder
|
||||
return super.processContent(content, metadata);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get agents from source modules (not installed location)
|
||||
*/
|
||||
async getAgentsFromSource(sourceDir, selectedModules) {
|
||||
const agents = [];
|
||||
|
||||
// Add core agents
|
||||
const corePath = getModulePath('core');
|
||||
if (await fs.pathExists(path.join(corePath, 'agents'))) {
|
||||
const coreAgents = await getAgentsFromDir(path.join(corePath, 'agents'), 'core');
|
||||
agents.push(...coreAgents);
|
||||
}
|
||||
|
||||
// Add module agents
|
||||
for (const moduleName of selectedModules) {
|
||||
const modulePath = path.join(sourceDir, moduleName);
|
||||
const agentsPath = path.join(modulePath, 'agents');
|
||||
|
||||
if (await fs.pathExists(agentsPath)) {
|
||||
const moduleAgents = await getAgentsFromDir(agentsPath, moduleName);
|
||||
agents.push(...moduleAgents);
|
||||
}
|
||||
}
|
||||
|
||||
return agents;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process module injections with pre-collected configuration
|
||||
*/
|
||||
async processModuleInjectionsWithConfig(projectDir, bmadDir, options, preCollectedConfig) {
|
||||
// Get list of installed modules
|
||||
const modules = options.selectedModules || [];
|
||||
const { subagentChoices, installLocation } = preCollectedConfig;
|
||||
|
||||
// Get the actual source directory (not the installation directory)
|
||||
await this.processModuleInjectionsInternal({
|
||||
projectDir,
|
||||
modules,
|
||||
handler: 'antigravity',
|
||||
subagentChoices,
|
||||
installLocation,
|
||||
interactive: false,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Process Antigravity specific injections for installed modules
|
||||
* Looks for injections.yaml in each module's antigravity sub-module
|
||||
*/
|
||||
async processModuleInjections(projectDir, bmadDir, options) {
|
||||
// Get list of installed modules
|
||||
const modules = options.selectedModules || [];
|
||||
let subagentChoices = null;
|
||||
let installLocation = null;
|
||||
|
||||
// Get the actual source directory (not the installation directory)
|
||||
const { subagentChoices: updatedChoices, installLocation: updatedLocation } = await this.processModuleInjectionsInternal({
|
||||
projectDir,
|
||||
modules,
|
||||
handler: 'antigravity',
|
||||
subagentChoices,
|
||||
installLocation,
|
||||
interactive: true,
|
||||
});
|
||||
|
||||
if (updatedChoices) {
|
||||
subagentChoices = updatedChoices;
|
||||
}
|
||||
if (updatedLocation) {
|
||||
installLocation = updatedLocation;
|
||||
}
|
||||
}
|
||||
|
||||
async processModuleInjectionsInternal({ projectDir, modules, handler, subagentChoices, installLocation, interactive = false }) {
|
||||
let choices = subagentChoices;
|
||||
let location = installLocation;
|
||||
|
||||
for (const moduleName of modules) {
|
||||
const configData = await loadModuleInjectionConfig(handler, moduleName);
|
||||
|
||||
if (!configData) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const { config, handlerBaseDir } = configData;
|
||||
|
||||
if (interactive) {
|
||||
console.log(chalk.cyan(`\nConfiguring ${moduleName} ${handler} features...`));
|
||||
}
|
||||
|
||||
// if (interactive && config.subagents && !choices) {
|
||||
// choices = await this.promptSubagentInstallation(config.subagents);
|
||||
|
||||
// if (choices.install !== 'none') {
|
||||
// location = await this._promptInstallLocation();
|
||||
// }
|
||||
// }
|
||||
|
||||
if (config.injections && choices && choices.install !== 'none') {
|
||||
for (const injection of config.injections) {
|
||||
if (shouldApplyInjection(injection, choices)) {
|
||||
await this.injectContent(projectDir, injection, choices);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (config.subagents && choices && choices.install !== 'none') {
|
||||
await this.copySelectedSubagents(projectDir, handlerBaseDir, config.subagents, choices, location || 'project');
|
||||
}
|
||||
}
|
||||
|
||||
return { subagentChoices: choices, installLocation: location };
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompt user for subagent installation preferences
|
||||
*/
|
||||
async promptSubagentInstallation(subagentConfig) {
|
||||
// First ask if they want to install subagents
|
||||
const install = await prompts.select({
|
||||
message: 'Would you like to install Antigravity subagents for enhanced functionality?',
|
||||
choices: [
|
||||
{ name: 'Yes, install all subagents', value: 'all' },
|
||||
{ name: 'Yes, let me choose specific subagents', value: 'selective' },
|
||||
{ name: 'No, skip subagent installation', value: 'none' },
|
||||
],
|
||||
default: 'all',
|
||||
});
|
||||
|
||||
if (install === 'selective') {
|
||||
// Show list of available subagents with descriptions
|
||||
const subagentInfo = {
|
||||
'market-researcher.md': 'Market research and competitive analysis',
|
||||
'requirements-analyst.md': 'Requirements extraction and validation',
|
||||
'technical-evaluator.md': 'Technology stack evaluation',
|
||||
'epic-optimizer.md': 'Epic and story breakdown optimization',
|
||||
'document-reviewer.md': 'Document quality review',
|
||||
};
|
||||
|
||||
const selected = await prompts.multiselect({
|
||||
message: `Select subagents to install ${chalk.dim('(↑/↓ navigates multiselect, SPACE toggles, A to toggles All, ENTER confirm)')}:`,
|
||||
choices: subagentConfig.files.map((file) => ({
|
||||
name: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
|
||||
value: file,
|
||||
checked: true,
|
||||
})),
|
||||
});
|
||||
|
||||
return { install: 'selective', selected };
|
||||
}
|
||||
|
||||
return { install };
|
||||
}
|
||||
|
||||
/**
|
||||
* Inject content at specified point in file
|
||||
*/
|
||||
async injectContent(projectDir, injection, subagentChoices = null) {
|
||||
const targetPath = path.join(projectDir, injection.file);
|
||||
|
||||
if (await this.exists(targetPath)) {
|
||||
let content = await fs.readFile(targetPath, 'utf8');
|
||||
const marker = `<!-- IDE-INJECT-POINT: ${injection.point} -->`;
|
||||
|
||||
if (content.includes(marker)) {
|
||||
let injectionContent = injection.content;
|
||||
|
||||
// Filter content if selective subagents chosen
|
||||
if (subagentChoices && subagentChoices.install === 'selective' && injection.point === 'pm-agent-instructions') {
|
||||
injectionContent = filterAgentInstructions(injection.content, subagentChoices.selected);
|
||||
}
|
||||
|
||||
content = content.replace(marker, injectionContent);
|
||||
await fs.writeFile(targetPath, content);
|
||||
console.log(chalk.dim(` Injected: ${injection.point} → ${injection.file}`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy selected subagents to appropriate Antigravity agents directory
|
||||
*/
|
||||
async copySelectedSubagents(projectDir, handlerBaseDir, subagentConfig, choices, location) {
|
||||
const os = require('node:os');
|
||||
|
||||
// Determine target directory based on user choice
|
||||
let targetDir;
|
||||
if (location === 'user') {
|
||||
targetDir = path.join(os.homedir(), '.agent', 'agents');
|
||||
console.log(chalk.dim(` Installing subagents globally to: ~/.agent/agents/`));
|
||||
} else {
|
||||
targetDir = path.join(projectDir, '.agent', 'agents');
|
||||
console.log(chalk.dim(` Installing subagents to project: .agent/agents/`));
|
||||
}
|
||||
|
||||
// Ensure target directory exists
|
||||
await this.ensureDir(targetDir);
|
||||
|
||||
const resolvedFiles = await resolveSubagentFiles(handlerBaseDir, subagentConfig, choices);
|
||||
|
||||
let copiedCount = 0;
|
||||
for (const resolved of resolvedFiles) {
|
||||
try {
|
||||
const sourcePath = resolved.absolutePath;
|
||||
|
||||
const subFolder = path.dirname(resolved.relativePath);
|
||||
let targetPath;
|
||||
if (subFolder && subFolder !== '.') {
|
||||
const targetSubDir = path.join(targetDir, subFolder);
|
||||
await this.ensureDir(targetSubDir);
|
||||
targetPath = path.join(targetSubDir, path.basename(resolved.file));
|
||||
} else {
|
||||
targetPath = path.join(targetDir, path.basename(resolved.file));
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, targetPath);
|
||||
console.log(chalk.green(` ✓ Installed: ${subFolder === '.' ? '' : `${subFolder}/`}${path.basename(resolved.file, '.md')}`));
|
||||
copiedCount++;
|
||||
} catch (error) {
|
||||
console.log(chalk.yellow(` ⚠ Error copying ${resolved.file}: ${error.message}`));
|
||||
}
|
||||
}
|
||||
|
||||
if (copiedCount > 0) {
|
||||
console.log(chalk.dim(` Total subagents installed: ${copiedCount}`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for Antigravity
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object} Installation result
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
// Create .agent/workflows/bmad directory structure (same as regular agents)
|
||||
const agentDir = path.join(projectDir, this.configDir);
|
||||
const workflowsDir = path.join(agentDir, this.workflowsDir);
|
||||
const bmadWorkflowsDir = path.join(workflowsDir, 'bmad');
|
||||
|
||||
await fs.ensureDir(bmadWorkflowsDir);
|
||||
|
||||
// Create custom agent launcher with same pattern as regular agents
|
||||
const launcherContent = `name: '${agentName}'
|
||||
description: '${agentName} agent'
|
||||
usage: |
|
||||
Custom BMAD agent: ${agentName}
|
||||
|
||||
Launch with: /${agentName}
|
||||
|
||||
You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @${agentPath}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. EXECUTE as ${agentName} with full persona adoption
|
||||
</agent-activation>
|
||||
|
||||
---
|
||||
|
||||
⚠️ **IMPORTANT**: Run @${agentPath} to load the complete agent before using this launcher!`;
|
||||
|
||||
// Use underscore format: bmad_custom_fred-commit-poet.md
|
||||
const fileName = customAgentDashName(agentName);
|
||||
const launcherPath = path.join(bmadWorkflowsDir, fileName);
|
||||
|
||||
// Write the launcher file
|
||||
await fs.writeFile(launcherPath, launcherContent, 'utf8');
|
||||
|
||||
return {
|
||||
ide: 'antigravity',
|
||||
path: path.relative(projectDir, launcherPath),
|
||||
command: `/${fileName.replace('.md', '')}`,
|
||||
type: 'custom-agent-launcher',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { AntigravitySetup };
|
||||
|
|
@ -1,244 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
|
||||
/**
|
||||
* Auggie CLI setup handler
|
||||
* Installs to project directory (.augment/commands)
|
||||
*/
|
||||
class AuggieSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('auggie', 'Auggie CLI');
|
||||
this.detectionPaths = ['.augment'];
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Auggie CLI configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Always use project directory
|
||||
const location = path.join(projectDir, '.augment', 'commands');
|
||||
|
||||
// Clean up old BMAD installation first
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Get tasks, tools, and workflows (ALL workflows now generate commands)
|
||||
const tasks = await this.getTasks(bmadDir, true);
|
||||
const tools = await this.getTools(bmadDir, true);
|
||||
|
||||
// Get ALL workflows using the new workflow command generator
|
||||
const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
|
||||
|
||||
// Convert workflow artifacts to expected format
|
||||
const workflows = workflowArtifacts
|
||||
.filter((artifact) => artifact.type === 'workflow-command')
|
||||
.map((artifact) => ({
|
||||
module: artifact.module,
|
||||
name: path.basename(artifact.relativePath, '.md'),
|
||||
path: artifact.sourcePath,
|
||||
content: artifact.content,
|
||||
}));
|
||||
|
||||
const bmadCommandsDir = path.join(location, 'bmad');
|
||||
const agentsDir = path.join(bmadCommandsDir, 'agents');
|
||||
const tasksDir = path.join(bmadCommandsDir, 'tasks');
|
||||
const toolsDir = path.join(bmadCommandsDir, 'tools');
|
||||
const workflowsDir = path.join(bmadCommandsDir, 'workflows');
|
||||
|
||||
await this.ensureDir(agentsDir);
|
||||
await this.ensureDir(tasksDir);
|
||||
await this.ensureDir(toolsDir);
|
||||
await this.ensureDir(workflowsDir);
|
||||
|
||||
// Install agent launchers
|
||||
for (const artifact of agentArtifacts) {
|
||||
const targetPath = path.join(agentsDir, `${artifact.module}-${artifact.name}.md`);
|
||||
await this.writeFile(targetPath, artifact.content);
|
||||
}
|
||||
|
||||
// Install tasks
|
||||
for (const task of tasks) {
|
||||
const content = await this.readFile(task.path);
|
||||
const commandContent = this.createTaskCommand(task, content);
|
||||
|
||||
const targetPath = path.join(tasksDir, `${task.module}-${task.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
}
|
||||
|
||||
// Install tools
|
||||
for (const tool of tools) {
|
||||
const content = await this.readFile(tool.path);
|
||||
const commandContent = this.createToolCommand(tool, content);
|
||||
|
||||
const targetPath = path.join(toolsDir, `${tool.module}-${tool.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
}
|
||||
|
||||
// Install workflows (already generated commands)
|
||||
for (const workflow of workflows) {
|
||||
// Use the pre-generated workflow command content
|
||||
const targetPath = path.join(workflowsDir, `${workflow.module}-${workflow.name}.md`);
|
||||
await this.writeFile(targetPath, workflow.content);
|
||||
}
|
||||
|
||||
const totalInstalled = agentArtifacts.length + tasks.length + tools.length + workflows.length;
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentArtifacts.length} agents installed`));
|
||||
console.log(chalk.dim(` - ${tasks.length} tasks installed`));
|
||||
console.log(chalk.dim(` - ${tools.length} tools installed`));
|
||||
console.log(chalk.dim(` - ${workflows.length} workflows installed`));
|
||||
console.log(chalk.dim(` - Location: ${path.relative(projectDir, location)}`));
|
||||
console.log(chalk.yellow(`\n 💡 Tip: Add 'model: gpt-4o' to command frontmatter to specify AI model`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentArtifacts.length,
|
||||
tasks: tasks.length,
|
||||
tools: tools.length,
|
||||
workflows: workflows.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create task command content
|
||||
*/
|
||||
createTaskCommand(task, content) {
|
||||
const nameMatch = content.match(/name="([^"]+)"/);
|
||||
const taskName = nameMatch ? nameMatch[1] : this.formatTitle(task.name);
|
||||
|
||||
return `---
|
||||
description: "Execute the ${taskName} task"
|
||||
---
|
||||
|
||||
# ${taskName} Task
|
||||
|
||||
${content}
|
||||
|
||||
## Module
|
||||
BMAD ${task.module.toUpperCase()} module
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create tool command content
|
||||
*/
|
||||
createToolCommand(tool, content) {
|
||||
const nameMatch = content.match(/name="([^"]+)"/);
|
||||
const toolName = nameMatch ? nameMatch[1] : this.formatTitle(tool.name);
|
||||
|
||||
return `---
|
||||
description: "Use the ${toolName} tool"
|
||||
---
|
||||
|
||||
# ${toolName} Tool
|
||||
|
||||
${content}
|
||||
|
||||
## Module
|
||||
BMAD ${tool.module.toUpperCase()} module
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow command content
|
||||
*/
|
||||
createWorkflowCommand(workflow, content) {
|
||||
const description = workflow.description || `Execute the ${workflow.name} workflow`;
|
||||
|
||||
return `---
|
||||
description: "${description}"
|
||||
---
|
||||
|
||||
# ${workflow.name} Workflow
|
||||
|
||||
${content}
|
||||
|
||||
## Module
|
||||
BMAD ${workflow.module.toUpperCase()} module
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup Auggie configuration
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
|
||||
// Only clean up project directory
|
||||
const location = path.join(projectDir, '.augment', 'commands');
|
||||
const bmadDir = path.join(location, 'bmad');
|
||||
|
||||
if (await fs.pathExists(bmadDir)) {
|
||||
await fs.remove(bmadDir);
|
||||
console.log(chalk.dim(` Removed old BMAD commands`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for Auggie
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object} Installation result
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
// Auggie uses .augment/commands directory
|
||||
const location = path.join(projectDir, '.augment', 'commands');
|
||||
const bmadCommandsDir = path.join(location, 'bmad');
|
||||
const agentsDir = path.join(bmadCommandsDir, 'agents');
|
||||
|
||||
// Create .augment/commands/bmad/agents directory if it doesn't exist
|
||||
await fs.ensureDir(agentsDir);
|
||||
|
||||
// Create custom agent launcher
|
||||
const launcherContent = `---
|
||||
description: "Use the ${agentName} custom agent"
|
||||
---
|
||||
|
||||
# ${agentName} Custom Agent
|
||||
|
||||
**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!
|
||||
|
||||
This is a launcher for the custom BMAD agent "${agentName}".
|
||||
|
||||
## Usage
|
||||
1. First run: \`${agentPath}\` to load the complete agent
|
||||
2. Then use this command to activate ${agentName}
|
||||
|
||||
The agent will follow the persona and instructions from the main agent file.
|
||||
|
||||
## Module
|
||||
BMAD Custom agent
|
||||
`;
|
||||
|
||||
const fileName = `custom-${agentName.toLowerCase()}.md`;
|
||||
const launcherPath = path.join(agentsDir, fileName);
|
||||
|
||||
// Write the launcher file
|
||||
await fs.writeFile(launcherPath, launcherContent, 'utf8');
|
||||
|
||||
return {
|
||||
ide: 'auggie',
|
||||
path: path.relative(projectDir, launcherPath),
|
||||
command: agentName,
|
||||
type: 'custom-agent-launcher',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { AuggieSetup };
|
||||
|
|
@ -1,506 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { getProjectRoot, getSourcePath, getModulePath } = require('../../../lib/project-root');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const {
|
||||
loadModuleInjectionConfig,
|
||||
shouldApplyInjection,
|
||||
filterAgentInstructions,
|
||||
resolveSubagentFiles,
|
||||
} = require('./shared/module-injections');
|
||||
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
||||
const { customAgentColonName } = require('./shared/path-utils');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
||||
/**
|
||||
* Claude Code IDE setup handler
|
||||
*/
|
||||
class ClaudeCodeSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('claude-code', 'Claude Code', true); // preferred IDE
|
||||
this.configDir = '.claude';
|
||||
this.commandsDir = 'commands';
|
||||
this.agentsDir = 'agents';
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompt for subagent installation location
|
||||
* @returns {Promise<string>} Selected location ('project' or 'user')
|
||||
*/
|
||||
async promptInstallLocation() {
|
||||
return prompts.select({
|
||||
message: 'Where would you like to install Claude Code subagents?',
|
||||
choices: [
|
||||
{ name: 'Project level (.claude/agents/)', value: 'project' },
|
||||
{ name: 'User level (~/.claude/agents/)', value: 'user' },
|
||||
],
|
||||
default: 'project',
|
||||
});
|
||||
}
|
||||
|
||||
// /**
|
||||
// * Collect configuration choices before installation
|
||||
// * @param {Object} options - Configuration options
|
||||
// * @returns {Object} Collected configuration
|
||||
// */
|
||||
// async collectConfiguration(options = {}) {
|
||||
// const config = {
|
||||
// subagentChoices: null,
|
||||
// installLocation: null,
|
||||
// };
|
||||
|
||||
// const sourceModulesPath = getSourcePath('modules');
|
||||
// const modules = options.selectedModules || [];
|
||||
|
||||
// for (const moduleName of modules) {
|
||||
// // Check for Claude Code sub-module injection config in SOURCE directory
|
||||
// const injectionConfigPath = path.join(sourceModulesPath, moduleName, 'sub-modules', 'claude-code', 'injections.yaml');
|
||||
|
||||
// if (await this.exists(injectionConfigPath)) {
|
||||
// const yaml = require('yaml');
|
||||
|
||||
// try {
|
||||
// // Load injection configuration
|
||||
// const configContent = await fs.readFile(injectionConfigPath, 'utf8');
|
||||
// const injectionConfig = yaml.parse(configContent);
|
||||
|
||||
// // Ask about subagents if they exist and we haven't asked yet
|
||||
// if (injectionConfig.subagents && !config.subagentChoices) {
|
||||
// config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
||||
|
||||
// if (config.subagentChoices.install !== 'none') {
|
||||
// config.installLocation = await this.promptInstallLocation();
|
||||
// }
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.log(chalk.yellow(` Warning: Failed to process ${moduleName} features: ${error.message}`));
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// return config;
|
||||
// }
|
||||
|
||||
/**
|
||||
* Cleanup old BMAD installation before reinstalling
|
||||
* @param {string} projectDir - Project directory
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);
|
||||
|
||||
// Remove any bmad* files from the commands directory (cleans up old bmad: and bmad- formats)
|
||||
if (await fs.pathExists(commandsDir)) {
|
||||
const entries = await fs.readdir(commandsDir);
|
||||
let removedCount = 0;
|
||||
for (const entry of entries) {
|
||||
if (entry.startsWith('bmad')) {
|
||||
await fs.remove(path.join(commandsDir, entry));
|
||||
removedCount++;
|
||||
}
|
||||
}
|
||||
// Also remove legacy bmad folder if it exists
|
||||
const bmadFolder = path.join(commandsDir, 'bmad');
|
||||
if (await fs.pathExists(bmadFolder)) {
|
||||
await fs.remove(bmadFolder);
|
||||
console.log(chalk.dim(` Removed old BMAD commands from ${this.name}`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up legacy folder structure (module/type/name.md) if it exists
|
||||
* This can be called after migration to remove old nested directories
|
||||
* @param {string} projectDir - Project directory
|
||||
*/
|
||||
async cleanupLegacyFolders(projectDir) {
|
||||
const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);
|
||||
|
||||
if (!(await fs.pathExists(commandsDir))) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Remove legacy bmad folder if it exists
|
||||
const bmadFolder = path.join(commandsDir, 'bmad');
|
||||
if (await fs.pathExists(bmadFolder)) {
|
||||
await fs.remove(bmadFolder);
|
||||
console.log(chalk.dim(` Removed legacy bmad folder from ${this.name}`));
|
||||
}
|
||||
}

  /**
   * Setup Claude Code IDE configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    // Store project directory for use in processContent
    this.projectDir = projectDir;

    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Clean up old BMAD installation first
    await this.cleanup(projectDir);

    // Create .claude/commands directory structure
    const claudeDir = path.join(projectDir, this.configDir);
    const commandsDir = path.join(claudeDir, this.commandsDir);
    await this.ensureDir(commandsDir);

    // Use underscore format: files written directly to commands dir (no bmad subfolder)
    // Creates: .claude/commands/bmad_bmm_pm.md

    // Generate agent launchers using AgentCommandGenerator
    // This creates small launcher files that reference the actual agents in _bmad/
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts, counts: agentCounts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    // Write agent launcher files using flat underscore naming
    // Creates files like: bmad_bmm_pm.md
    const agentCount = await agentGen.writeColonArtifacts(commandsDir, agentArtifacts);

    // Process Claude Code specific injections for installed modules
    // Use pre-collected configuration if available, or skip if already configured
    if (options.preCollectedConfig && options.preCollectedConfig._alreadyConfigured) {
      // IDE is already configured from previous installation, skip prompting
      // Just process with default/existing configuration
      await this.processModuleInjectionsWithConfig(projectDir, bmadDir, options, {});
    } else if (options.preCollectedConfig) {
      await this.processModuleInjectionsWithConfig(projectDir, bmadDir, options, options.preCollectedConfig);
    } else {
      await this.processModuleInjections(projectDir, bmadDir, options);
    }

    // Skip CLAUDE.md creation - let user manage their own CLAUDE.md file
    // await this.createClaudeConfig(projectDir, modules);

    // Generate workflow commands from manifest (if it exists)
    const workflowGen = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts } = await workflowGen.collectWorkflowArtifacts(bmadDir);

    // Write workflow-command artifacts using flat underscore naming
    // Creates files like: bmad_bmm_correct-course.md
    const workflowCommandCount = await workflowGen.writeColonArtifacts(commandsDir, workflowArtifacts);

    // Generate task and tool commands from manifests (if they exist)
    const taskToolGen = new TaskToolCommandGenerator();
    const taskToolResult = await taskToolGen.generateColonTaskToolCommands(projectDir, bmadDir, commandsDir);

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${agentCount} agents installed`));
    if (workflowCommandCount > 0) {
      console.log(chalk.dim(` - ${workflowCommandCount} workflow commands generated`));
    }
    if (taskToolResult.generated > 0) {
      console.log(
        chalk.dim(
          ` - ${taskToolResult.generated} task/tool commands generated (${taskToolResult.tasks} tasks, ${taskToolResult.tools} tools)`,
        ),
      );
    }
    console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, commandsDir)}`));

    return {
      success: true,
      agents: agentCount,
    };
  }

  // Method removed - CLAUDE.md file management left to user

  /**
   * Read and process file content
   */
  async readAndProcess(filePath, metadata) {
    const content = await fs.readFile(filePath, 'utf8');
    return this.processContent(content, metadata);
  }

  /**
   * Override processContent to keep {project-root} placeholder
   */
  processContent(content, metadata = {}) {
    // Use the base class method WITHOUT projectDir to preserve {project-root} placeholder
    return super.processContent(content, metadata);
  }

  /**
   * Get agents from source modules (not installed location)
   */
  async getAgentsFromSource(sourceDir, selectedModules) {
    const agents = [];

    // Add core agents
    const corePath = getModulePath('core');
    if (await fs.pathExists(path.join(corePath, 'agents'))) {
      const coreAgents = await getAgentsFromDir(path.join(corePath, 'agents'), 'core');
      agents.push(...coreAgents);
    }

    // Add module agents
    for (const moduleName of selectedModules) {
      const modulePath = path.join(sourceDir, moduleName);
      const agentsPath = path.join(modulePath, 'agents');

      if (await fs.pathExists(agentsPath)) {
        const moduleAgents = await getAgentsFromDir(agentsPath, moduleName);
        agents.push(...moduleAgents);
      }
    }

    return agents;
  }

  /**
   * Process module injections with pre-collected configuration
   */
  async processModuleInjectionsWithConfig(projectDir, bmadDir, options, preCollectedConfig) {
    // Get list of installed modules
    const modules = options.selectedModules || [];
    const { subagentChoices, installLocation } = preCollectedConfig;

    // Get the actual source directory (not the installation directory)
    await this.processModuleInjectionsInternal({
      projectDir,
      modules,
      handler: 'claude-code',
      subagentChoices,
      installLocation,
      interactive: false,
    });
  }

  /**
   * Process Claude Code specific injections for installed modules
   * Looks for injections.yaml in each module's claude-code sub-module
   */
  async processModuleInjections(projectDir, bmadDir, options) {
    // Get list of installed modules
    const modules = options.selectedModules || [];
    let subagentChoices = null;
    let installLocation = null;

    // Get the actual source directory (not the installation directory)
    const { subagentChoices: updatedChoices, installLocation: updatedLocation } = await this.processModuleInjectionsInternal({
      projectDir,
      modules,
      handler: 'claude-code',
      subagentChoices,
      installLocation,
      interactive: true,
    });

    if (updatedChoices) {
      subagentChoices = updatedChoices;
    }
    if (updatedLocation) {
      installLocation = updatedLocation;
    }
  }

  async processModuleInjectionsInternal({ projectDir, modules, handler, subagentChoices, installLocation, interactive = false }) {
    let choices = subagentChoices;
    let location = installLocation;

    for (const moduleName of modules) {
      const configData = await loadModuleInjectionConfig(handler, moduleName);

      if (!configData) {
        continue;
      }

      const { config, handlerBaseDir } = configData;

      if (interactive) {
        console.log(chalk.cyan(`\nConfiguring ${moduleName} ${handler.replace('-', ' ')} features...`));
      }

      if (interactive && config.subagents && !choices) {
        // choices = await this.promptSubagentInstallation(config.subagents);
        // if (choices.install !== 'none') {
        //   location = await this.promptInstallLocation();
        // }
      }

      if (config.injections && choices && choices.install !== 'none') {
        for (const injection of config.injections) {
          if (shouldApplyInjection(injection, choices)) {
            await this.injectContent(projectDir, injection, choices);
          }
        }
      }

      if (config.subagents && choices && choices.install !== 'none') {
        await this.copySelectedSubagents(projectDir, handlerBaseDir, config.subagents, choices, location || 'project');
      }
    }

    return { subagentChoices: choices, installLocation: location };
  }

  /**
   * Prompt user for subagent installation preferences
   */
  async promptSubagentInstallation(subagentConfig) {
    // First ask if they want to install subagents
    const install = await prompts.select({
      message: 'Would you like to install Claude Code subagents for enhanced functionality?',
      choices: [
        { name: 'Yes, install all subagents', value: 'all' },
        { name: 'Yes, let me choose specific subagents', value: 'selective' },
        { name: 'No, skip subagent installation', value: 'none' },
      ],
      default: 'all',
    });

    if (install === 'selective') {
      // Show list of available subagents with descriptions
      const subagentInfo = {
        'market-researcher.md': 'Market research and competitive analysis',
        'requirements-analyst.md': 'Requirements extraction and validation',
        'technical-evaluator.md': 'Technology stack evaluation',
        'epic-optimizer.md': 'Epic and story breakdown optimization',
        'document-reviewer.md': 'Document quality review',
      };

      const selected = await prompts.multiselect({
        message: `Select subagents to install ${chalk.dim('(↑/↓ navigates multiselect, SPACE toggles, A toggles All, ENTER confirms)')}:`,
        options: subagentConfig.files.map((file) => ({
          label: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
          value: file,
        })),
        initialValues: subagentConfig.files,
      });

      return { install: 'selective', selected };
    }

    return { install };
  }

  /**
   * Inject content at specified point in file
   */
  async injectContent(projectDir, injection, subagentChoices = null) {
    const targetPath = path.join(projectDir, injection.file);

    if (await this.exists(targetPath)) {
      let content = await fs.readFile(targetPath, 'utf8');
      const marker = `<!-- IDE-INJECT-POINT: ${injection.point} -->`;

      if (content.includes(marker)) {
        let injectionContent = injection.content;

        // Filter content if selective subagents chosen
        if (subagentChoices && subagentChoices.install === 'selective' && injection.point === 'pm-agent-instructions') {
          injectionContent = filterAgentInstructions(injection.content, subagentChoices.selected);
        }

        content = content.replace(marker, injectionContent);
        await fs.writeFile(targetPath, content);
        console.log(chalk.dim(` Injected: ${injection.point} → ${injection.file}`));
      }
    }
  }
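
  // Illustrative example (editorial note, not part of the installed files): given an
  // injection such as
  //   { file: 'docs/pm-agent.md', point: 'pm-agent-instructions', content: '...subagent notes...' }
  // a target file containing the literal marker
  //   <!-- IDE-INJECT-POINT: pm-agent-instructions -->
  // has that marker replaced in place with the (optionally filtered) injection content.
  // The actual injection files and points are defined by each module's injections.yaml;
  // the file path shown here is a placeholder.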

  /**
   * Copy selected subagents to appropriate Claude agents directory
   */
  async copySelectedSubagents(projectDir, handlerBaseDir, subagentConfig, choices, location) {
    const os = require('node:os');

    // Determine target directory based on user choice
    let targetDir;
    if (location === 'user') {
      targetDir = path.join(os.homedir(), '.claude', 'agents');
      console.log(chalk.dim(` Installing subagents globally to: ~/.claude/agents/`));
    } else {
      targetDir = path.join(projectDir, '.claude', 'agents');
      console.log(chalk.dim(` Installing subagents to project: .claude/agents/`));
    }

    // Ensure target directory exists
    await this.ensureDir(targetDir);

    const resolvedFiles = await resolveSubagentFiles(handlerBaseDir, subagentConfig, choices);

    let copiedCount = 0;
    for (const resolved of resolvedFiles) {
      try {
        const sourcePath = resolved.absolutePath;

        const subFolder = path.dirname(resolved.relativePath);
        let targetPath;
        if (subFolder && subFolder !== '.') {
          const targetSubDir = path.join(targetDir, subFolder);
          await this.ensureDir(targetSubDir);
          targetPath = path.join(targetSubDir, path.basename(resolved.file));
        } else {
          targetPath = path.join(targetDir, path.basename(resolved.file));
        }

        await fs.copyFile(sourcePath, targetPath);
        console.log(chalk.green(` ✓ Installed: ${subFolder === '.' ? '' : `${subFolder}/`}${path.basename(resolved.file, '.md')}`));
        copiedCount++;
      } catch (error) {
        console.log(chalk.yellow(` ⚠ Error copying ${resolved.file}: ${error.message}`));
      }
    }

    if (copiedCount > 0) {
      console.log(chalk.dim(` Total subagents installed: ${copiedCount}`));
    }
  }

  /**
   * Install a custom agent launcher for Claude Code
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object|null} Info about created command
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    if (!(await this.exists(path.join(projectDir, this.configDir)))) {
      return null; // IDE not configured for this project
    }

    await this.ensureDir(commandsDir);

    const launcherContent = `---
name: '${agentName}'
description: '${agentName} agent'
---

You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.

<agent-activation CRITICAL="TRUE">
1. LOAD the FULL agent file from @${agentPath}
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
3. FOLLOW every step in the <activation> section precisely
4. DISPLAY the welcome/greeting as instructed
5. PRESENT the numbered menu
6. WAIT for user input before proceeding
</agent-activation>
`;

    // Use underscore format: bmad_custom_fred-commit-poet.md
    // Written directly to commands dir (no bmad subfolder)
    const launcherName = customAgentColonName(agentName);
    const launcherPath = path.join(commandsDir, launcherName);
    await this.writeFile(launcherPath, launcherContent);

    return {
      path: launcherPath,
      command: `/${launcherName.replace('.md', '')}`,
    };
  }
}

module.exports = { ClaudeCodeSetup };
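
// Minimal usage sketch (illustrative only, not part of the diff): how an installer might
// drive this handler. Option names mirror the setup() signature above; the require path
// and directories are placeholders.
//
// const { ClaudeCodeSetup } = require('./claude-code');
//
// (async () => {
//   const ide = new ClaudeCodeSetup();
//   const result = await ide.setup('/path/to/project', '/path/to/project/_bmad', {
//     selectedModules: ['bmm'],
//     preCollectedConfig: { _alreadyConfigured: true },
//   });
//   console.log(result); // → { success: true, agents: <count> }
// })();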
@ -1,272 +0,0 @@
const path = require('node:path');
const fs = require('fs-extra');
const chalk = require('chalk');
const { BaseIdeSetup } = require('./_base-ide');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
const { getAgentsFromBmad, getTasksFromBmad } = require('./shared/bmad-artifacts');
const { toDashPath, customAgentDashName } = require('./shared/path-utils');

/**
 * Cline IDE setup handler
 * Installs BMAD artifacts to .clinerules/workflows with flattened naming
 */
class ClineSetup extends BaseIdeSetup {
  constructor() {
    super('cline', 'Cline', false);
    this.configDir = '.clinerules';
    this.workflowsDir = 'workflows';
  }

  /**
   * Setup Cline IDE configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Create .clinerules/workflows directory
    const clineDir = path.join(projectDir, this.configDir);
    const workflowsDir = path.join(clineDir, this.workflowsDir);

    await this.ensureDir(workflowsDir);

    // Clear old BMAD files
    await this.clearOldBmadFiles(workflowsDir);

    // Collect all artifacts
    const { artifacts, counts } = await this.collectClineArtifacts(projectDir, bmadDir, options);

    // Write flattened files
    const written = await this.flattenAndWriteArtifacts(artifacts, workflowsDir);

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${counts.agents} agents installed`));
    console.log(chalk.dim(` - ${counts.tasks} tasks installed`));
    console.log(chalk.dim(` - ${counts.workflows} workflow commands installed`));
    if (counts.workflowLaunchers > 0) {
      console.log(chalk.dim(` - ${counts.workflowLaunchers} workflow launchers installed`));
    }
    console.log(chalk.dim(` - ${written} files written to ${path.relative(projectDir, workflowsDir)}`));

    // Usage instructions
    console.log(chalk.yellow('\n ⚠️ How to Use Cline Workflows'));
    console.log(chalk.cyan(' BMAD workflows are available as slash commands in Cline'));
    console.log(chalk.dim(' Usage:'));
    console.log(chalk.dim(' - Type / to see available commands'));
    console.log(chalk.dim(' - All BMAD items start with "bmad_"'));
    console.log(chalk.dim(' - Example: /bmad_bmm_pm'));

    return {
      success: true,
      agents: counts.agents,
      tasks: counts.tasks,
      workflows: counts.workflows,
      workflowLaunchers: counts.workflowLaunchers,
      written,
    };
  }

  /**
   * Detect Cline installation by checking for .clinerules/workflows directory
   */
  async detect(projectDir) {
    const workflowsDir = path.join(projectDir, this.configDir, this.workflowsDir);

    if (!(await fs.pathExists(workflowsDir))) {
      return false;
    }

    const entries = await fs.readdir(workflowsDir);
    return entries.some((entry) => entry.startsWith('bmad'));
  }

  /**
   * Collect all artifacts for Cline export
   */
  async collectClineArtifacts(projectDir, bmadDir, options = {}) {
    const selectedModules = options.selectedModules || [];
    const artifacts = [];

    // Generate agent launchers
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, selectedModules);

    // Process agent launchers with project-specific paths
    for (const agentArtifact of agentArtifacts) {
      const content = agentArtifact.content;

      artifacts.push({
        type: 'agent',
        module: agentArtifact.module,
        sourcePath: agentArtifact.sourcePath,
        relativePath: agentArtifact.relativePath,
        content,
      });
    }

    // Get tasks
    const tasks = await getTasksFromBmad(bmadDir, selectedModules);
    for (const task of tasks) {
      const content = await this.readAndProcessWithProject(
        task.path,
        {
          module: task.module,
          name: task.name,
        },
        projectDir,
      );

      artifacts.push({
        type: 'task',
        module: task.module,
        sourcePath: task.path,
        relativePath: path.join(task.module, 'tasks', `${task.name}.md`),
        content,
      });
    }

    // Get workflows
    const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
    artifacts.push(...workflowArtifacts);

    return {
      artifacts,
      counts: {
        agents: agentArtifacts.length,
        tasks: tasks.length,
        workflows: workflowCounts.commands,
        workflowLaunchers: workflowCounts.launchers,
      },
    };
  }

  /**
   * Flatten file path to bmad_module_type_name.md format
   * Uses shared toDashPath utility
   */
  flattenFilename(relativePath) {
    return toDashPath(relativePath);
  }
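
  // Illustrative example (assumption - the exact mapping is defined by the shared
  // toDashPath utility, which is not shown in this diff): a relative path such as
  //   bmm/tasks/correct-course.md
  // is flattened into a single prefixed filename along the lines of
  //   bmad_bmm_tasks_correct-course.md
  // so every BMAD artifact lands as one flat file in .clinerules/workflows.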

  /**
   * Write all artifacts with flattened names
   */
  async flattenAndWriteArtifacts(artifacts, destDir) {
    let written = 0;

    for (const artifact of artifacts) {
      const flattenedName = this.flattenFilename(artifact.relativePath);
      const targetPath = path.join(destDir, flattenedName);
      await fs.writeFile(targetPath, artifact.content);
      written++;
    }

    return written;
  }

  /**
   * Clear old BMAD files from the workflows directory
   */
  async clearOldBmadFiles(destDir) {
    if (!(await fs.pathExists(destDir))) {
      return;
    }

    const entries = await fs.readdir(destDir);

    for (const entry of entries) {
      if (!entry.startsWith('bmad')) {
        continue;
      }

      const entryPath = path.join(destDir, entry);
      const stat = await fs.stat(entryPath);
      if (stat.isFile()) {
        await fs.remove(entryPath);
      } else if (stat.isDirectory()) {
        await fs.remove(entryPath);
      }
    }
  }

  /**
   * Read and process file with project-specific paths
   */
  async readAndProcessWithProject(filePath, metadata, projectDir) {
    const content = await fs.readFile(filePath, 'utf8');
    return super.processContent(content, metadata, projectDir);
  }

  /**
   * Cleanup Cline configuration
   */
  async cleanup(projectDir) {
    const workflowsDir = path.join(projectDir, this.configDir, this.workflowsDir);
    await this.clearOldBmadFiles(workflowsDir);
    console.log(chalk.dim(`Removed ${this.name} BMAD configuration`));
  }

  /**
   * Install a custom agent launcher for Cline
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object} Installation result
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const clineDir = path.join(projectDir, this.configDir);
    const workflowsDir = path.join(clineDir, this.workflowsDir);

    // Create .clinerules/workflows directory if it doesn't exist
    await fs.ensureDir(workflowsDir);

    // Create custom agent launcher workflow
    const launcherContent = `name: ${agentName}
description: Custom BMAD agent: ${agentName}

# ${agentName} Custom Agent

**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!

This is a launcher for the custom BMAD agent "${agentName}".

## Usage
1. First run: \`${agentPath}\` to load the complete agent
2. Then use this workflow as ${agentName}

The agent will follow the persona and instructions from the main agent file.

---

*Generated by BMAD Method*`;

    // Use underscore format: bmad_custom_fred-commit-poet.md
    const fileName = customAgentDashName(agentName);
    const launcherPath = path.join(workflowsDir, fileName);

    // Write the launcher file
    await fs.writeFile(launcherPath, launcherContent, 'utf8');

    return {
      ide: 'cline',
      path: path.relative(projectDir, launcherPath),
      command: fileName.replace('.md', ''),
      type: 'custom-agent-launcher',
    };
  }

  /**
   * Utility: Ensure directory exists
   */
  async ensureDir(dirPath) {
    await fs.ensureDir(dirPath);
  }
}

module.exports = { ClineSetup };
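
// Minimal usage sketch (illustrative only, not part of the diff). The return shape comes
// from ClineSetup.setup() above; the require path and directories are placeholders.
//
// const { ClineSetup } = require('./cline');
//
// (async () => {
//   const cline = new ClineSetup();
//   const { agents, tasks, workflows, written } = await cline.setup(
//     '/path/to/project',
//     '/path/to/project/_bmad',
//     { selectedModules: ['bmm'] },
//   );
//   console.log(`Wrote ${written} files (${agents} agents, ${tasks} tasks, ${workflows} workflows)`);
// })();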
@ -2,28 +2,85 @@ const path = require('node:path');
const fs = require('fs-extra');
const os = require('node:os');
const chalk = require('chalk');
const { BaseIdeSetup } = require('./_base-ide');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
const { getTasksFromBmad } = require('./shared/bmad-artifacts');
const { toDashPath, customAgentDashName } = require('./shared/path-utils');
const { ConfigDrivenIdeSetup } = require('./_config-driven');
const { getSourcePath } = require('../../../lib/project-root');
const prompts = require('../../../lib/prompts');

/**
 * Codex setup handler (CLI mode)
 *
 * Extends config-driven setup with Codex-specific features:
 * - Install location choice (global vs project-specific)
 * - Configuration prompts
 * - Detailed setup instructions
 */
class CodexSetup extends BaseIdeSetup {
class CodexSetup extends ConfigDrivenIdeSetup {
  constructor() {
    super('codex', 'Codex', true); // preferred IDE
    // Initialize with codex platform config
    const platformConfig = {
      name: 'Codex',
      preferred: false,
      installer: {
        target_dir: '.codex/prompts',
        frontmatter_template: 'none', // Codex uses no frontmatter
      },
    };
    super('codex', platformConfig);
  }

  /**
   * Collect configuration choices before installation
   * @param {Object} options - Configuration options
   * @returns {Object} Collected configuration
   * Get the Codex agent command activation header from central template
   * @returns {string} The activation header text
   */
  async collectConfiguration(options = {}) {
  async getAgentCommandHeader() {
    const headerPath = getSourcePath('tools/cli/installers/lib/ide/templates', 'codex-agent-command-template.md');
    return await fs.readFile(headerPath, 'utf8');
  }

  /**
   * Override setup to add install location choice and instructions
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Collect install location choice
    const installLocation = options.preCollectedConfig?.installLocation || (await this.collectInstallLocation());

    // Determine destination directory
    const destDir = this.getCodexPromptDir(projectDir, installLocation);
    await fs.ensureDir(destDir);
    await this.clearOldBmadFiles(destDir);

    // Use unified installer with custom destination
    const { UnifiedInstaller, NamingStyle } = require('./shared/unified-installer');
    const installer = new UnifiedInstaller(this.bmadFolderName);
    const counts = await installer.install(
      projectDir,
      bmadDir,
      {
        targetDir: destDir,
        namingStyle: NamingStyle.FLAT_DASH,
        frontmatterTemplate: 'none', // Codex uses no frontmatter
      },
      options.selectedModules || [],
    );

    // Show results and instructions
    this.printResults(counts, destDir, installLocation);

    return {
      success: true,
      mode: 'cli',
      ...counts,
      destination: destDir,
      installLocation,
    };
  }

  /**
   * Collect install location choice from user
   */
  async collectInstallLocation() {
    let confirmed = false;
    let installLocation = 'global';

@ -32,18 +89,17 @@ class CodexSetup extends BaseIdeSetup {
      message: 'Where would you like to install Codex CLI prompts?',
      choices: [
        {
          name: 'Global - Simple for single project ' + '(~/.codex/prompts, but references THIS project only)',
          name: 'Global - Simple for single project (~/.codex/prompts, references THIS project only)',
          value: 'global',
        },
        {
          name: `Project-specific - Recommended for real work (requires CODEX_HOME=<project-dir>${path.sep}.codex)`,
          name: `Project-specific - Recommended for real work (requires CODEX_HOME=<project-dir>/.codex)`,
          value: 'project',
        },
      ],
      default: 'global',
    });

    // Display detailed instructions for the chosen option
    console.log('');
    if (installLocation === 'project') {
      console.log(this.getProjectSpecificInstructions());

@ -51,7 +107,6 @@ class CodexSetup extends BaseIdeSetup {
      console.log(this.getGlobalInstructions());
    }

    // Confirm the choice
    confirmed = await prompts.confirm({
      message: 'Proceed with this installation option?',
      default: true,

@ -66,168 +121,8 @@ class CodexSetup extends BaseIdeSetup {
  }

  /**
   * Setup Codex configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   * Get Codex prompts directory based on location choice
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Always use CLI mode
    const mode = 'cli';

    // Get installation location from pre-collected config or default to global
    const installLocation = options.preCollectedConfig?.installLocation || 'global';

    const { artifacts, counts } = await this.collectClaudeArtifacts(projectDir, bmadDir, options);

    const destDir = this.getCodexPromptDir(projectDir, installLocation);
    await fs.ensureDir(destDir);
    await this.clearOldBmadFiles(destDir);

    // Collect artifacts and write using underscore format
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
    const agentCount = await agentGen.writeDashArtifacts(destDir, agentArtifacts);

    const tasks = await getTasksFromBmad(bmadDir, options.selectedModules || []);
    const taskArtifacts = [];
    for (const task of tasks) {
      const content = await this.readAndProcessWithProject(
        task.path,
        {
          module: task.module,
          name: task.name,
        },
        projectDir,
      );
      taskArtifacts.push({
        type: 'task',
        module: task.module,
        sourcePath: task.path,
        relativePath: path.join(task.module, 'tasks', `${task.name}.md`),
        content,
      });
    }

    const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
    const workflowCount = await workflowGenerator.writeDashArtifacts(destDir, workflowArtifacts);

    // Also write tasks using underscore format
    const ttGen = new TaskToolCommandGenerator();
    const tasksWritten = await ttGen.writeDashArtifacts(destDir, taskArtifacts);

    const written = agentCount + workflowCount + tasksWritten;

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - Mode: CLI`));
    console.log(chalk.dim(` - ${counts.agents} agents exported`));
    console.log(chalk.dim(` - ${counts.tasks} tasks exported`));
    console.log(chalk.dim(` - ${counts.workflows} workflow commands exported`));
    if (counts.workflowLaunchers > 0) {
      console.log(chalk.dim(` - ${counts.workflowLaunchers} workflow launchers exported`));
    }
    console.log(chalk.dim(` - ${written} Codex prompt files written`));
    console.log(chalk.dim(` - Destination: ${destDir}`));

    return {
      success: true,
      mode,
      artifacts,
      counts,
      destination: destDir,
      written,
      installLocation,
    };
  }

  /**
   * Detect Codex installation by checking for BMAD prompt exports
   */
  async detect(projectDir) {
    // Check both global and project-specific locations
    const globalDir = this.getCodexPromptDir(null, 'global');
    const projectDir_local = projectDir || process.cwd();
    const projectSpecificDir = this.getCodexPromptDir(projectDir_local, 'project');

    // Check global location
    if (await fs.pathExists(globalDir)) {
      const entries = await fs.readdir(globalDir);
      if (entries.some((entry) => entry.startsWith('bmad'))) {
        return true;
      }
    }

    // Check project-specific location
    if (await fs.pathExists(projectSpecificDir)) {
      const entries = await fs.readdir(projectSpecificDir);
      if (entries.some((entry) => entry.startsWith('bmad'))) {
        return true;
      }
    }

    return false;
  }

  /**
   * Collect Claude-style artifacts for Codex export.
   * Returns the normalized artifact list for further processing.
   */
  async collectClaudeArtifacts(projectDir, bmadDir, options = {}) {
    const selectedModules = options.selectedModules || [];
    const artifacts = [];

    // Generate agent launchers
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, selectedModules);

    for (const artifact of agentArtifacts) {
      artifacts.push({
        type: 'agent',
        module: artifact.module,
        sourcePath: artifact.sourcePath,
        relativePath: artifact.relativePath,
        content: artifact.content,
      });
    }

    const tasks = await getTasksFromBmad(bmadDir, selectedModules);
    for (const task of tasks) {
      const content = await this.readAndProcessWithProject(
        task.path,
        {
          module: task.module,
          name: task.name,
        },
        projectDir,
      );

      artifacts.push({
        type: 'task',
        module: task.module,
        sourcePath: task.path,
        relativePath: path.join(task.module, 'tasks', `${task.name}.md`),
        content,
      });
    }

    const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
    artifacts.push(...workflowArtifacts);

    return {
      artifacts,
      counts: {
        agents: agentArtifacts.length,
        tasks: tasks.length,
        workflows: workflowCounts.commands,
        workflowLaunchers: workflowCounts.launchers,
      },
    };
  }

  getCodexPromptDir(projectDir = null, location = 'global') {
    if (location === 'project' && projectDir) {
      return path.join(projectDir, '.codex', 'prompts');

@ -235,51 +130,35 @@ class CodexSetup extends BaseIdeSetup {
    return path.join(os.homedir(), '.codex', 'prompts');
  }

  async flattenAndWriteArtifacts(artifacts, destDir) {
    let written = 0;

    for (const artifact of artifacts) {
      const flattenedName = this.flattenFilename(artifact.relativePath);
      const targetPath = path.join(destDir, flattenedName);
      await fs.writeFile(targetPath, artifact.content);
      written++;
  /**
   * Print results and instructions
   */
  printResults(counts, destDir, installLocation) {
    console.log(chalk.green(`✓ Codex configured:`));
    console.log(chalk.dim(` - Mode: CLI`));
    console.log(chalk.dim(` - Location: ${installLocation}`));
    console.log(chalk.dim(` - ${counts.agents} agents installed`));
    if (counts.workflows > 0) {
      console.log(chalk.dim(` - ${counts.workflows} workflow commands generated`));
    }

    return written;
  }

  async clearOldBmadFiles(destDir) {
    if (!(await fs.pathExists(destDir))) {
      return;
    if (counts.tasks + counts.tools > 0) {
      console.log(chalk.dim(` - ${counts.tasks + counts.tools} task/tool commands (${counts.tasks} tasks, ${counts.tools} tools)`));
    }
    console.log(chalk.dim(` - ${counts.total} files written`));
    console.log(chalk.dim(` - Destination: ${destDir}`));

    const entries = await fs.readdir(destDir);

    for (const entry of entries) {
      if (!entry.startsWith('bmad')) {
        continue;
      }

      const entryPath = path.join(destDir, entry);
      const stat = await fs.stat(entryPath);
      if (stat.isFile()) {
        await fs.remove(entryPath);
      } else if (stat.isDirectory()) {
        await fs.remove(entryPath);
      }
    // Show setup instructions if project-specific
    if (installLocation === 'project') {
      console.log('');
      console.log(chalk.yellow(' Next steps:'));
      console.log(chalk.dim(this.getProjectSpecificNextSteps()));
    }
  }

  async readAndProcessWithProject(filePath, metadata, projectDir) {
    const content = await fs.readFile(filePath, 'utf8');
    return super.processContent(content, metadata, projectDir);
  }

  /**
   * Get instructions for global installation
   * @returns {string} Instructions text
   */
  getGlobalInstructions(destDir) {
  getGlobalInstructions() {
    const lines = [
      '',
      chalk.bold.cyan('═'.repeat(70)),

@ -292,7 +171,7 @@ class CodexSetup extends BaseIdeSetup {
      chalk.dim(" To use with other projects, you'd need to copy the _bmad dir"),
      '',
      chalk.green(' ✓ You can now use /commands in Codex CLI'),
      chalk.dim(' Example: /bmad_bmm_pm'),
      chalk.dim(' Example: /bmad-bmm-pm'),
      chalk.dim(' Type / to see all available commands'),
      '',
      chalk.bold.cyan('═'.repeat(70)),

@ -303,11 +182,8 @@ class CodexSetup extends BaseIdeSetup {

  /**
   * Get instructions for project-specific installation
   * @param {string} projectDir - Optional project directory
   * @param {string} destDir - Optional destination directory
   * @returns {string} Instructions text
   */
  getProjectSpecificInstructions(projectDir = null, destDir = null) {
  getProjectSpecificInstructions() {
    const isWindows = os.platform() === 'win32';

    const commonLines = [

@ -316,7 +192,7 @@ class CodexSetup extends BaseIdeSetup {
      chalk.bold.yellow(' Project-Specific Codex Configuration'),
      chalk.bold.cyan('═'.repeat(70)),
      '',
      chalk.white(' Prompts will be installed to: ') + chalk.cyan(destDir || '<project>/.codex/prompts'),
      chalk.white(' Prompts will be installed to: ') + chalk.cyan('<project>/.codex/prompts'),
      '',
      chalk.bold.yellow(' ⚠️ REQUIRED: You must set CODEX_HOME to use these prompts'),
      '',

@ -341,24 +217,75 @@ class CodexSetup extends BaseIdeSetup {
      chalk.dim(' After adding, run: source ~/.bashrc (or source ~/.zshrc)'),
      chalk.dim(' (The $PWD uses your current working directory)'),
    ];
    const closingLines = [
      '',
      chalk.dim(' This tells Codex CLI to use prompts from this project instead of ~/.codex'),
      '',
      chalk.bold.cyan('═'.repeat(70)),
      '',
    ];

    const lines = [...commonLines, ...(isWindows ? windowsLines : unixLines), ...closingLines];

    return lines.join('\n');
    return [...commonLines, ...(isWindows ? windowsLines : unixLines)].join('\n');
  }

  /**
   * Cleanup Codex configuration
   * Get next steps for project-specific installation
   */
  getProjectSpecificNextSteps() {
    const isWindows = os.platform() === 'win32';
    if (isWindows) {
      return `Create codex.cmd in project root with:\n set CODEX_HOME=%~dp0.codex\n codex %*`;
    }
    return `Add to ~/.bashrc or ~/.zshrc:\n alias codex='CODEX_HOME="$PWD/.codex" codex'`;
  }
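
  // Illustrative effect of the project-specific option (assumption - exact CLI behavior
  // depends on Codex itself): with the alias above in place, running `codex` from the
  // project root resolves CODEX_HOME to <project>/.codex, so the bmad-* prompts written
  // to <project>/.codex/prompts are the ones the CLI loads instead of ~/.codex.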

  /**
   * Clear old BMAD files from destination
   */
  async clearOldBmadFiles(destDir) {
    if (!(await fs.pathExists(destDir))) {
      return;
    }

    const entries = await fs.readdir(destDir);
    for (const entry of entries) {
      if (!entry.startsWith('bmad')) {
        continue;
      }
      const entryPath = path.join(destDir, entry);
      const stat = await fs.stat(entryPath);
      if (stat.isFile()) {
        await fs.remove(entryPath);
      } else if (stat.isDirectory()) {
        await fs.remove(entryPath);
      }
    }
  }

  /**
   * Detect Codex installation (checks both global and project locations)
   */
  async detect(projectDir) {
    const globalDir = this.getCodexPromptDir(null, 'global');
    const projectDir_local = projectDir || process.cwd();
    const projectSpecificDir = this.getCodexPromptDir(projectDir_local, 'project');

    // Check global location
    if (await fs.pathExists(globalDir)) {
      const entries = await fs.readdir(globalDir);
      if (entries.some((entry) => entry.startsWith('bmad'))) {
        return true;
      }
    }

    // Check project-specific location
    if (await fs.pathExists(projectSpecificDir)) {
      const entries = await fs.readdir(projectSpecificDir);
      if (entries.some((entry) => entry.startsWith('bmad'))) {
        return true;
      }
    }

    return false;
  }

  /**
   * Cleanup Codex configuration (both global and project-specific)
   */
  async cleanup(projectDir = null) {
    // Clean both global and project-specific locations
    const globalDir = this.getCodexPromptDir(null, 'global');
    await this.clearOldBmadFiles(globalDir);

@ -370,37 +297,30 @@ class CodexSetup extends BaseIdeSetup {

  /**
   * Install a custom agent launcher for Codex
   * @param {string} projectDir - Project directory (not used, Codex installs to home)
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object|null} Info about created command
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const destDir = this.getCodexPromptDir(projectDir, 'project');
    await fs.ensureDir(destDir);

    const launcherContent = `---
name: '${agentName}'
description: '${agentName} agent'
---
    // Load the custom agent launcher template
    const templatePath = getSourcePath('tools/cli/installers/lib/ide/templates', 'codex-custom-agent-template.md');
    let templateContent = await fs.readFile(templatePath, 'utf8');

You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
    // Get activation header
    const activationHeader = await this.getAgentCommandHeader();

<agent-activation CRITICAL="TRUE">
1. LOAD the FULL agent file from @${agentPath}
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
3. FOLLOW every step in the <activation> section precisely
4. DISPLAY the welcome/greeting as instructed
5. PRESENT the numbered menu
6. WAIT for user input before proceeding
</agent-activation>
`;
    // Replace placeholders
    const relativePath = `_bmad/${agentPath}`;
    templateContent = templateContent
      .replaceAll('{{name}}', agentName)
      .replaceAll('{{description}}', `${agentName} agent`)
      .replaceAll('{{activationHeader}}', activationHeader)
      .replaceAll('{{relativePath}}', relativePath);

    // Use underscore format: bmad_custom_fred-commit-poet.md
    const { customAgentDashName } = require('./shared/path-utils');
    const fileName = customAgentDashName(agentName);
    const launcherPath = path.join(destDir, fileName);
    await fs.writeFile(launcherPath, launcherContent, 'utf8');
    await fs.writeFile(launcherPath, templateContent, 'utf8');

    return {
      path: path.relative(projectDir, launcherPath),
@ -1,149 +0,0 @@
const path = require('node:path');
const fs = require('fs-extra');
const { BaseIdeSetup } = require('./_base-ide');
const chalk = require('chalk');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
const { customAgentColonName } = require('./shared/path-utils');

/**
 * Crush IDE setup handler
 * Creates commands in .crush/commands/ directory structure using flat colon naming
 */
class CrushSetup extends BaseIdeSetup {
  constructor() {
    super('crush', 'Crush');
    this.configDir = '.crush';
    this.commandsDir = 'commands';
  }

  /**
   * Setup Crush IDE configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Clean up old BMAD installation first
    await this.cleanup(projectDir);

    // Create .crush/commands directory
    const crushDir = path.join(projectDir, this.configDir);
    const commandsDir = path.join(crushDir, this.commandsDir);
    await this.ensureDir(commandsDir);

    // Use underscore format: files written directly to commands dir (no bmad subfolder)
    // Creates: .crush/commands/bmad_bmm_pm.md

    // Generate agent launchers
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    // Write agent launcher files using flat underscore naming
    // Creates files like: bmad_bmm_pm.md
    const agentCount = await agentGen.writeColonArtifacts(commandsDir, agentArtifacts);

    // Get ALL workflows using the new workflow command generator
    const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);

    // Write workflow-command artifacts using flat underscore naming
    // Creates files like: bmad_bmm_correct-course.md
    const workflowCount = await workflowGenerator.writeColonArtifacts(commandsDir, workflowArtifacts);

    // Generate task and tool commands using flat underscore naming
    const taskToolGen = new TaskToolCommandGenerator();
    const taskToolResult = await taskToolGen.generateColonTaskToolCommands(projectDir, bmadDir, commandsDir);

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${agentCount} agent commands created`));
    console.log(chalk.dim(` - ${taskToolResult.tasks} task commands created`));
    console.log(chalk.dim(` - ${taskToolResult.tools} tool commands created`));
    console.log(chalk.dim(` - ${workflowCount} workflow commands created`));
    console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, commandsDir)}`));
    console.log(chalk.dim('\n Commands can be accessed via Crush command palette'));

    return {
      success: true,
      agents: agentCount,
      tasks: taskToolResult.tasks || 0,
      tools: taskToolResult.tools || 0,
      workflows: workflowCount,
    };
  }

  /**
   * Cleanup Crush configuration
   */
  async cleanup(projectDir) {
    const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    // Remove any bmad* files from the commands directory (cleans up old bmad: and bmad- formats)
    if (await fs.pathExists(commandsDir)) {
      const entries = await fs.readdir(commandsDir);
      for (const entry of entries) {
        if (entry.startsWith('bmad')) {
          await fs.remove(path.join(commandsDir, entry));
        }
      }
    }
    // Also remove legacy bmad folder if it exists
    const bmadFolder = path.join(commandsDir, 'bmad');
    if (await fs.pathExists(bmadFolder)) {
      await fs.remove(bmadFolder);
      console.log(chalk.dim(`Removed BMAD commands from Crush`));
    }
  }

  /**
   * Install a custom agent launcher for Crush
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object} Installation result
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    // Create .crush/commands directory if it doesn't exist
    await fs.ensureDir(commandsDir);

    // Create custom agent launcher
    const launcherContent = `# ${agentName} Custom Agent

**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!

This is a launcher for the custom BMAD agent "${agentName}".

## Usage
1. First run: \`${agentPath}\` to load the complete agent
2. Then use this command to activate ${agentName}

The agent will follow the persona and instructions from the main agent file.

---

*Generated by BMAD Method*`;

    // Use underscore format: bmad_custom_fred-commit-poet.md
    // Written directly to commands dir (no bmad subfolder)
    const launcherName = customAgentColonName(agentName);
    const launcherPath = path.join(commandsDir, launcherName);

    // Write the launcher file
    await fs.writeFile(launcherPath, launcherContent, 'utf8');

    return {
      ide: 'crush',
      path: path.relative(projectDir, launcherPath),
      command: launcherName.replace('.md', ''),
      type: 'custom-agent-launcher',
    };
  }
}

module.exports = { CrushSetup };
@ -1,160 +0,0 @@
const path = require('node:path');
const { BaseIdeSetup } = require('./_base-ide');
const chalk = require('chalk');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
const { customAgentColonName } = require('./shared/path-utils');

/**
 * Cursor IDE setup handler
 */
class CursorSetup extends BaseIdeSetup {
  constructor() {
    super('cursor', 'Cursor', true); // preferred IDE
    this.configDir = '.cursor';
    this.rulesDir = 'rules';
    this.commandsDir = 'commands';
  }

  /**
   * Cleanup old BMAD installation before reinstalling
   * @param {string} projectDir - Project directory
   */
  async cleanup(projectDir) {
    const fs = require('fs-extra');
    const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    // Remove any bmad* files from the commands directory (cleans up old bmad: and bmad- formats)
    if (await fs.pathExists(commandsDir)) {
      const entries = await fs.readdir(commandsDir);
      for (const entry of entries) {
        if (entry.startsWith('bmad')) {
          await fs.remove(path.join(commandsDir, entry));
        }
      }
    }
    // Also remove legacy bmad folder if it exists
    const bmadFolder = path.join(commandsDir, 'bmad');
    if (await fs.pathExists(bmadFolder)) {
      await fs.remove(bmadFolder);
      console.log(chalk.dim(` Removed old BMAD commands from ${this.name}`));
    }
  }

  /**
   * Setup Cursor IDE configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Clean up old BMAD installation first
    await this.cleanup(projectDir);

    // Create .cursor/commands directory structure
    const cursorDir = path.join(projectDir, this.configDir);
    const commandsDir = path.join(cursorDir, this.commandsDir);
    await this.ensureDir(commandsDir);

    // Use underscore format: files written directly to commands dir (no bmad subfolder)
    // Creates: .cursor/commands/bmad_bmm_pm.md

    // Generate agent launchers using AgentCommandGenerator
    // This creates small launcher files that reference the actual agents in _bmad/
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts, counts: agentCounts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    // Write agent launcher files using flat underscore naming
    // Creates files like: bmad_bmm_pm.md
    const agentCount = await agentGen.writeColonArtifacts(commandsDir, agentArtifacts);

    // Generate workflow commands from manifest (if it exists)
    const workflowGen = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts } = await workflowGen.collectWorkflowArtifacts(bmadDir);

    // Write workflow-command artifacts using flat underscore naming
    // Creates files like: bmad_bmm_correct-course.md
    const workflowCommandCount = await workflowGen.writeColonArtifacts(commandsDir, workflowArtifacts);

    // Generate task and tool commands from manifests (if they exist)
    const taskToolGen = new TaskToolCommandGenerator();
    const taskToolResult = await taskToolGen.generateColonTaskToolCommands(projectDir, bmadDir, commandsDir);

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${agentCount} agents installed`));
    if (workflowCommandCount > 0) {
      console.log(chalk.dim(` - ${workflowCommandCount} workflow commands generated`));
    }
    if (taskToolResult.generated > 0) {
      console.log(
        chalk.dim(
          ` - ${taskToolResult.generated} task/tool commands generated (${taskToolResult.tasks} tasks, ${taskToolResult.tools} tools)`,
        ),
      );
    }
    console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, commandsDir)}`));

    return {
      success: true,
      agents: agentCount,
      tasks: taskToolResult.tasks || 0,
      tools: taskToolResult.tools || 0,
      workflows: workflowCommandCount,
    };
  }

  /**
   * Install a custom agent launcher for Cursor
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object|null} Info about created command
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    if (!(await this.exists(path.join(projectDir, this.configDir)))) {
      return null; // IDE not configured for this project
    }

    await this.ensureDir(commandsDir);

    const launcherContent = `You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.

<agent-activation CRITICAL="TRUE">
1. LOAD the FULL agent file from @${agentPath}
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
3. FOLLOW every step in the <activation> section precisely
4. DISPLAY the welcome/greeting as instructed
5. PRESENT the numbered menu
6. WAIT for user input before proceeding
</agent-activation>
`;

    // Cursor uses YAML frontmatter matching Claude Code format
    const commandContent = `---
name: '${agentName}'
description: '${agentName} agent'
---

${launcherContent}
`;

    // Use underscore format: bmad_custom_fred-commit-poet.md
    // Written directly to commands dir (no bmad subfolder)
    const launcherName = customAgentColonName(agentName);
    const launcherPath = path.join(commandsDir, launcherName);
    await this.writeFile(launcherPath, commandContent);

    return {
      path: launcherPath,
      command: `/${launcherName.replace('.md', '')}`,
    };
  }
}

module.exports = { CursorSetup };
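
// Minimal usage sketch (illustrative only, not part of the diff). The argument and return
// shapes come from installCustomAgentLauncher() above; the require path and file paths
// are placeholders.
//
// const { CursorSetup } = require('./cursor');
//
// (async () => {
//   const cursor = new CursorSetup();
//   const result = await cursor.installCustomAgentLauncher(
//     '/path/to/project',
//     'fred-commit-poet',
//     '_bmad/custom/agents/fred-commit-poet.md',
//     {},
//   );
//   if (result) {
//     console.log(result.command); // e.g. a /bmad_custom_* slash command
//   }
// })();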
@ -1,301 +0,0 @@
const path = require('node:path');
const fs = require('fs-extra');
const yaml = require('yaml');
const { BaseIdeSetup } = require('./_base-ide');
const chalk = require('chalk');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');

/**
 * Gemini CLI setup handler
 * Creates TOML files in .gemini/commands/ structure
 */
class GeminiSetup extends BaseIdeSetup {
  constructor() {
    super('gemini', 'Gemini CLI', false);
    this.configDir = '.gemini';
    this.commandsDir = 'commands';
    this.agentTemplatePath = path.join(__dirname, 'templates', 'gemini-agent-command.toml');
    this.taskTemplatePath = path.join(__dirname, 'templates', 'gemini-task-command.toml');
  }

  /**
   * Load config values from bmad installation
   * @param {string} bmadDir - BMAD installation directory
   * @returns {Object} Config values
   */
  async loadConfigValues(bmadDir) {
    const configValues = {
      user_name: 'User', // Default fallback
    };

    // Try to load core config.yaml
    const coreConfigPath = path.join(bmadDir, 'core', 'config.yaml');
    if (await fs.pathExists(coreConfigPath)) {
      try {
        const configContent = await fs.readFile(coreConfigPath, 'utf8');
        const config = yaml.parse(configContent);

        if (config.user_name) {
          configValues.user_name = config.user_name;
        }
      } catch (error) {
        console.warn(chalk.yellow(` Warning: Could not load config values: ${error.message}`));
      }
    }

    return configValues;
  }
|
||||
|
||||
/**
|
||||
* Setup Gemini CLI configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Create .gemini/commands directory (flat structure with bmad- prefix)
|
||||
const geminiDir = path.join(projectDir, this.configDir);
|
||||
const commandsDir = path.join(geminiDir, this.commandsDir);
|
||||
|
||||
await this.ensureDir(commandsDir);
|
||||
|
||||
// Clean up any existing BMAD files before reinstalling
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Get tasks and workflows (ALL workflows now generate commands)
|
||||
const tasks = await this.getTasks(bmadDir);
|
||||
|
||||
// Get ALL workflows using the new workflow command generator
|
||||
const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
|
||||
|
||||
// Install agents as TOML files with bmad- prefix (flat structure)
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const tomlContent = await this.createAgentLauncherToml(artifact);
|
||||
|
||||
// Flat structure: bmad-agent-{module}-{name}.toml
|
||||
const tomlPath = path.join(commandsDir, `bmad-agent-${artifact.module}-${artifact.name}.toml`);
|
||||
await this.writeFile(tomlPath, tomlContent);
|
||||
agentCount++;
|
||||
|
||||
console.log(chalk.green(` ✓ Added agent: /bmad_agents_${artifact.module}_${artifact.name}`));
|
||||
}
|
||||
|
||||
// Install tasks as TOML files with bmad- prefix (flat structure)
|
||||
let taskCount = 0;
|
||||
for (const task of tasks) {
|
||||
const content = await this.readFile(task.path);
|
||||
const tomlContent = await this.createTaskToml(task, content);
|
||||
|
||||
// Flat structure: bmad-task-{module}-{name}.toml
|
||||
const tomlPath = path.join(commandsDir, `bmad-task-${task.module}-${task.name}.toml`);
|
||||
await this.writeFile(tomlPath, tomlContent);
|
||||
taskCount++;
|
||||
|
||||
console.log(chalk.green(` ✓ Added task: /bmad_tasks_${task.module}_${task.name}`));
|
||||
}
|
||||
|
||||
// Install workflows as TOML files with bmad- prefix (flat structure)
|
||||
let workflowCount = 0;
|
||||
for (const artifact of workflowArtifacts) {
|
||||
if (artifact.type === 'workflow-command') {
|
||||
// Create TOML wrapper around workflow command content
|
||||
const tomlContent = await this.createWorkflowToml(artifact);
|
||||
|
||||
// Flat structure: bmad-workflow-{module}-{name}.toml
|
||||
const workflowName = path.basename(artifact.relativePath, '.md');
|
||||
const tomlPath = path.join(commandsDir, `bmad-workflow-${artifact.module}-${workflowName}.toml`);
|
||||
await this.writeFile(tomlPath, tomlContent);
|
||||
workflowCount++;
|
||||
|
||||
console.log(chalk.green(` ✓ Added workflow: /bmad_workflows_${artifact.module}_${workflowName}`));
|
||||
}
|
||||
}
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agents configured`));
|
||||
console.log(chalk.dim(` - ${taskCount} tasks configured`));
|
||||
console.log(chalk.dim(` - ${workflowCount} workflows configured`));
|
||||
console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, commandsDir)}`));
|
||||
console.log(chalk.dim(` - Agent activation: /bmad_agents_{agent-name}`));
|
||||
console.log(chalk.dim(` - Task activation: /bmad_tasks_{task-name}`));
|
||||
console.log(chalk.dim(` - Workflow activation: /bmad_workflows_{workflow-name}`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
tasks: taskCount,
|
||||
workflows: workflowCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create agent launcher TOML content from artifact
|
||||
*/
|
||||
async createAgentLauncherToml(artifact) {
|
||||
// Strip frontmatter from launcher content
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
const contentWithoutFrontmatter = artifact.content.replace(frontmatterRegex, '').trim();
|
||||
|
||||
// Extract title from launcher frontmatter
|
||||
const titleMatch = artifact.content.match(/description:\s*"([^"]+)"/);
|
||||
const title = titleMatch ? titleMatch[1] : this.formatTitle(artifact.name);
|
||||
|
||||
// Create TOML wrapper around launcher content (without frontmatter)
|
||||
const description = `BMAD ${artifact.module.toUpperCase()} Agent: ${title}`;
|
||||
|
||||
return `description = "${description}"
|
||||
prompt = """
|
||||
${contentWithoutFrontmatter}
|
||||
"""
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create agent TOML content using template
|
||||
*/
|
||||
async createAgentToml(agent, content) {
|
||||
// Extract metadata
|
||||
const titleMatch = content.match(/title="([^"]+)"/);
|
||||
const title = titleMatch ? titleMatch[1] : this.formatTitle(agent.name);
|
||||
|
||||
// Load template
|
||||
const template = await fs.readFile(this.agentTemplatePath, 'utf8');
|
||||
|
||||
// Replace template variables
|
||||
// Note: {user_name} and other {config_values} are left as-is for runtime substitution by Gemini
|
||||
const tomlContent = template
|
||||
.replaceAll('{{title}}', title)
|
||||
.replaceAll('{_bmad}', '_bmad')
|
||||
.replaceAll('{_bmad}', this.bmadFolderName)
|
||||
.replaceAll('{{module}}', agent.module)
|
||||
.replaceAll('{{name}}', agent.name);
|
||||
|
||||
return tomlContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create task TOML content using template
|
||||
*/
|
||||
async createTaskToml(task, content) {
|
||||
// Extract task name from XML if available
|
||||
const nameMatch = content.match(/<name>([^<]+)<\/name>/);
|
||||
const taskName = nameMatch ? nameMatch[1] : this.formatTitle(task.name);
|
||||
|
||||
// Load template
|
||||
const template = await fs.readFile(this.taskTemplatePath, 'utf8');
|
||||
|
||||
// Replace template variables
|
||||
const tomlContent = template
|
||||
.replaceAll('{{taskName}}', taskName)
|
||||
.replaceAll('{_bmad}', '_bmad')
|
||||
.replaceAll('{_bmad}', this.bmadFolderName)
|
||||
.replaceAll('{{module}}', task.module)
|
||||
.replaceAll('{{filename}}', task.filename);
|
||||
|
||||
return tomlContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow TOML content from artifact
|
||||
*/
|
||||
async createWorkflowToml(artifact) {
|
||||
// Extract description from artifact content
|
||||
const descriptionMatch = artifact.content.match(/description:\s*"([^"]+)"/);
|
||||
const description = descriptionMatch
|
||||
? descriptionMatch[1]
|
||||
: `BMAD ${artifact.module.toUpperCase()} Workflow: ${path.basename(artifact.relativePath, '.md')}`;
|
||||
|
||||
// Strip frontmatter from command content
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
const contentWithoutFrontmatter = artifact.content.replace(frontmatterRegex, '').trim();
|
||||
|
||||
return `description = "${description}"
|
||||
prompt = """
|
||||
${contentWithoutFrontmatter}
|
||||
"""
|
||||
`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup Gemini configuration - surgically remove only BMAD files
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);
|
||||
|
||||
if (await fs.pathExists(commandsDir)) {
|
||||
// Remove any bmad* files (cleans up old bmad- and bmad: formats)
|
||||
const files = await fs.readdir(commandsDir);
|
||||
let removed = 0;
|
||||
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmad') && file.endsWith('.toml')) {
|
||||
await fs.remove(path.join(commandsDir, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} existing BMAD files`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for Gemini
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object} Installation result
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const geminiDir = path.join(projectDir, this.configDir);
|
||||
const commandsDir = path.join(geminiDir, this.commandsDir);
|
||||
|
||||
// Create .gemini/commands directory if it doesn't exist
|
||||
await fs.ensureDir(commandsDir);
|
||||
|
||||
// Create custom agent launcher in TOML format
|
||||
const launcherContent = `description = "Custom BMAD Agent: ${agentName}"
|
||||
prompt = """
|
||||
**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!
|
||||
|
||||
This is a launcher for the custom BMAD agent "${agentName}".
|
||||
|
||||
## Usage
|
||||
1. First run: \`${agentPath}\` to load the complete agent
|
||||
2. Then use this command to activate ${agentName}
|
||||
|
||||
The agent will follow the persona and instructions from the main agent file.
|
||||
|
||||
---
|
||||
|
||||
*Generated by BMAD Method*
|
||||
"""`;
|
||||
|
||||
const fileName = `bmad-custom-${agentName.toLowerCase()}.toml`;
|
||||
const launcherPath = path.join(commandsDir, fileName);
|
||||
|
||||
// Write the launcher file
|
||||
await fs.writeFile(launcherPath, launcherContent, 'utf8');
|
||||
|
||||
return {
|
||||
ide: 'gemini',
|
||||
path: path.relative(projectDir, launcherPath),
|
||||
command: agentName,
|
||||
type: 'custom-agent-launcher',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { GeminiSetup };
|
||||
|
|
@ -1,383 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const prompts = require('../../../lib/prompts');
|
||||
|
||||
/**
|
||||
* GitHub Copilot setup handler
|
||||
* Creates agents in .github/agents/ and configures VS Code settings
|
||||
*/
|
||||
class GitHubCopilotSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('github-copilot', 'GitHub Copilot', true); // preferred IDE
|
||||
this.configDir = '.github';
|
||||
this.agentsDir = 'agents';
|
||||
this.vscodeDir = '.vscode';
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect configuration choices before installation
|
||||
* @param {Object} options - Configuration options
|
||||
* @returns {Object} Collected configuration
|
||||
*/
|
||||
async collectConfiguration(options = {}) {
|
||||
const config = {};
|
||||
|
||||
console.log('\n' + chalk.blue(' 🔧 VS Code Settings Configuration'));
|
||||
console.log(chalk.dim(' GitHub Copilot works best with specific settings\n'));
|
||||
|
||||
config.vsCodeConfig = await prompts.select({
|
||||
message: 'How would you like to configure VS Code settings?',
|
||||
choices: [
|
||||
{ name: 'Use recommended defaults (fastest)', value: 'defaults' },
|
||||
{ name: 'Configure each setting manually', value: 'manual' },
|
||||
{ name: 'Skip settings configuration', value: 'skip' },
|
||||
],
|
||||
default: 'defaults',
|
||||
});
|
||||
|
||||
if (config.vsCodeConfig === 'manual') {
|
||||
config.manualSettings = await prompts.prompt([
|
||||
{
|
||||
type: 'input',
|
||||
name: 'maxRequests',
|
||||
message: 'Maximum requests per session (1-50)?',
|
||||
default: '15',
|
||||
validate: (input) => {
|
||||
const num = parseInt(input, 10);
|
||||
if (isNaN(num)) return 'Enter a valid number 1-50';
|
||||
if (num < 1 || num > 50) return 'Enter a number between 1-50';
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
type: 'confirm',
|
||||
name: 'runTasks',
|
||||
message: 'Allow running workspace tasks?',
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
type: 'confirm',
|
||||
name: 'mcpDiscovery',
|
||||
message: 'Enable MCP server discovery?',
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
type: 'confirm',
|
||||
name: 'autoFix',
|
||||
message: 'Enable automatic error fixing?',
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
type: 'confirm',
|
||||
name: 'autoApprove',
|
||||
message: 'Auto-approve tools (less secure)?',
|
||||
default: false,
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup GitHub Copilot configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Configure VS Code settings using pre-collected config if available
|
||||
const config = options.preCollectedConfig || {};
|
||||
await this.configureVsCodeSettings(projectDir, { ...options, ...config });
|
||||
|
||||
// Create .github/agents directory
|
||||
const githubDir = path.join(projectDir, this.configDir);
|
||||
const agentsDir = path.join(githubDir, this.agentsDir);
|
||||
await this.ensureDir(agentsDir);
|
||||
|
||||
// Clean up any existing BMAD files before reinstalling
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Create agent files with bmd- prefix
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const content = artifact.content;
|
||||
const agentContent = await this.createAgentContent({ module: artifact.module, name: artifact.name }, content);
|
||||
|
||||
// Use bmd- prefix: bmd-custom-{module}-{name}.agent.md
|
||||
const targetPath = path.join(agentsDir, `bmd-custom-${artifact.module}-${artifact.name}.agent.md`);
|
||||
await this.writeFile(targetPath, agentContent);
|
||||
agentCount++;
|
||||
|
||||
console.log(chalk.green(` ✓ Created agent: bmd-custom-${artifact.module}-${artifact.name}`));
|
||||
}
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agents created`));
|
||||
console.log(chalk.dim(` - Agents directory: ${path.relative(projectDir, agentsDir)}`));
|
||||
console.log(chalk.dim(` - VS Code settings configured`));
|
||||
console.log(chalk.dim('\n Agents available in VS Code Chat view'));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
settings: true,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure VS Code settings for GitHub Copilot
|
||||
*/
|
||||
async configureVsCodeSettings(projectDir, options) {
|
||||
const fs = require('fs-extra');
|
||||
const vscodeDir = path.join(projectDir, this.vscodeDir);
|
||||
const settingsPath = path.join(vscodeDir, 'settings.json');
|
||||
|
||||
await this.ensureDir(vscodeDir);
|
||||
|
||||
// Read existing settings
|
||||
let existingSettings = {};
|
||||
if (await fs.pathExists(settingsPath)) {
|
||||
try {
|
||||
const content = await fs.readFile(settingsPath, 'utf8');
|
||||
existingSettings = JSON.parse(content);
|
||||
console.log(chalk.yellow(' Found existing .vscode/settings.json'));
|
||||
} catch {
|
||||
console.warn(chalk.yellow(' Could not parse settings.json, creating new'));
|
||||
}
|
||||
}
|
||||
|
||||
// Use pre-collected configuration or skip if not available
|
||||
let configChoice = options.vsCodeConfig;
|
||||
if (!configChoice) {
|
||||
// If no pre-collected config, skip configuration
|
||||
console.log(chalk.yellow(' ⚠ No configuration collected, skipping VS Code settings'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (configChoice === 'skip') {
|
||||
console.log(chalk.yellow(' ⚠ Skipping VS Code settings'));
|
||||
return;
|
||||
}
|
||||
|
||||
let bmadSettings = {};
|
||||
|
||||
if (configChoice === 'defaults') {
|
||||
bmadSettings = {
|
||||
'chat.agent.enabled': true,
|
||||
'chat.agent.maxRequests': 15,
|
||||
'github.copilot.chat.agent.runTasks': true,
|
||||
'chat.mcp.discovery.enabled': true,
|
||||
'github.copilot.chat.agent.autoFix': true,
|
||||
'chat.tools.autoApprove': false,
|
||||
};
|
||||
console.log(chalk.green(' ✓ Using recommended defaults'));
|
||||
} else {
|
||||
// Manual configuration - use pre-collected settings
|
||||
const manual = options.manualSettings || {};
|
||||
|
||||
const maxRequests = parseInt(manual.maxRequests || '15', 10);
|
||||
bmadSettings = {
|
||||
'chat.agent.enabled': true,
|
||||
'chat.agent.maxRequests': isNaN(maxRequests) ? 15 : maxRequests,
|
||||
'github.copilot.chat.agent.runTasks': manual.runTasks === undefined ? true : manual.runTasks,
|
||||
'chat.mcp.discovery.enabled': manual.mcpDiscovery === undefined ? true : manual.mcpDiscovery,
|
||||
'github.copilot.chat.agent.autoFix': manual.autoFix === undefined ? true : manual.autoFix,
|
||||
'chat.tools.autoApprove': manual.autoApprove || false,
|
||||
};
|
||||
}
|
||||
|
||||
// Merge settings (existing take precedence)
|
||||
const mergedSettings = { ...bmadSettings, ...existingSettings };
|
||||
|
||||
// Write settings
|
||||
await fs.writeFile(settingsPath, JSON.stringify(mergedSettings, null, 2));
|
||||
console.log(chalk.green(' ✓ VS Code settings configured'));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create agent content
|
||||
*/
|
||||
async createAgentContent(agent, content) {
|
||||
// Extract metadata from launcher frontmatter if present
|
||||
const descMatch = content.match(/description:\s*"([^"]+)"/);
|
||||
const title = descMatch ? descMatch[1] : this.formatTitle(agent.name);
|
||||
|
||||
const description = `Activates the ${title} agent persona.`;
|
||||
|
||||
// Strip any existing frontmatter from the content
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
let cleanContent = content;
|
||||
if (frontmatterRegex.test(content)) {
|
||||
cleanContent = content.replace(frontmatterRegex, '').trim();
|
||||
}
|
||||
|
||||
// Available GitHub Copilot tools (November 2025 - Official VS Code Documentation)
|
||||
// Reference: https://code.visualstudio.com/docs/copilot/reference/copilot-vscode-features#_chat-tools
|
||||
const tools = [
|
||||
'changes', // List of source control changes
|
||||
'edit', // Edit files in your workspace including: createFile, createDirectory, editNotebook, newJupyterNotebook and editFiles
|
||||
'fetch', // Fetch content from web page
|
||||
'githubRepo', // Perform code search in GitHub repo
|
||||
'problems', // Add workspace issues from Problems panel
|
||||
'runCommands', // Runs commands in the terminal including: getTerminalOutput, terminalSelection, terminalLastCommand and runInTerminal
|
||||
'runTasks', // Runs tasks and gets their output for your workspace
|
||||
'runTests', // Run unit tests in workspace
|
||||
'search', // Search and read files in your workspace, including:fileSearch, textSearch, listDirectory, readFile, codebase and searchResults
|
||||
'runSubagent', // Runs a task within an isolated subagent context. Enables efficient organization of tasks and context window management.
|
||||
'testFailure', // Get unit test failure information
|
||||
'todos', // Tool for managing and tracking todo items for task planning
|
||||
'usages', // Find references and navigate definitions
|
||||
];
|
||||
|
||||
let agentContent = `---
|
||||
description: "${description.replaceAll('"', String.raw`\"`)}"
|
||||
tools: ${JSON.stringify(tools)}
|
||||
---
|
||||
|
||||
# ${title} Agent
|
||||
|
||||
${cleanContent}
|
||||
|
||||
`;
|
||||
|
||||
return agentContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format name as title
|
||||
*/
|
||||
formatTitle(name) {
|
||||
return name
|
||||
.split('-')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup GitHub Copilot configuration - surgically remove only BMAD files
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
|
||||
// Clean up old chatmodes directory
|
||||
const chatmodesDir = path.join(projectDir, this.configDir, 'chatmodes');
|
||||
if (await fs.pathExists(chatmodesDir)) {
|
||||
const files = await fs.readdir(chatmodesDir);
|
||||
let removed = 0;
|
||||
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmad') && file.endsWith('.chatmode.md')) {
|
||||
await fs.remove(path.join(chatmodesDir, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} old BMAD chat modes`));
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up new agents directory
|
||||
const agentsDir = path.join(projectDir, this.configDir, this.agentsDir);
|
||||
if (await fs.pathExists(agentsDir)) {
|
||||
const files = await fs.readdir(agentsDir);
|
||||
let removed = 0;
|
||||
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmd-') && file.endsWith('.agent.md')) {
|
||||
await fs.remove(path.join(agentsDir, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} existing BMAD agents`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for GitHub Copilot
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object|null} Info about created command
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const agentsDir = path.join(projectDir, this.configDir, this.agentsDir);
|
||||
|
||||
if (!(await this.exists(path.join(projectDir, this.configDir)))) {
|
||||
return null; // IDE not configured for this project
|
||||
}
|
||||
|
||||
await this.ensureDir(agentsDir);
|
||||
|
||||
const launcherContent = `You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @${agentPath}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. FOLLOW every step in the <activation> section precisely
|
||||
4. DISPLAY the welcome/greeting as instructed
|
||||
5. PRESENT the numbered menu
|
||||
6. WAIT for user input before proceeding
|
||||
</agent-activation>
|
||||
`;
|
||||
|
||||
// GitHub Copilot needs specific tools in frontmatter
|
||||
const copilotTools = [
|
||||
'changes',
|
||||
'codebase',
|
||||
'createDirectory',
|
||||
'createFile',
|
||||
'editFiles',
|
||||
'fetch',
|
||||
'fileSearch',
|
||||
'githubRepo',
|
||||
'listDirectory',
|
||||
'problems',
|
||||
'readFile',
|
||||
'runInTerminal',
|
||||
'runTask',
|
||||
'runTests',
|
||||
'runVscodeCommand',
|
||||
'search',
|
||||
'searchResults',
|
||||
'terminalLastCommand',
|
||||
'terminalSelection',
|
||||
'testFailure',
|
||||
'textSearch',
|
||||
'usages',
|
||||
];
|
||||
|
||||
const agentContent = `---
|
||||
description: "Activates the ${metadata.title || agentName} agent persona."
|
||||
tools: ${JSON.stringify(copilotTools)}
|
||||
---
|
||||
|
||||
# ${metadata.title || agentName} Agent
|
||||
|
||||
${launcherContent}
|
||||
`;
|
||||
|
||||
const agentFilePath = path.join(agentsDir, `bmd-custom-${agentName}.agent.md`);
|
||||
await this.writeFile(agentFilePath, agentContent);
|
||||
|
||||
return {
|
||||
path: agentFilePath,
|
||||
command: `bmd-custom-${agentName}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { GitHubCopilotSetup };
|
||||
|
|
@ -1,191 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
|
||||
/**
|
||||
* iFlow CLI setup handler
|
||||
* Creates commands in .iflow/commands/ directory structure
|
||||
*/
|
||||
class IFlowSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('iflow', 'iFlow CLI');
|
||||
this.configDir = '.iflow';
|
||||
this.commandsDir = 'commands';
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup iFlow CLI configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Create .iflow/commands/bmad directory structure
|
||||
const iflowDir = path.join(projectDir, this.configDir);
|
||||
const commandsDir = path.join(iflowDir, this.commandsDir, 'bmad');
|
||||
const agentsDir = path.join(commandsDir, 'agents');
|
||||
const tasksDir = path.join(commandsDir, 'tasks');
|
||||
const workflowsDir = path.join(commandsDir, 'workflows');
|
||||
|
||||
await this.ensureDir(agentsDir);
|
||||
await this.ensureDir(tasksDir);
|
||||
await this.ensureDir(workflowsDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Setup agents as commands
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const commandContent = await this.createAgentCommand(artifact);
|
||||
|
||||
const targetPath = path.join(agentsDir, `${artifact.module}-${artifact.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
agentCount++;
|
||||
}
|
||||
|
||||
// Get tasks and workflows (ALL workflows now generate commands)
|
||||
const tasks = await this.getTasks(bmadDir);
|
||||
|
||||
// Get ALL workflows using the new workflow command generator
|
||||
const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
|
||||
|
||||
// Setup tasks as commands
|
||||
let taskCount = 0;
|
||||
for (const task of tasks) {
|
||||
const content = await this.readFile(task.path);
|
||||
const commandContent = this.createTaskCommand(task, content);
|
||||
|
||||
const targetPath = path.join(tasksDir, `${task.module}-${task.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
taskCount++;
|
||||
}
|
||||
|
||||
// Setup workflows as commands (already generated)
|
||||
let workflowCount = 0;
|
||||
for (const artifact of workflowArtifacts) {
|
||||
if (artifact.type === 'workflow-command') {
|
||||
const targetPath = path.join(workflowsDir, `${artifact.module}-${path.basename(artifact.relativePath, '.md')}.md`);
|
||||
await this.writeFile(targetPath, artifact.content);
|
||||
workflowCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agent commands created`));
|
||||
console.log(chalk.dim(` - ${taskCount} task commands created`));
|
||||
console.log(chalk.dim(` - ${workflowCount} workflow commands created`));
|
||||
console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, commandsDir)}`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
tasks: taskCount,
|
||||
workflows: workflowCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create agent command content
|
||||
*/
|
||||
async createAgentCommand(artifact) {
|
||||
// The launcher content is already complete - just return it as-is
|
||||
return artifact.content;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create task command content
|
||||
*/
|
||||
createTaskCommand(task, content) {
|
||||
// Extract task name
|
||||
const nameMatch = content.match(/<name>([^<]+)<\/name>/);
|
||||
const taskName = nameMatch ? nameMatch[1] : this.formatTitle(task.name);
|
||||
|
||||
let commandContent = `# /task-${task.name} Command
|
||||
|
||||
When this command is used, execute the following task:
|
||||
|
||||
## ${taskName} Task
|
||||
|
||||
${content}
|
||||
|
||||
## Usage
|
||||
|
||||
This command executes the ${taskName} task from the BMAD ${task.module.toUpperCase()} module.
|
||||
|
||||
## Module
|
||||
|
||||
Part of the BMAD ${task.module.toUpperCase()} module.
|
||||
`;
|
||||
|
||||
return commandContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup iFlow configuration
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
const bmadCommandsDir = path.join(projectDir, this.configDir, this.commandsDir, 'bmad');
|
||||
|
||||
if (await fs.pathExists(bmadCommandsDir)) {
|
||||
await fs.remove(bmadCommandsDir);
|
||||
console.log(chalk.dim(`Removed BMAD commands from iFlow CLI`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for iFlow
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object} Installation result
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const iflowDir = path.join(projectDir, this.configDir);
|
||||
const bmadCommandsDir = path.join(iflowDir, this.commandsDir, 'bmad');
|
||||
|
||||
// Create .iflow/commands/bmad directory if it doesn't exist
|
||||
await fs.ensureDir(bmadCommandsDir);
|
||||
|
||||
// Create custom agent launcher
|
||||
const launcherContent = `# ${agentName} Custom Agent
|
||||
|
||||
**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!
|
||||
|
||||
This is a launcher for the custom BMAD agent "${agentName}".
|
||||
|
||||
## Usage
|
||||
1. First run: \`${agentPath}\` to load the complete agent
|
||||
2. Then use this command to activate ${agentName}
|
||||
|
||||
The agent will follow the persona and instructions from the main agent file.
|
||||
|
||||
---
|
||||
|
||||
*Generated by BMAD Method*`;
|
||||
|
||||
const fileName = `custom-${agentName.toLowerCase()}.md`;
|
||||
const launcherPath = path.join(bmadCommandsDir, fileName);
|
||||
|
||||
// Write the launcher file
|
||||
await fs.writeFile(launcherPath, launcherContent, 'utf8');
|
||||
|
||||
return {
|
||||
ide: 'iflow',
|
||||
path: path.relative(projectDir, launcherPath),
|
||||
command: agentName,
|
||||
type: 'custom-agent-launcher',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { IFlowSetup };
|
||||
|
|
@ -115,18 +115,20 @@ class KiloSetup extends BaseIdeSetup {
|
|||
|
||||
// Build mode entry (KiloCode uses same schema as Roo)
|
||||
const slug = `bmad-${artifact.module}-${artifact.name}`;
|
||||
let modeEntry = ` - slug: ${slug}\n`;
|
||||
modeEntry += ` name: '${icon} ${title}'\n`;
|
||||
modeEntry += ` roleDefinition: ${roleDefinition}\n`;
|
||||
modeEntry += ` whenToUse: ${whenToUse}\n`;
|
||||
modeEntry += ` customInstructions: |\n`;
|
||||
modeEntry += ` ${activationHeader} Read the full YAML from ${relativePath} start activation to alter your state of being follow startup section instructions stay in this being until told to exit this mode\n`;
|
||||
modeEntry += ` groups:\n`;
|
||||
modeEntry += ` - read\n`;
|
||||
modeEntry += ` - edit\n`;
|
||||
modeEntry += ` - browser\n`;
|
||||
modeEntry += ` - command\n`;
|
||||
modeEntry += ` - mcp\n`;
|
||||
const modeEntry = ` - slug: ${slug}
|
||||
name: '${icon} ${title}'
|
||||
roleDefinition: ${roleDefinition}
|
||||
whenToUse: ${whenToUse}
|
||||
customInstructions: |
|
||||
${activationHeader.trim()}
|
||||
Read the full YAML from ${relativePath} start activation to alter your state of being follow startup section instructions stay in this being until told to exit this mode
|
||||
groups:
|
||||
- read
|
||||
- edit
|
||||
- browser
|
||||
- command
|
||||
- mcp
|
||||
`;
|
||||
|
||||
return modeEntry;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,16 +1,36 @@
|
|||
const fs = require('fs-extra');
|
||||
const path = require('node:path');
|
||||
const chalk = require('chalk');
|
||||
const yaml = require('yaml');
|
||||
const { ConfigDrivenIdeSetup, loadPlatformCodes } = require('./_config-driven');
|
||||
|
||||
/**
|
||||
* IDE Manager - handles IDE-specific setup
|
||||
* Dynamically discovers and loads IDE handlers
|
||||
*
|
||||
* NEW: Loads config-driven handlers from platform-codes.yaml
|
||||
* Custom installer files (like kilo.js, kiro-cli.js) are still supported
|
||||
* for IDEs with truly unique requirements.
|
||||
*/
|
||||
class IdeManager {
|
||||
constructor() {
|
||||
this.handlers = new Map();
|
||||
this.loadHandlers();
|
||||
this.platformConfig = null;
|
||||
this.bmadFolderName = 'bmad'; // Default, can be overridden
|
||||
this._initialized = false;
|
||||
// Load custom handlers synchronously
|
||||
this.loadCustomInstallerFiles(__dirname);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure handlers are initialized (loads config-driven handlers)
|
||||
* Call this before using handlers if needed
|
||||
*/
|
||||
async ensureInitialized() {
|
||||
if (!this._initialized) {
|
||||
await this.loadConfigDrivenHandlers();
|
||||
this._initialized = true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -28,15 +48,28 @@ class IdeManager {
|
|||
}
|
||||
|
||||
/**
|
||||
* Dynamically load all IDE handlers from directory
|
||||
* Dynamically load all IDE handlers
|
||||
*
|
||||
* Loading order:
|
||||
* 1. Load custom installer files (kilo.js, kiro-cli.js) for IDEs with unique requirements
|
||||
* 2. Load config-driven handlers from platform-codes.yaml for all other IDEs
|
||||
* @deprecated Use ensureInitialized() instead
|
||||
*/
|
||||
loadHandlers() {
|
||||
const ideDir = __dirname;
|
||||
async loadHandlers() {
|
||||
await this.ensureInitialized();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load custom installer files (for IDEs with truly unique requirements)
|
||||
* Synchronous version for constructor
|
||||
* @param {string} ideDir - IDE handlers directory
|
||||
*/
|
||||
loadCustomInstallerFiles(ideDir) {
|
||||
try {
|
||||
// Get all JS files in the IDE directory
|
||||
const files = fs.readdirSync(ideDir).filter((file) => {
|
||||
// Skip base class, manager, utility files (starting with _), and helper modules
|
||||
// Skip base class, manager, config-driven, utility files (starting with _)
|
||||
// Also skip shared directory and generator files
|
||||
return (
|
||||
file.endsWith('.js') &&
|
||||
!file.startsWith('_') &&
|
||||
|
|
@ -74,15 +107,64 @@ class IdeManager {
|
|||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(chalk.red('Failed to load IDE handlers:'), error.message);
|
||||
console.error(chalk.red('Failed to load custom IDE handlers:'), error.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load config-driven handlers from platform-codes.yaml
|
||||
* Async version called by ensureInitialized()
|
||||
*/
|
||||
async loadConfigDrivenHandlers() {
|
||||
try {
|
||||
// Load platform-codes.yaml configuration
|
||||
this.platformConfig = await loadPlatformCodes();
|
||||
|
||||
// Create config-driven handlers for platforms with installer config
|
||||
if (this.platformConfig.platforms) {
|
||||
for (const [platformCode, platformInfo] of Object.entries(this.platformConfig.platforms)) {
|
||||
// Skip if custom handler already exists
|
||||
if (this.handlers.has(platformCode)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip if no installer config
|
||||
if (!platformInfo.installer) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const handler = new ConfigDrivenIdeSetup(platformCode, platformInfo);
|
||||
handler.setBmadFolderName(this.bmadFolderName);
|
||||
this.handlers.set(platformCode, handler);
|
||||
} catch (error) {
|
||||
console.warn(chalk.yellow(` Warning: Could not create config-driven handler for ${platformCode}: ${error.message}`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Log summary
|
||||
const customCount = [...this.handlers.entries()].filter(([key]) => {
|
||||
const handler = this.handlers.get(key);
|
||||
return handler && !(handler instanceof ConfigDrivenIdeSetup);
|
||||
}).length;
|
||||
const configCount = [...this.handlers.entries()].filter(([key]) => {
|
||||
const handler = this.handlers.get(key);
|
||||
return handler && handler instanceof ConfigDrivenIdeSetup;
|
||||
}).length;
|
||||
console.log(chalk.dim(` Loaded ${customCount} custom handlers, ${configCount} config-driven handlers`));
|
||||
} catch (error) {
|
||||
console.error(chalk.red('Failed to load config-driven handlers:'), error.message);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available IDEs with their metadata
|
||||
* @returns {Array} Array of IDE information objects
|
||||
* @returns {Promise<Array>} Array of IDE information objects
|
||||
*/
|
||||
getAvailableIdes() {
|
||||
async getAvailableIdes() {
|
||||
await this.ensureInitialized();
|
||||
|
||||
const ides = [];
|
||||
|
||||
for (const [key, handler] of this.handlers) {
|
||||
|
|
@ -113,18 +195,20 @@ class IdeManager {
|
|||
|
||||
/**
|
||||
* Get preferred IDEs
|
||||
* @returns {Array} Array of preferred IDE information
|
||||
* @returns {Promise<Array>} Array of preferred IDE information
|
||||
*/
|
||||
getPreferredIdes() {
|
||||
return this.getAvailableIdes().filter((ide) => ide.preferred);
|
||||
async getPreferredIdes() {
|
||||
const ides = await this.getAvailableIdes();
|
||||
return ides.filter((ide) => ide.preferred);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get non-preferred IDEs
|
||||
* @returns {Array} Array of non-preferred IDE information
|
||||
* @returns {Promise<Array>} Array of non-preferred IDE information
|
||||
*/
|
||||
getOtherIdes() {
|
||||
return this.getAvailableIdes().filter((ide) => !ide.preferred);
|
||||
async getOtherIdes() {
|
||||
const ides = await this.getAvailableIdes();
|
||||
return ides.filter((ide) => !ide.preferred);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -135,6 +219,8 @@ class IdeManager {
|
|||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(ideName, projectDir, bmadDir, options = {}) {
|
||||
await this.ensureInitialized();
|
||||
|
||||
const handler = this.handlers.get(ideName.toLowerCase());
|
||||
|
||||
if (!handler) {
|
||||
|
|
|
|||
|
|
@ -1,257 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const os = require('node:os');
|
||||
const chalk = require('chalk');
|
||||
const yaml = require('yaml');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
|
||||
/**
|
||||
* OpenCode IDE setup handler
|
||||
*/
|
||||
class OpenCodeSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('opencode', 'OpenCode', true); // Mark as preferred/recommended
|
||||
this.configDir = '.opencode';
|
||||
this.commandsDir = 'command';
|
||||
this.agentsDir = 'agent';
|
||||
}
|
||||
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
const baseDir = path.join(projectDir, this.configDir);
|
||||
const commandsBaseDir = path.join(baseDir, this.commandsDir);
|
||||
const agentsBaseDir = path.join(baseDir, this.agentsDir);
|
||||
|
||||
await this.ensureDir(commandsBaseDir);
|
||||
await this.ensureDir(agentsBaseDir);
|
||||
|
||||
// Clean up any existing BMAD files before reinstalling
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Install primary agents with flat naming: bmad-agent-{module}-{name}.md
|
||||
// OpenCode agents go in the agent folder (not command folder)
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const agentContent = artifact.content;
|
||||
// Flat structure in agent folder: bmad-agent-{module}-{name}.md
|
||||
const targetPath = path.join(agentsBaseDir, `bmad-agent-${artifact.module}-${artifact.name}.md`);
|
||||
await this.writeFile(targetPath, agentContent);
|
||||
agentCount++;
|
||||
}
|
||||
|
||||
// Install workflow commands with flat naming: bmad-{module}-{workflow-name}
|
||||
const workflowGenerator = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGenerator.collectWorkflowArtifacts(bmadDir);
|
||||
|
||||
let workflowCommandCount = 0;
|
||||
for (const artifact of workflowArtifacts) {
|
||||
if (artifact.type === 'workflow-command') {
|
||||
const commandContent = artifact.content;
|
||||
// Flat structure: bmad-{module}-{workflow-name}.md
|
||||
// artifact.relativePath is like: bmm/workflows/plan-project.md
|
||||
const workflowName = path.basename(artifact.relativePath, '.md');
|
||||
const targetPath = path.join(commandsBaseDir, `bmad-${artifact.module}-${workflowName}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
workflowCommandCount++;
|
||||
}
|
||||
// Skip workflow launcher READMEs as they're not needed in flat structure
|
||||
}
|
||||
|
||||
// Install task and tool commands with flat naming
|
||||
const { tasks, tools } = await this.generateFlatTaskToolCommands(bmadDir, commandsBaseDir);
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agents installed to .opencode/agent/`));
|
||||
if (workflowCommandCount > 0) {
|
||||
console.log(chalk.dim(` - ${workflowCommandCount} workflows installed to .opencode/command/`));
|
||||
}
|
||||
if (tasks + tools > 0) {
|
||||
console.log(chalk.dim(` - ${tasks + tools} tasks/tools installed to .opencode/command/ (${tasks} tasks, ${tools} tools)`));
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
workflows: workflowCommandCount,
|
||||
workflowCounts,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate flat task and tool commands for OpenCode
|
||||
* OpenCode doesn't support nested command directories
|
||||
*/
|
||||
async generateFlatTaskToolCommands(bmadDir, commandsBaseDir) {
|
||||
const taskToolGen = new TaskToolCommandGenerator();
|
||||
const tasks = await taskToolGen.loadTaskManifest(bmadDir);
|
||||
const tools = await taskToolGen.loadToolManifest(bmadDir);
|
||||
|
||||
// Filter to only standalone items
|
||||
const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
|
||||
// Generate command files for tasks with flat naming: bmad-task-{module}-{name}.md
|
||||
for (const task of standaloneTasks) {
|
||||
const commandContent = taskToolGen.generateCommandContent(task, 'task');
|
||||
const targetPath = path.join(commandsBaseDir, `bmad-task-${task.module}-${task.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
}
|
||||
|
||||
// Generate command files for tools with flat naming: bmad-tool-{module}-{name}.md
|
||||
for (const tool of standaloneTools) {
|
||||
const commandContent = taskToolGen.generateCommandContent(tool, 'tool');
|
||||
const targetPath = path.join(commandsBaseDir, `bmad-tool-${tool.module}-${tool.name}.md`);
|
||||
await this.writeFile(targetPath, commandContent);
|
||||
}
|
||||
|
||||
return {
|
||||
tasks: standaloneTasks.length,
|
||||
tools: standaloneTools.length,
|
||||
};
|
||||
}
|
||||
|
||||
async readAndProcess(filePath, metadata) {
|
||||
const content = await fs.readFile(filePath, 'utf8');
|
||||
return this.processContent(content, metadata);
|
||||
}
|
||||
|
||||
async createAgentContent(content, metadata) {
|
||||
const { frontmatter = {}, body } = this.parseFrontmatter(content);
|
||||
|
||||
frontmatter.description =
|
||||
frontmatter.description && String(frontmatter.description).trim().length > 0
|
||||
? frontmatter.description
|
||||
: `BMAD ${metadata.module} agent: ${metadata.name}`;
|
||||
|
||||
// OpenCode agents use: 'primary' mode for main agents
|
||||
frontmatter.mode = 'primary';
|
||||
|
||||
const frontmatterString = this.stringifyFrontmatter(frontmatter);
|
||||
|
||||
// Get the activation header from central template
|
||||
const activationHeader = await this.getAgentCommandHeader();
|
||||
|
||||
return `${frontmatterString}\n\n${activationHeader}\n\n${body}`;
|
||||
}
|
||||
|
||||
parseFrontmatter(content) {
|
||||
const match = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n?/);
|
||||
if (!match) {
|
||||
return { data: {}, body: content };
|
||||
}
|
||||
|
||||
const body = content.slice(match[0].length);
|
||||
|
||||
let frontmatter = {};
|
||||
try {
|
||||
frontmatter = yaml.parse(match[1]) || {};
|
||||
} catch {
|
||||
frontmatter = {};
|
||||
}
|
||||
|
||||
return { frontmatter, body };
|
||||
}
|
||||
|
||||
stringifyFrontmatter(frontmatter) {
|
||||
const yamlText = yaml
|
||||
.dump(frontmatter, {
|
||||
indent: 2,
|
||||
lineWidth: -1,
|
||||
noRefs: true,
|
||||
sortKeys: false,
|
||||
})
|
||||
.trimEnd();
|
||||
|
||||
return `---\n${yamlText}\n---`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup OpenCode configuration - surgically remove only BMAD files
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const agentsDir = path.join(projectDir, this.configDir, this.agentsDir);
|
||||
const commandsDir = path.join(projectDir, this.configDir, this.commandsDir);
|
||||
let removed = 0;
|
||||
|
||||
// Clean up agent folder
|
||||
if (await fs.pathExists(agentsDir)) {
|
||||
const files = await fs.readdir(agentsDir);
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmad') && file.endsWith('.md')) {
|
||||
await fs.remove(path.join(agentsDir, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up command folder
|
||||
if (await fs.pathExists(commandsDir)) {
|
||||
const files = await fs.readdir(commandsDir);
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmad') && file.endsWith('.md')) {
|
||||
await fs.remove(path.join(commandsDir, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} existing BMAD files`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for OpenCode
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object|null} Info about created command
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const agentsDir = path.join(projectDir, this.configDir, this.agentsDir);
|
||||
|
||||
if (!(await this.exists(path.join(projectDir, this.configDir)))) {
|
||||
return null; // IDE not configured for this project
|
||||
}
|
||||
|
||||
await this.ensureDir(agentsDir);
|
||||
|
||||
const launcherContent = `---
|
||||
name: '${agentName}'
|
||||
description: '${metadata.title || agentName} agent'
|
||||
mode: 'primary'
|
||||
---
|
||||
|
||||
You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @${agentPath}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. FOLLOW every step in the <activation> section precisely
|
||||
4. DISPLAY the welcome/greeting as instructed
|
||||
5. PRESENT the numbered menu
|
||||
6. WAIT for user input before proceeding
|
||||
</agent-activation>
|
||||
`;
|
||||
|
||||
// OpenCode uses flat naming: bmad-agent-custom-{name}.md
|
||||
const launcherPath = path.join(agentsDir, `bmad-agent-custom-${agentName}.md`);
|
||||
await this.writeFile(launcherPath, launcherContent);
|
||||
|
||||
return {
|
||||
path: launcherPath,
|
||||
command: `bmad-agent-custom-${agentName}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { OpenCodeSetup };
|
||||
|
|
@ -5,127 +5,177 @@
|
|||
# the installation system to identify different platforms (IDEs, tools, etc.)
|
||||
#
|
||||
# Format:
|
||||
# code: Platform identifier used internally
|
||||
# code: Platform identifier used internally (key)
|
||||
# name: Display name shown to users
|
||||
# preferred: Whether this platform is shown as a recommended option on install
|
||||
# category: Type of platform (ide, tool, service, etc.)
|
||||
# category: Type of platform (ide, cli, tool, service, etc.)
|
||||
# installer: Installation configuration (optional)
|
||||
# frontmatter_template: Path to frontmatter template file (relative to templates/frontmatter/)
|
||||
# If not specified, uses 'common-yaml.md' default
|
||||
|
||||
platforms:
|
||||
# Recommended Platforms
|
||||
claude-code:
|
||||
name: "Claude Code"
|
||||
preferred: true
|
||||
category: cli
|
||||
description: "Anthropic's official CLI for Claude"
|
||||
|
||||
windsurf:
|
||||
name: "Windsurf"
|
||||
preferred: true
|
||||
category: ide
|
||||
description: "AI-powered IDE with cascade flows"
|
||||
|
||||
cursor:
|
||||
name: "Cursor"
|
||||
preferred: true
|
||||
category: ide
|
||||
description: "AI-first code editor"
|
||||
|
||||
# Other IDEs and Tools
|
||||
cline:
|
||||
name: "Cline"
|
||||
antigravity:
|
||||
name: "Google Antigravity"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "AI coding assistant"
|
||||
|
||||
opencode:
|
||||
name: "OpenCode"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "OpenCode terminal coding assistant"
|
||||
description: "Google's AI development environment"
|
||||
installer:
|
||||
target_dir: .antigravity/commands
|
||||
frontmatter_template: common-yaml.md
|
||||
|
||||
auggie:
|
||||
name: "Auggie"
|
||||
preferred: false
|
||||
category: cli
|
||||
description: "AI development tool"
|
||||
installer:
|
||||
target_dir: .augment/commands
|
||||
frontmatter_template: common-yaml.md
|
||||
|
||||
roo:
|
||||
name: "Roo Cline"
|
||||
cline:
|
||||
name: "Cline"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "Enhanced Cline fork"
|
||||
description: "AI coding assistant"
|
||||
installer:
|
||||
target_dir: .cline/commands
|
||||
frontmatter_template: none # No frontmatter, content as-is
|
||||
|
||||
rovo:
|
||||
name: "Rovo"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "Atlassian's AI coding assistant"
|
||||
|
||||
rovo-dev:
|
||||
name: "Rovo Dev"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "Atlassian's Rovo development environment"
|
||||
|
||||
kiro-cli:
|
||||
name: "Kiro CLI"
|
||||
preferred: false
|
||||
claude-code:
|
||||
name: "Claude Code"
|
||||
preferred: true
|
||||
category: cli
|
||||
description: "Kiro command-line interface"
|
||||
|
||||
github-copilot:
|
||||
name: "GitHub Copilot"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "GitHub's AI pair programmer"
|
||||
|
||||
codex:
|
||||
name: "Codex"
|
||||
preferred: false
|
||||
category: cli
|
||||
description: "OpenAI Codex integration"
|
||||
|
||||
qwen:
|
||||
name: "QwenCoder"
|
||||
preferred: false
|
||||
category: ide
|
||||
description: "Qwen AI coding assistant"
|
||||
|
||||
gemini:
|
||||
name: "Gemini CLI"
|
||||
preferred: false
|
||||
category: cli
|
||||
description: "Google's CLI for Gemini"
|
iflow:
  name: "iFlow"
  preferred: false
  category: ide
  description: "AI workflow automation"

kilo:
  name: "KiloCoder"
  preferred: false
  category: ide
  description: "AI coding platform"
  description: "Anthropic's official CLI for Claude"
  installer:
    target_dir: .claude/commands
    frontmatter_template: common-yaml.md

crush:
  name: "Crush"
  preferred: false
  category: ide
  description: "AI development assistant"
  installer:
    target_dir: .crush/commands
    frontmatter_template: common-yaml.md

antigravity:
  name: "Google Antigravity"
cursor:
  name: "Cursor"
  preferred: true
  category: ide
  description: "AI-first code editor"
  installer:
    target_dir: .cursor/commands
    frontmatter_template: common-yaml.md

gemini:
  name: "Gemini CLI"
  preferred: false
  category: cli
  description: "Google's CLI for Gemini"
  installer:
    target_dir: .gemini/commands
    file_extension: .toml
    frontmatter_template: common-toml.md

github-copilot:
  name: "GitHub Copilot"
  preferred: true
  category: ide
  description: "GitHub's AI pair programmer"
  installer:
    targets:
      - dir: .github/agents
        frontmatter_template: copilot-agent.md
        artifact_types: [agents]
      - dir: .github/prompts
        frontmatter_template: copilot.md
        artifact_types: [workflows, tasks, tools]
    has_vscode_settings: true

iflow:
  name: "iFlow"
  preferred: false
  category: ide
  description: "Google's AI development environment"
  description: "AI workflow automation"
  installer:
    target_dir: .iflow/commands
    frontmatter_template: common-yaml.md

kilo:
  name: "KiloCoder"
  preferred: false
  category: ide
  description: "AI coding platform"
  # Kilo has custom installer (.kilocodemodes YAML format) - not config-driven

kiro-cli:
  name: "Kiro CLI"
  preferred: false
  category: cli
  description: "Kiro command-line interface"
  # Kiro CLI has custom installer (YAML->JSON conversion) - not config-driven

opencode:
  name: "OpenCode"
  preferred: false
  category: ide
  description: "OpenCode terminal coding assistant"
  installer:
    targets:
      - dir: .opencode/agent
        frontmatter_template: opencode-agent.md
        artifact_types: [agents]
      - dir: .opencode/command
        frontmatter_template: opencode.md
        artifact_types: [workflows, tasks, tools]

qwen:
  name: "QwenCoder"
  preferred: false
  category: ide
  description: "Qwen AI coding assistant"
  installer:
    target_dir: .qwen/commands
    file_extension: .toml
    frontmatter_template: common-toml.md

roo:
  name: "Roo Code"
  preferred: false
  category: ide
  description: "Enhanced Cline fork"
  installer:
    target_dir: .roo/commands
    frontmatter_template: roo.md
    skip_existing: true

rovo-dev:
  name: "Rovo Dev"
  preferred: false
  category: ide
  description: "Atlassian's Rovo development environment"
  installer:
    target_dir: .rovo-dev/commands
    frontmatter_template: common-yaml.md

trae:
  name: "Trae"
  preferred: false
  category: ide
  description: "AI coding tool"
  installer:
    target_dir: .trae/rules
    frontmatter_template: trae.md

windsurf:
  name: "Windsurf"
  preferred: true
  category: ide
  description: "AI-powered IDE with cascade flows"
  installer:
    target_dir: .windsurf/workflows
    frontmatter_template: windsurf.md

# Platform categories
categories:
@@ -155,3 +205,12 @@ conventions:
  name_format: "Title Case"
  max_code_length: 20
  allowed_characters: "a-z0-9-"

  # New universal file naming standard
  file_naming:
    agent: "bmad-{module}-{name}.agent.md"
    workflow: "bmad-{module}-{name}.workflow.md"
    task: "bmad-{module}-{name}.task.md"
    tool: "bmad-{module}-{name}.tool.md"
    example_agent: "bmad-cis-storymaster.agent.md"
    example_workflow: "bmad-bmm-plan-project.workflow.md"
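The `file_naming` entries above are plain `{module}`/`{name}` templates. As a rough illustration of how an installer might expand them (a minimal sketch, not part of this diff; `expandFileName` and the hard-coded template map are assumptions):

```js
// Minimal sketch of expanding the file_naming templates from the YAML above.
// The template map mirrors the config; expandFileName itself is hypothetical.
const FILE_NAMING = {
  agent: 'bmad-{module}-{name}.agent.md',
  workflow: 'bmad-{module}-{name}.workflow.md',
  task: 'bmad-{module}-{name}.task.md',
  tool: 'bmad-{module}-{name}.tool.md',
};

function expandFileName(type, module, name) {
  return FILE_NAMING[type].replaceAll('{module}', module).replaceAll('{name}', name);
}

console.log(expandFileName('agent', 'cis', 'storymaster')); // bmad-cis-storymaster.agent.md
console.log(expandFileName('workflow', 'bmm', 'plan-project')); // bmad-bmm-plan-project.workflow.md
```

The two `console.log` outputs match the `example_agent` and `example_workflow` values in the config.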
@@ -1,372 +0,0 @@
const path = require('node:path');
const fs = require('fs-extra');
const { BaseIdeSetup } = require('./_base-ide');
const chalk = require('chalk');
const { getAgentsFromBmad, getTasksFromBmad } = require('./shared/bmad-artifacts');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');

/**
 * Qwen Code setup handler
 * Creates TOML command files in .qwen/commands/BMad/
 */
class QwenSetup extends BaseIdeSetup {
  constructor() {
    super('qwen', 'Qwen Code');
    this.configDir = '.qwen';
    this.commandsDir = 'commands';
    this.bmadDir = 'bmad';
  }

  /**
   * Setup Qwen Code configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Create .qwen/commands/BMad directory structure
    const qwenDir = path.join(projectDir, this.configDir);
    const commandsDir = path.join(qwenDir, this.commandsDir);
    const bmadCommandsDir = path.join(commandsDir, this.bmadDir);

    await this.ensureDir(bmadCommandsDir);

    // Update existing settings.json if present
    await this.updateSettings(qwenDir);

    // Clean up old configuration if exists
    await this.cleanupOldConfig(qwenDir);

    // Generate agent launchers
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    // Get tasks, tools, and workflows (standalone only for tools/workflows)
    const tasks = await getTasksFromBmad(bmadDir, options.selectedModules || []);
    const tools = await this.getTools(bmadDir, true);
    const workflows = await this.getWorkflows(bmadDir, true);

    // Create directories for each module (including standalone)
    const modules = new Set();
    for (const item of [...agentArtifacts, ...tasks, ...tools, ...workflows]) modules.add(item.module);

    for (const module of modules) {
      await this.ensureDir(path.join(bmadCommandsDir, module));
      await this.ensureDir(path.join(bmadCommandsDir, module, 'agents'));
      await this.ensureDir(path.join(bmadCommandsDir, module, 'tasks'));
      await this.ensureDir(path.join(bmadCommandsDir, module, 'tools'));
      await this.ensureDir(path.join(bmadCommandsDir, module, 'workflows'));
    }

    // Create TOML files for each agent launcher
    let agentCount = 0;
    for (const artifact of agentArtifacts) {
      // Convert markdown launcher content to TOML format
      const tomlContent = this.processAgentLauncherContent(artifact.content, {
        module: artifact.module,
        name: artifact.name,
      });

      const targetPath = path.join(bmadCommandsDir, artifact.module, 'agents', `${artifact.name}.toml`);

      await this.writeFile(targetPath, tomlContent);

      agentCount++;
      console.log(chalk.green(` ✓ Added agent: /bmad_${artifact.module}_agents_${artifact.name}`));
    }

    // Create TOML files for each task
    let taskCount = 0;
    for (const task of tasks) {
      const content = await this.readAndProcess(task.path, {
        module: task.module,
        name: task.name,
      });

      const targetPath = path.join(bmadCommandsDir, task.module, 'tasks', `${task.name}.toml`);

      await this.writeFile(targetPath, content);

      taskCount++;
      console.log(chalk.green(` ✓ Added task: /bmad_${task.module}_tasks_${task.name}`));
    }

    // Create TOML files for each tool
    let toolCount = 0;
    for (const tool of tools) {
      const content = await this.readAndProcess(tool.path, {
        module: tool.module,
        name: tool.name,
      });

      const targetPath = path.join(bmadCommandsDir, tool.module, 'tools', `${tool.name}.toml`);

      await this.writeFile(targetPath, content);

      toolCount++;
      console.log(chalk.green(` ✓ Added tool: /bmad_${tool.module}_tools_${tool.name}`));
    }

    // Create TOML files for each workflow
    let workflowCount = 0;
    for (const workflow of workflows) {
      const content = await this.readAndProcess(workflow.path, {
        module: workflow.module,
        name: workflow.name,
      });

      const targetPath = path.join(bmadCommandsDir, workflow.module, 'workflows', `${workflow.name}.toml`);

      await this.writeFile(targetPath, content);

      workflowCount++;
      console.log(chalk.green(` ✓ Added workflow: /bmad_${workflow.module}_workflows_${workflow.name}`));
    }

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${agentCount} agents configured`));
    console.log(chalk.dim(` - ${taskCount} tasks configured`));
    console.log(chalk.dim(` - ${toolCount} tools configured`));
    console.log(chalk.dim(` - ${workflowCount} workflows configured`));
    console.log(chalk.dim(` - Commands directory: ${path.relative(projectDir, bmadCommandsDir)}`));

    return {
      success: true,
      agents: agentCount,
      tasks: taskCount,
      tools: toolCount,
      workflows: workflowCount,
    };
  }

  /**
   * Update settings.json to remove old agent references
   */
  async updateSettings(qwenDir) {
    const fs = require('fs-extra');
    const settingsPath = path.join(qwenDir, 'settings.json');

    if (await fs.pathExists(settingsPath)) {
      try {
        const settingsContent = await fs.readFile(settingsPath, 'utf8');
        const settings = JSON.parse(settingsContent);
        let updated = false;

        // Remove agent file references from contextFileName
        if (settings.contextFileName && Array.isArray(settings.contextFileName)) {
          const originalLength = settings.contextFileName.length;
          settings.contextFileName = settings.contextFileName.filter(
            (fileName) => !fileName.startsWith('agents/') && !fileName.startsWith('bmad-method/'),
          );

          if (settings.contextFileName.length !== originalLength) {
            updated = true;
          }
        }

        if (updated) {
          await fs.writeFile(settingsPath, JSON.stringify(settings, null, 2));
          console.log(chalk.green(' ✓ Updated .qwen/settings.json'));
        }
      } catch (error) {
        console.warn(chalk.yellow(' ⚠ Could not update settings.json:'), error.message);
      }
    }
  }

  /**
   * Clean up old configuration directories
   */
  async cleanupOldConfig(qwenDir) {
    const fs = require('fs-extra');
    const agentsDir = path.join(qwenDir, 'agents');
    const bmadMethodDir = path.join(qwenDir, 'bmad-method');
    const bmadDir = path.join(qwenDir, 'bmadDir');

    if (await fs.pathExists(agentsDir)) {
      await fs.remove(agentsDir);
      console.log(chalk.green(' ✓ Removed old agents directory'));
    }

    if (await fs.pathExists(bmadMethodDir)) {
      await fs.remove(bmadMethodDir);
      console.log(chalk.green(' ✓ Removed old bmad-method directory'));
    }

    if (await fs.pathExists(bmadDir)) {
      await fs.remove(bmadDir);
      console.log(chalk.green(' ✓ Removed old BMad directory'));
    }
  }

  /**
   * Read and process file content
   */
  async readAndProcess(filePath, metadata) {
    const fs = require('fs-extra');
    const content = await fs.readFile(filePath, 'utf8');
    return this.processContent(content, metadata);
  }

  /**
   * Process agent launcher content and convert to TOML format
   * @param {string} launcherContent - Launcher markdown content
   * @param {Object} metadata - File metadata
   * @returns {string} TOML formatted content
   */
  processAgentLauncherContent(launcherContent, metadata = {}) {
    // Strip frontmatter from launcher content
    const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
    const contentWithoutFrontmatter = launcherContent.replace(frontmatterRegex, '');

    // Extract title for TOML description
    const titleMatch = launcherContent.match(/description:\s*"([^"]+)"/);
    const title = titleMatch ? titleMatch[1] : metadata.name;

    // Create TOML with launcher content (without frontmatter)
    return `description = "BMAD ${metadata.module.toUpperCase()} Agent: ${title}"
prompt = """
${contentWithoutFrontmatter.trim()}
"""
`;
  }

  /**
   * Override processContent to add TOML metadata header for Qwen
   * @param {string} content - File content
   * @param {Object} metadata - File metadata
   * @returns {string} Processed content with Qwen template
   */
  processContent(content, metadata = {}) {
    // First apply base processing (includes activation injection for agents)
    let prompt = super.processContent(content, metadata);

    // Determine the type and description based on content
    const isAgent = content.includes('<agent');
    const isTask = content.includes('<task');
    const isTool = content.includes('<tool');
    const isWorkflow = content.includes('workflow:') || content.includes('name:');

    let description = '';

    if (isAgent) {
      // Extract agent title if available
      const titleMatch = content.match(/title="([^"]+)"/);
      const title = titleMatch ? titleMatch[1] : metadata.name;
      description = `BMAD ${metadata.module.toUpperCase()} Agent: ${title}`;
    } else if (isTask) {
      // Extract task name if available
      const nameMatch = content.match(/name="([^"]+)"/);
      const taskName = nameMatch ? nameMatch[1] : metadata.name;
      description = `BMAD ${metadata.module.toUpperCase()} Task: ${taskName}`;
    } else if (isTool) {
      // Extract tool name if available
      const nameMatch = content.match(/name="([^"]+)"/);
      const toolName = nameMatch ? nameMatch[1] : metadata.name;
      description = `BMAD ${metadata.module.toUpperCase()} Tool: ${toolName}`;
    } else if (isWorkflow) {
      // Workflow
      description = `BMAD ${metadata.module.toUpperCase()} Workflow: ${metadata.name}`;
    } else {
      description = `BMAD ${metadata.module.toUpperCase()}: ${metadata.name}`;
    }

    return `description = "${description}"
prompt = """
${prompt}
"""
`;
  }

  /**
   * Format name as title
   */
  formatTitle(name) {
    return name
      .split('-')
      .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
      .join(' ');
  }

  /**
   * Cleanup Qwen configuration
   */
  async cleanup(projectDir) {
    const fs = require('fs-extra');
    const bmadCommandsDir = path.join(projectDir, this.configDir, this.commandsDir, this.bmadDir);
    const oldBmadMethodDir = path.join(projectDir, this.configDir, 'bmad-method');
    const oldBMadDir = path.join(projectDir, this.configDir, 'BMad');

    if (await fs.pathExists(bmadCommandsDir)) {
      await fs.remove(bmadCommandsDir);
      console.log(chalk.dim(`Removed BMAD configuration from Qwen Code`));
    }

    if (await fs.pathExists(oldBmadMethodDir)) {
      await fs.remove(oldBmadMethodDir);
      console.log(chalk.dim(`Removed old BMAD configuration from Qwen Code`));
    }

    if (await fs.pathExists(oldBMadDir)) {
      await fs.remove(oldBMadDir);
      console.log(chalk.dim(`Removed old BMAD configuration from Qwen Code`));
    }
  }

  /**
   * Install a custom agent launcher for Qwen
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata
   * @returns {Object} Installation result
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const qwenDir = path.join(projectDir, this.configDir);
    const commandsDir = path.join(qwenDir, this.commandsDir);
    const bmadCommandsDir = path.join(commandsDir, this.bmadDir);

    // Create .qwen/commands/BMad directory if it doesn't exist
    await fs.ensureDir(bmadCommandsDir);

    // Create custom agent launcher in TOML format (same pattern as regular agents)
    const launcherContent = `# ${agentName} Custom Agent

**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!

This is a launcher for the custom BMAD agent "${agentName}".

## Usage
1. First run: \`${agentPath}\` to load the complete agent
2. Then use this command to activate ${agentName}

The agent will follow the persona and instructions from the main agent file.

---

*Generated by BMAD Method*`;

    // Use Qwen's TOML conversion method
    const tomlContent = this.processAgentLauncherContent(launcherContent, {
      name: agentName,
      module: 'custom',
    });

    const fileName = `custom-${agentName.toLowerCase()}.toml`;
    const launcherPath = path.join(bmadCommandsDir, fileName);

    // Write the launcher file
    await fs.writeFile(launcherPath, tomlContent, 'utf8');

    return {
      ide: 'qwen',
      path: path.relative(projectDir, launcherPath),
      command: agentName,
      type: 'custom-agent-launcher',
    };
  }
}

module.exports = { QwenSetup };
@@ -1,273 +0,0 @@
const path = require('node:path');
const { BaseIdeSetup } = require('./_base-ide');
const chalk = require('chalk');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { toDashPath, customAgentDashName } = require('./shared/path-utils');

/**
 * Roo IDE setup handler
 * Creates custom commands in .roo/commands directory
 */
class RooSetup extends BaseIdeSetup {
  constructor() {
    super('roo', 'Roo Code');
    this.configDir = '.roo';
    this.commandsDir = 'commands';
  }

  /**
   * Setup Roo IDE configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Create .roo/commands directory
    const rooCommandsDir = path.join(projectDir, this.configDir, this.commandsDir);
    await this.ensureDir(rooCommandsDir);

    // Generate agent launchers
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    let addedCount = 0;
    let skippedCount = 0;

    for (const artifact of agentArtifacts) {
      // Use shared toDashPath to get consistent naming: bmad_bmm_name.md
      const commandName = toDashPath(artifact.relativePath).replace('.md', '');
      const commandPath = path.join(rooCommandsDir, `${commandName}.md`);

      // Skip if already exists
      if (await this.pathExists(commandPath)) {
        console.log(chalk.dim(` Skipping ${commandName} - already exists`));
        skippedCount++;
        continue;
      }

      // artifact.sourcePath contains the full path to the agent file
      if (!artifact.sourcePath) {
        console.error(`Error: Missing sourcePath for artifact ${artifact.name} from module ${artifact.module}`);
        console.error(`Artifact object:`, artifact);
        throw new Error(`Missing sourcePath for agent: ${artifact.name}`);
      }

      const content = await this.readFile(artifact.sourcePath);

      // Create command file that references the actual _bmad agent
      await this.createCommandFile(
        { module: artifact.module, name: artifact.name, path: artifact.sourcePath },
        content,
        commandPath,
        projectDir,
      );

      addedCount++;
      console.log(chalk.green(` ✓ Added command: ${commandName}`));
    }

    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${addedCount} commands added`));
    if (skippedCount > 0) {
      console.log(chalk.dim(` - ${skippedCount} commands skipped (already exist)`));
    }
    console.log(chalk.dim(` - Commands directory: ${this.configDir}/${this.commandsDir}/`));
    console.log(chalk.dim(` Commands will be available when you open this project in Roo Code`));

    return {
      success: true,
      commands: addedCount,
      skipped: skippedCount,
    };
  }

  /**
   * Create a unified command file for agents
   * @param {string} commandPath - Path where to write the command file
   * @param {Object} options - Command options
   * @param {string} options.name - Display name for the command
   * @param {string} options.description - Description for the command
   * @param {string} options.agentPath - Path to the agent file (relative to project root)
   * @param {string} [options.icon] - Icon emoji (defaults to 🤖)
   * @param {string} [options.extraContent] - Additional content to include before activation
   */
  async createAgentCommandFile(commandPath, options) {
    const { name, description, agentPath, icon = '🤖', extraContent = '' } = options;

    // Build command content with YAML frontmatter
    let commandContent = `---\n`;
    commandContent += `name: '${icon} ${name}'\n`;
    commandContent += `description: '${description}'\n`;
    commandContent += `---\n\n`;

    commandContent += `You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.\n\n`;

    // Add any extra content (e.g., warnings for custom agents)
    if (extraContent) {
      commandContent += `${extraContent}\n\n`;
    }

    commandContent += `<agent-activation CRITICAL="TRUE">\n`;
    commandContent += `1. LOAD the FULL agent file from @${agentPath}\n`;
    commandContent += `2. READ its entire contents - this contains the complete agent persona, menu, and instructions\n`;
    commandContent += `3. Execute ALL activation steps exactly as written in the agent file\n`;
    commandContent += `4. Follow the agent's persona and menu system precisely\n`;
    commandContent += `5. Stay in character throughout the session\n`;
    commandContent += `</agent-activation>\n`;

    // Write command file
    await this.writeFile(commandPath, commandContent);
  }

  /**
   * Create a command file for an agent
   */
  async createCommandFile(agent, content, commandPath, projectDir) {
    // Extract metadata from agent content
    const titleMatch = content.match(/title="([^"]+)"/);
    const title = titleMatch ? titleMatch[1] : this.formatTitle(agent.name);

    const iconMatch = content.match(/icon="([^"]+)"/);
    const icon = iconMatch ? iconMatch[1] : '🤖';

    const whenToUseMatch = content.match(/whenToUse="([^"]+)"/);
    const whenToUse = whenToUseMatch ? whenToUseMatch[1] : `Use for ${title} tasks`;

    // Get relative path
    const relativePath = path.relative(projectDir, agent.path).replaceAll('\\', '/');

    // Use unified method
    await this.createAgentCommandFile(commandPath, {
      name: title,
      description: whenToUse,
      agentPath: relativePath,
      icon: icon,
    });
  }

  /**
   * Format name as title
   */
  formatTitle(name) {
    return name
      .split('-')
      .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
      .join(' ');
  }

  /**
   * Cleanup Roo configuration
   */
  async cleanup(projectDir) {
    const fs = require('fs-extra');
    const rooCommandsDir = path.join(projectDir, this.configDir, this.commandsDir);

    if (await fs.pathExists(rooCommandsDir)) {
      const files = await fs.readdir(rooCommandsDir);
      let removedCount = 0;

      for (const file of files) {
        if (file.startsWith('bmad') && file.endsWith('.md')) {
          await fs.remove(path.join(rooCommandsDir, file));
          removedCount++;
        }
      }

      if (removedCount > 0) {
        console.log(chalk.dim(`Removed ${removedCount} BMAD commands from .roo/commands/`));
      }
    }

    // Also clean up old .roomodes file if it exists
    const roomodesPath = path.join(projectDir, '.roomodes');
    if (await fs.pathExists(roomodesPath)) {
      const content = await fs.readFile(roomodesPath, 'utf8');

      // Remove BMAD modes only
      const lines = content.split('\n');
      const filteredLines = [];
      let skipMode = false;
      let removedCount = 0;

      for (const line of lines) {
        if (/^\s*- slug: bmad/.test(line)) {
          skipMode = true;
          removedCount++;
        } else if (skipMode && /^\s*- slug: /.test(line)) {
          skipMode = false;
        }

        if (!skipMode) {
          filteredLines.push(line);
        }
      }

      // Write back filtered content
      await fs.writeFile(roomodesPath, filteredLines.join('\n'));
      if (removedCount > 0) {
        console.log(chalk.dim(`Removed ${removedCount} BMAD modes from legacy .roomodes file`));
      }
    }
  }

  /**
   * Install a custom agent launcher for Roo
   * @param {string} projectDir - Project directory
   * @param {string} agentName - Agent name (e.g., "fred-commit-poet")
   * @param {string} agentPath - Path to compiled agent (relative to project root)
   * @param {Object} metadata - Agent metadata (unused, kept for compatibility)
   * @returns {Object} Installation result
   */
  async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
    const rooCommandsDir = path.join(projectDir, this.configDir, this.commandsDir);
    await this.ensureDir(rooCommandsDir);

    // Use underscore format: bmad_custom_fred-commit-poet.md
    const commandName = customAgentDashName(agentName).replace('.md', '');
    const commandPath = path.join(rooCommandsDir, `${commandName}.md`);

    // Check if command already exists
    if (await this.pathExists(commandPath)) {
      return {
        ide: 'roo',
        path: path.join(this.configDir, this.commandsDir, `${commandName}.md`),
        command: commandName,
        type: 'custom-agent-launcher',
        alreadyExists: true,
      };
    }

    // Read the custom agent file to extract metadata (same as regular agents)
    const fullAgentPath = path.join(projectDir, agentPath);
    const content = await this.readFile(fullAgentPath);

    // Extract metadata from agent content
    const titleMatch = content.match(/title="([^"]+)"/);
    const title = titleMatch ? titleMatch[1] : this.formatTitle(agentName);

    const iconMatch = content.match(/icon="([^"]+)"/);
    const icon = iconMatch ? iconMatch[1] : '🤖';

    const whenToUseMatch = content.match(/whenToUse="([^"]+)"/);
    const whenToUse = whenToUseMatch ? whenToUseMatch[1] : `Use for ${title} tasks`;

    // Use unified method without extra content (clean)
    await this.createAgentCommandFile(commandPath, {
      name: title,
      description: whenToUse,
      agentPath: agentPath,
      icon: icon,
    });

    return {
      ide: 'roo',
      path: path.join(this.configDir, this.commandsDir, `${commandName}.md`),
      command: commandName,
      type: 'custom-agent-launcher',
    };
  }
}

module.exports = { RooSetup };
@@ -1,290 +0,0 @@
const path = require('node:path');
const fs = require('fs-extra');
const chalk = require('chalk');
const { BaseIdeSetup } = require('./_base-ide');
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
const { TaskToolCommandGenerator } = require('./shared/task-tool-command-generator');

/**
 * Rovo Dev IDE setup handler
 *
 * Installs BMAD agents as Rovo Dev subagents in .rovodev/subagents/
 * Installs workflows and tasks/tools as reference guides in .rovodev/
 * Rovo Dev automatically discovers agents and integrates with BMAD like other IDEs
 */
class RovoDevSetup extends BaseIdeSetup {
  constructor() {
    super('rovo-dev', 'Atlassian Rovo Dev', false);
    this.configDir = '.rovodev';
    this.subagentsDir = 'subagents';
    this.workflowsDir = 'workflows';
    this.referencesDir = 'references';
  }

  /**
   * Cleanup old BMAD installation before reinstalling
   * @param {string} projectDir - Project directory
   */
  async cleanup(projectDir) {
    const rovoDevDir = path.join(projectDir, this.configDir);

    if (!(await fs.pathExists(rovoDevDir))) {
      return;
    }

    // Clean BMAD agents from subagents directory
    const subagentsDir = path.join(rovoDevDir, this.subagentsDir);
    if (await fs.pathExists(subagentsDir)) {
      const entries = await fs.readdir(subagentsDir);
      const bmadFiles = entries.filter((file) => file.startsWith('bmad') && file.endsWith('.md'));

      for (const file of bmadFiles) {
        await fs.remove(path.join(subagentsDir, file));
      }
    }

    // Clean BMAD workflows from workflows directory
    const workflowsDir = path.join(rovoDevDir, this.workflowsDir);
    if (await fs.pathExists(workflowsDir)) {
      const entries = await fs.readdir(workflowsDir);
      const bmadFiles = entries.filter((file) => file.startsWith('bmad') && file.endsWith('.md'));

      for (const file of bmadFiles) {
        await fs.remove(path.join(workflowsDir, file));
      }
    }

    // Clean BMAD tasks/tools from references directory
    const referencesDir = path.join(rovoDevDir, this.referencesDir);
    if (await fs.pathExists(referencesDir)) {
      const entries = await fs.readdir(referencesDir);
      const bmadFiles = entries.filter((file) => file.startsWith('bmad') && file.endsWith('.md'));

      for (const file of bmadFiles) {
        await fs.remove(path.join(referencesDir, file));
      }
    }
  }

  /**
   * Setup Rovo Dev configuration
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {Object} options - Setup options
   */
  async setup(projectDir, bmadDir, options = {}) {
    console.log(chalk.cyan(`Setting up ${this.name}...`));

    // Clean up old BMAD installation first
    await this.cleanup(projectDir);

    // Create .rovodev directory structure
    const rovoDevDir = path.join(projectDir, this.configDir);
    const subagentsDir = path.join(rovoDevDir, this.subagentsDir);
    const workflowsDir = path.join(rovoDevDir, this.workflowsDir);
    const referencesDir = path.join(rovoDevDir, this.referencesDir);

    await this.ensureDir(subagentsDir);
    await this.ensureDir(workflowsDir);
    await this.ensureDir(referencesDir);

    // Generate and install agents
    const agentGen = new AgentCommandGenerator(this.bmadFolderName);
    const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);

    let agentCount = 0;
    for (const artifact of agentArtifacts) {
      const subagentFilename = `bmad-${artifact.module}-${artifact.name}.md`;
      const targetPath = path.join(subagentsDir, subagentFilename);
      const subagentContent = this.convertToRovoDevSubagent(artifact.content, artifact.name, artifact.module);
      await this.writeFile(targetPath, subagentContent);
      agentCount++;
    }

    // Generate and install workflows
    const workflowGen = new WorkflowCommandGenerator(this.bmadFolderName);
    const { artifacts: workflowArtifacts, counts: workflowCounts } = await workflowGen.collectWorkflowArtifacts(bmadDir);

    let workflowCount = 0;
    for (const artifact of workflowArtifacts) {
      if (artifact.type === 'workflow-command') {
        const workflowFilename = path.basename(artifact.relativePath);
        const targetPath = path.join(workflowsDir, workflowFilename);
        await this.writeFile(targetPath, artifact.content);
        workflowCount++;
      }
    }

    // Generate and install tasks and tools
    const taskToolGen = new TaskToolCommandGenerator();
    const { tasks: taskCount, tools: toolCount } = await this.generateTaskToolReferences(bmadDir, referencesDir, taskToolGen);

    // Summary output
    console.log(chalk.green(`✓ ${this.name} configured:`));
    console.log(chalk.dim(` - ${agentCount} agents installed to .rovodev/subagents/`));
    if (workflowCount > 0) {
      console.log(chalk.dim(` - ${workflowCount} workflows installed to .rovodev/workflows/`));
    }
    if (taskCount + toolCount > 0) {
      console.log(
        chalk.dim(` - ${taskCount + toolCount} tasks/tools installed to .rovodev/references/ (${taskCount} tasks, ${toolCount} tools)`),
      );
    }
    console.log(chalk.yellow(`\n Note: Agents are automatically discovered by Rovo Dev`));
    console.log(chalk.dim(` - Access agents by typing @ in Rovo Dev to see available options`));
    console.log(chalk.dim(` - Workflows and references are available in .rovodev/ directory`));

    return {
      success: true,
      agents: agentCount,
      workflows: workflowCount,
      tasks: taskCount,
      tools: toolCount,
    };
  }

  /**
   * Generate task and tool reference guides
   * @param {string} bmadDir - BMAD directory
   * @param {string} referencesDir - References directory
   * @param {TaskToolCommandGenerator} taskToolGen - Generator instance
   */
  async generateTaskToolReferences(bmadDir, referencesDir, taskToolGen) {
    const tasks = await taskToolGen.loadTaskManifest(bmadDir);
    const tools = await taskToolGen.loadToolManifest(bmadDir);

    const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
    const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];

    let taskCount = 0;
    for (const task of standaloneTasks) {
      const commandContent = taskToolGen.generateCommandContent(task, 'task');
      const targetPath = path.join(referencesDir, `bmad-task-${task.module}-${task.name}.md`);
      await this.writeFile(targetPath, commandContent);
      taskCount++;
    }

    let toolCount = 0;
    for (const tool of standaloneTools) {
      const commandContent = taskToolGen.generateCommandContent(tool, 'tool');
      const targetPath = path.join(referencesDir, `bmad-tool-${tool.module}-${tool.name}.md`);
      await this.writeFile(targetPath, commandContent);
      toolCount++;
    }

    return { tasks: taskCount, tools: toolCount };
  }

  /**
   * Convert BMAD agent launcher to Rovo Dev subagent format
   *
   * Rovo Dev subagents use Markdown files with YAML frontmatter containing:
   * - name: Unique identifier for the subagent
   * - description: One-line description of the subagent's purpose
   * - tools: Array of tools the subagent can use (optional)
   * - model: Specific model for this subagent (optional)
   * - load_memory: Whether to load memory files (optional, defaults to true)
   *
   * @param {string} launcherContent - Original agent launcher content
   * @param {string} agentName - Name of the agent
   * @param {string} moduleName - Name of the module
   * @returns {string} Rovo Dev subagent-formatted content
   */
  convertToRovoDevSubagent(launcherContent, agentName, moduleName) {
    // Extract metadata from the launcher XML
    const titleMatch = launcherContent.match(/title="([^"]+)"/);
    const title = titleMatch ? titleMatch[1] : this.formatTitle(agentName);

    const descriptionMatch = launcherContent.match(/description="([^"]+)"/);
    const description = descriptionMatch ? descriptionMatch[1] : `BMAD agent: ${title}`;

    const roleDefinitionMatch = launcherContent.match(/roleDefinition="([^"]+)"/);
    const roleDefinition = roleDefinitionMatch ? roleDefinitionMatch[1] : `You are a specialized agent for ${title.toLowerCase()} tasks.`;

    // Extract the main system prompt from the launcher (content after closing tags)
    let systemPrompt = roleDefinition;

    // Try to extract additional instructions from the launcher content
    const instructionsMatch = launcherContent.match(/<instructions>([\s\S]*?)<\/instructions>/);
    if (instructionsMatch) {
      systemPrompt += '\n\n' + instructionsMatch[1].trim();
    }

    // Build YAML frontmatter for Rovo Dev subagent
    const frontmatter = {
      name: `bmad-${moduleName}-${agentName}`,
      description: description,
      // Note: tools and model can be added by users in their .rovodev/subagents/*.md files
      // We don't enforce specific tools since BMAD agents are flexible
    };

    // Create YAML frontmatter string with proper quoting for special characters
    let yamlContent = '---\n';
    yamlContent += `name: ${frontmatter.name}\n`;
    // Quote description to handle colons and other special characters in YAML
    yamlContent += `description: "${frontmatter.description.replaceAll('"', String.raw`\"`)}"\n`;
    yamlContent += '---\n';

    // Combine frontmatter with system prompt
    const subagentContent = yamlContent + systemPrompt;

    return subagentContent;
  }

  /**
   * Detect whether Rovo Dev is already configured in the project
   * @param {string} projectDir - Project directory
   * @returns {boolean}
   */
  async detect(projectDir) {
    const rovoDevDir = path.join(projectDir, this.configDir);

    if (!(await fs.pathExists(rovoDevDir))) {
      return false;
    }

    // Check for BMAD agents in subagents directory
    const subagentsDir = path.join(rovoDevDir, this.subagentsDir);
    if (await fs.pathExists(subagentsDir)) {
      try {
        const entries = await fs.readdir(subagentsDir);
        if (entries.some((entry) => entry.startsWith('bmad') && entry.endsWith('.md'))) {
          return true;
        }
      } catch {
        // Continue checking other directories
      }
    }

    // Check for BMAD workflows in workflows directory
    const workflowsDir = path.join(rovoDevDir, this.workflowsDir);
    if (await fs.pathExists(workflowsDir)) {
      try {
        const entries = await fs.readdir(workflowsDir);
        if (entries.some((entry) => entry.startsWith('bmad') && entry.endsWith('.md'))) {
          return true;
        }
      } catch {
        // Continue checking other directories
      }
    }

    // Check for BMAD tasks/tools in references directory
    const referencesDir = path.join(rovoDevDir, this.referencesDir);
    if (await fs.pathExists(referencesDir)) {
      try {
        const entries = await fs.readdir(referencesDir);
        if (entries.some((entry) => entry.startsWith('bmad') && entry.endsWith('.md'))) {
          return true;
        }
      } catch {
        // Continue
      }
    }

    return false;
  }
}

module.exports = { RovoDevSetup };
@@ -1,6 +1,5 @@
const path = require('node:path');
const fs = require('fs-extra');
const chalk = require('chalk');
const { toColonPath, toDashPath, customAgentColonName, customAgentDashName } = require('./path-utils');

/**

@@ -33,8 +32,10 @@ class AgentCommandGenerator {
      const agentPathInModule = agent.relativePath || `${agent.name}.md`;
      artifacts.push({
        type: 'agent-launcher',
        module: agent.module,
        name: agent.name,
        displayName: agent.displayName || agent.name,
        description: agent.description,
        module: agent.module,
        relativePath: path.join(agent.module, 'agents', agentPathInModule),
        content: launcherContent,
        sourcePath: agent.path,

@@ -65,9 +66,8 @@ class AgentCommandGenerator {
      .replaceAll('{{name}}', agent.name)
      .replaceAll('{{module}}', agent.module)
      .replaceAll('{{path}}', agentPathInModule)
      .replaceAll('{{description}}', agent.description || `${agent.name} agent`)
      .replaceAll('_bmad', this.bmadFolderName)
      .replaceAll('_bmad', '_bmad');
      .replaceAll('{{relativePath}}', path.join(agent.module, 'agents', agentPathInModule))
      .replaceAll('{{description}}', agent.description || `${agent.name} agent`);
  }

  /**

@@ -109,7 +109,7 @@ class AgentCommandGenerator {
        // Convert relativePath to underscore format: bmm/agents/pm.md → bmad_bmm_pm.md
        const flatName = toColonPath(artifact.relativePath);
        const launcherPath = path.join(baseCommandsDir, flatName);
        await fs.ensureDir(path.dirname(launcherPath));
        await fs.ensureDir(baseCommandsDir);
        await fs.writeFile(launcherPath, artifact.content);
        writtenCount++;
      }

@@ -119,8 +119,8 @@ class AgentCommandGenerator {
  }

  /**
   * Write agent launcher artifacts using underscore format (Windows-compatible)
   * Creates flat files like: bmad_bmm_pm.md
   * Write agent launcher artifacts using dash format
   * Creates flat files like: bmad-bmm-agent-pm.md
   *
   * @param {string} baseCommandsDir - Base commands directory for the IDE
   * @param {Array} artifacts - Agent launcher artifacts

@@ -131,10 +131,10 @@ class AgentCommandGenerator {

    for (const artifact of artifacts) {
      if (artifact.type === 'agent-launcher') {
        // Convert relativePath to underscore format: bmm/agents/pm.md → bmad_bmm_pm.md
        // Convert relativePath to dash format: bmm/agents/pm.md → bmad-bmm-agent-pm.md
        const flatName = toDashPath(artifact.relativePath);
        const launcherPath = path.join(baseCommandsDir, flatName);
        await fs.ensureDir(path.dirname(launcherPath));
        await fs.ensureDir(baseCommandsDir);
        await fs.writeFile(launcherPath, artifact.content);
        writtenCount++;
      }
@@ -44,9 +44,26 @@ async function getAgentsFromBmad(bmadDir, selectedModules = []) {
    if (content.includes('localskip="true"')) continue;

    // Extract description from YAML frontmatter if present
    let description = null;
    let agentName = file.replace('.md', '');
    const frontmatterMatch = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n/);
    if (frontmatterMatch) {
      const descMatch = frontmatterMatch[1].match(/description:\s*"([^"]+)"/);
      if (descMatch) {
        description = descMatch[1];
      }
      const nameMatch = frontmatterMatch[1].match(/name:\s*"([^"]+)"/);
      if (nameMatch) {
        agentName = nameMatch[1];
      }
    }

    agents.push({
      path: filePath,
      name: file.replace('.md', ''),
      name: agentName,
      displayName: agentName,
      description: description,
      module: 'standalone', // Mark as standalone agent
    });
  }

@@ -114,9 +131,26 @@ async function getAgentsFromDir(dirPath, moduleName, relativePath = '') {
        continue;
      }

      // Extract description from YAML frontmatter if present
      let description = null;
      const frontmatterMatch = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n/);
      if (frontmatterMatch) {
        const descMatch = frontmatterMatch[1].match(/description:\s*"([^"]+)"/);
        if (descMatch) {
          description = descMatch[1];
        }
        // Also extract name from frontmatter if available
        const nameMatch = frontmatterMatch[1].match(/name:\s*"([^"]+)"/);
        if (nameMatch) {
          entry.name = `${nameMatch[1]}.md`;
        }
      }

      agents.push({
        path: fullPath,
        name: entry.name.replace('.md', ''),
        displayName: entry.name.replace('.md', ''),
        description: description,
        module: moduleName,
        relativePath: newRelativePath, // Keep the .md extension for the full path
      });
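Both hunks above add the same frontmatter-parsing step to the two agent loaders. A standalone sketch of that extraction, using the regexes from the diff on an invented sample string:

```js
// Sketch of the frontmatter extraction added above, run on an invented sample agent file.
const content = `---
name: "pm"
description: "Product manager agent"
---
<agent title="PM">...</agent>
`;

let description = null;
let agentName = 'pm.md'.replace('.md', ''); // fallback: filename without extension

// Same regexes as in getAgentsFromBmad / getAgentsFromDir above.
const frontmatterMatch = content.match(/^---\s*\n([\s\S]*?)\n---\s*\n/);
if (frontmatterMatch) {
  const descMatch = frontmatterMatch[1].match(/description:\s*"([^"]+)"/);
  if (descMatch) description = descMatch[1];
  const nameMatch = frontmatterMatch[1].match(/name:\s*"([^"]+)"/);
  if (nameMatch) agentName = nameMatch[1];
}

console.log(agentName, description); // pm Product manager agent
```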
@@ -3,12 +3,24 @@
 *
 * Provides utilities to convert hierarchical paths to flat naming conventions.
 * - Underscore format (bmad_module_name.md) - Windows-compatible universal format
 * - Suffix-based format (bmad-module-name.agent.md) - New universal standard
 */

// Default file extension for backward compatibility
const DEFAULT_FILE_EXTENSION = '.md';

// Type segments - agents are included in naming, others are filtered out
const TYPE_SEGMENTS = ['workflows', 'tasks', 'tools'];
const AGENT_SEGMENT = 'agents';

/**
 * Artifact type to suffix mapping
 * Only agents get the .agent suffix; workflows/tasks/tools use standard .md extension
 */
const ARTIFACT_SUFFIXES = {
  agent: '.agent',
};

/**
 * Convert hierarchical path to flat underscore-separated name
 * Converts: 'bmm', 'agents', 'pm' → 'bmad_bmm_agent_pm.md'

@@ -18,36 +30,48 @@ const AGENT_SEGMENT = 'agents';
 * @param {string} module - Module name (e.g., 'bmm', 'core')
 * @param {string} type - Artifact type ('agents', 'workflows', 'tasks', 'tools')
 * @param {string} name - Artifact name (e.g., 'pm', 'brainstorming')
 * @param {string} [fileExtension=DEFAULT_FILE_EXTENSION] - File extension including dot (e.g., '.md', '.toml')
 * @returns {string} Flat filename like 'bmad_bmm_agent_pm.md' or 'bmad_bmm_correct-course.md'
 */
function toUnderscoreName(module, type, name) {
function toUnderscoreName(module, type, name, fileExtension = DEFAULT_FILE_EXTENSION) {
  const isAgent = type === AGENT_SEGMENT;
  // For core module, skip the module prefix: use 'bmad_name.md' instead of 'bmad_core_name.md'
  if (module === 'core') {
    return isAgent ? `bmad_agent_${name}.md` : `bmad_${name}.md`;
    return isAgent ? `bmad_agent_${name}${fileExtension}` : `bmad_${name}${fileExtension}`;
  }
  return isAgent ? `bmad_${module}_agent_${name}.md` : `bmad_${module}_${name}.md`;
  return isAgent ? `bmad_${module}_agent_${name}${fileExtension}` : `bmad_${module}_${name}${fileExtension}`;
}

/**
 * Convert relative path to flat underscore-separated name
 * Converts: 'bmm/agents/pm.md' → 'bmad_bmm_agent_pm.md'
 * Converts: 'bmm/workflows/correct-course.md' → 'bmad_bmm_correct-course.md'
 * Converts: 'bmad_bmb/agents/agent-builder.md' → 'bmad_bmb_agent_agent-builder.md' (bmad prefix already in module)
 * Converts: 'core/agents/brainstorming.md' → 'bmad_agent_brainstorming.md' (core items skip module prefix)
 *
 * @param {string} relativePath - Path like 'bmm/agents/pm.md'
 * @param {string} [fileExtension=DEFAULT_FILE_EXTENSION] - File extension including dot (e.g., '.md', '.toml')
 * @returns {string} Flat filename like 'bmad_bmm_agent_pm.md' or 'bmad_brainstorming.md'
 */
function toUnderscorePath(relativePath) {
  const withoutExt = relativePath.replace('.md', '');
function toUnderscorePath(relativePath, fileExtension = DEFAULT_FILE_EXTENSION) {
  // Extract extension from relativePath to properly remove it
  const extMatch = relativePath.match(/\.[^.]+$/);
  const originalExt = extMatch ? extMatch[0] : '';
  const withoutExt = relativePath.replace(originalExt, '');
  const parts = withoutExt.split(/[/\\]/);

  const module = parts[0];
  const type = parts[1];
  const name = parts.slice(2).join('_');

  // Use toUnderscoreName for consistency
  return toUnderscoreName(module, type, name);
  const isAgent = type === AGENT_SEGMENT;
  // For core module, skip the module prefix: use 'bmad_name.md' instead of 'bmad_core_name.md'
  if (module === 'core') {
    return isAgent ? `bmad_agent_${name}${fileExtension}` : `bmad_${name}${fileExtension}`;
  }
  // If module already starts with 'bmad_', don't add another prefix
  const prefix = module.startsWith('bmad_') ? '' : 'bmad_';
  return isAgent ? `${prefix}${module}_agent_${name}${fileExtension}` : `${prefix}${module}_${name}${fileExtension}`;
}

/**

@@ -55,10 +79,11 @@ function toUnderscorePath(relativePath) {
 * Creates: 'bmad_custom_fred-commit-poet.md'
 *
 * @param {string} agentName - Custom agent name
 * @param {string} [fileExtension=DEFAULT_FILE_EXTENSION] - File extension including dot (e.g., '.md', '.toml')
 * @returns {string} Flat filename like 'bmad_custom_fred-commit-poet.md'
 */
function customAgentUnderscoreName(agentName) {
  return `bmad_custom_${agentName}.md`;
function customAgentUnderscoreName(agentName, fileExtension = DEFAULT_FILE_EXTENSION) {
  return `bmad_custom_${agentName}${fileExtension}`;
}

/**

@@ -134,9 +159,9 @@ function parseUnderscoreName(filename) {
}

// Backward compatibility aliases (deprecated)
// Note: These now use toDashPath and customAgentDashName which convert underscores to dashes
const toColonName = toUnderscoreName;
const toColonPath = toUnderscorePath;
const toDashPath = toUnderscorePath;
const toDashName = toUnderscoreName;
const customAgentColonName = customAgentUnderscoreName;
const customAgentDashName = customAgentUnderscoreName;
const isColonFormat = isUnderscoreFormat;

@@ -144,7 +169,125 @@ const isDashFormat = isUnderscoreFormat;
const parseColonName = parseUnderscoreName;
const parseDashName = parseUnderscoreName;

/**
 * Convert relative path to flat colon-separated name (for backward compatibility)
 * This is actually the same as underscore format now (underscores in filenames)
 * @param {string} relativePath - Path like 'bmm/agents/pm.md'
 * @param {string} [fileExtension=DEFAULT_FILE_EXTENSION] - File extension including dot
 * @returns {string} Flat filename like 'bmad_bmm_agent_pm.md'
 */
function toColonPath(relativePath, fileExtension = DEFAULT_FILE_EXTENSION) {
  return toUnderscorePath(relativePath, fileExtension);
}

/**
 * Convert relative path to flat dash-separated name
 * Converts: 'bmm/agents/pm.md' → 'bmad-bmm-agent-pm.md'
 * Converts: 'bmm/workflows/correct-course' → 'bmad-bmm-correct-course.md'
 * Converts: 'bmad-bmb/agents/agent-builder.md' → 'bmad-bmb-agent-agent-builder.md' (bmad prefix already in module)
 * @param {string} relativePath - Path like 'bmm/agents/pm.md'
 * @param {string} [fileExtension=DEFAULT_FILE_EXTENSION] - File extension including dot
 * @returns {string} Flat filename like 'bmad-bmm-agent-pm.md'
 */
function toDashPath(relativePath, fileExtension = DEFAULT_FILE_EXTENSION) {
  // Extract extension from relativePath to properly remove it
  const extMatch = relativePath.match(/\.[^.]+$/);
  const originalExt = extMatch ? extMatch[0] : '';
  const withoutExt = relativePath.replace(originalExt, '');
  const parts = withoutExt.split(/[/\\]/);

  const module = parts[0];
  const type = parts[1];
  const name = parts.slice(2).join('-');

  // Use dash naming style
  const isAgent = type === AGENT_SEGMENT;
  // For core module, skip the module prefix
  if (module === 'core') {
    return isAgent ? `bmad-agent-${name}${fileExtension}` : `bmad-${name}${fileExtension}`;
  }
  // If module already starts with 'bmad-', don't add another prefix
  const prefix = module.startsWith('bmad-') ? '' : 'bmad-';
  return isAgent ? `${prefix}${module}-agent-${name}${fileExtension}` : `${prefix}${module}-${name}${fileExtension}`;
}

/**
 * Convert relative path to suffix-based name (NEW UNIVERSAL STANDARD)
 * Only applies .agent suffix to agents; workflows/tasks/tools get standard .md extension.
 * Converts: 'cis/agents/storymaster.md' → 'bmad-cis-storymaster.agent.md'
 * Converts: 'bmm/workflows/plan-project.md' → 'bmad-bmm-plan-project.md'
 * Converts: 'bmm/tasks/create-story.md' → 'bmad-bmm-create-story.md'
 * Converts: 'bmm/tools/file-ops.md' → 'bmad-bmm-file-ops.md'
 * Converts: 'core/agents/brainstorming.md' → 'bmad-brainstorming.agent.md' (core items skip module prefix)
 *
 * @param {string} relativePath - Path like 'cis/agents/storymaster.md'
 * @param {string} artifactType - Type of artifact: 'agent', 'workflow', 'task', 'tool'
 * @param {string} [fileExtension='.md'] - File extension including dot (e.g., '.md', '.toml')
 * @returns {string} Suffix-based filename like 'bmad-cis-storymaster.agent.md'
 */
function toSuffixBasedName(relativePath, artifactType, fileExtension = DEFAULT_FILE_EXTENSION) {
  const extMatch = relativePath.match(/\.[^.]+$/);
  const originalExt = extMatch ? extMatch[0] : '';
  const withoutExt = relativePath.replace(originalExt, '');
  const parts = withoutExt.split(/[/\\]/);

  const module = parts[0];
  const type = parts[1]; // agents, workflows, tasks, tools
  const name = parts.slice(2).join('-');

  // Only add .agent suffix for agents; workflows/tasks/tools use standard extension
  const suffix = artifactType === 'agent' ? ARTIFACT_SUFFIXES.agent : '';

  // For core module, skip the module prefix (use 'bmad-name.suffix.md')
  if (module === 'core') {
    return `bmad-${name}${suffix}.${fileExtension.replace('.', '')}`;
  }

  // If module already starts with 'bmad-', don't add another prefix
  const prefix = module.startsWith('bmad-') ? '' : 'bmad-';
  return `${prefix}${module}-${name}${suffix}.${fileExtension.replace('.', '')}`;
}

/**
 * Get suffix for artifact type
 * @param {string} artifactType - Type of artifact: 'agent', 'workflow', 'task', 'tool'
 * @returns {string} Suffix like '.agent', '.workflow', etc.
 */
function getArtifactSuffix(artifactType) {
  return ARTIFACT_SUFFIXES[artifactType] || '';
}

/**
 * Parse artifact type from suffix-based filename
 * Parses: 'bmad-cis-storymaster.agent.md' → 'agent'
 * Returns null for workflows/tasks/tools (no suffix)
 *
 * @param {string} filename - Suffix-based filename
 * @returns {string|null} Artifact type or null if not found
 */
function parseArtifactTypeFromFilename(filename) {
  for (const [type, suffix] of Object.entries(ARTIFACT_SUFFIXES)) {
    if (filename.includes(`${suffix}.`)) {
      return type;
    }
  }
  return null;
}

/**
 * Create custom agent suffix-based name
 * Creates: 'bmad-custom-fred-commit-poet.agent.md'
 *
 * @param {string} agentName - Custom agent name
 * @param {string} [fileExtension='.md'] - File extension including dot
 * @returns {string} Suffix-based filename like 'bmad-custom-fred-commit-poet.agent.md'
 */
function customAgentSuffixName(agentName, fileExtension = DEFAULT_FILE_EXTENSION) {
  return `bmad-custom-${agentName}.agent.${fileExtension.replace('.', '')}`;
}

module.exports = {
  DEFAULT_FILE_EXTENSION,
  toUnderscoreName,
  toUnderscorePath,
  customAgentUnderscoreName,

@@ -153,6 +296,7 @@ module.exports = {
  // Backward compatibility aliases
  toColonName,
  toColonPath,
  toDashName,
  toDashPath,
  customAgentColonName,
  customAgentDashName,

@@ -162,4 +306,10 @@ module.exports = {
  parseDashName,
  TYPE_SEGMENTS,
  AGENT_SEGMENT,
  // New suffix-based naming functions (UNIVERSAL STANDARD)
  ARTIFACT_SUFFIXES,
  toSuffixBasedName,
  getArtifactSuffix,
  parseArtifactTypeFromFilename,
  customAgentSuffixName,
};
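Taken together, the path-utils hunks keep the underscore and dash helpers while introducing the suffix-based standard. A small usage sketch based on the conversions documented in the JSDoc above (the require path is an assumption for illustration):

```js
// Usage sketch of the naming helpers changed above; the expected outputs
// follow the conversions documented in their JSDoc. Require path assumed.
const { toDashPath, toSuffixBasedName } = require('./path-utils');

console.log(toDashPath('bmm/agents/pm.md')); // bmad-bmm-agent-pm.md
console.log(toSuffixBasedName('cis/agents/storymaster.md', 'agent')); // bmad-cis-storymaster.agent.md
console.log(toSuffixBasedName('bmm/workflows/plan-project.md', 'workflow')); // bmad-bmm-plan-project.md
```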
@@ -2,85 +2,17 @@ const path = require('node:path');
const fs = require('fs-extra');
const csv = require('csv-parse/sync');
const chalk = require('chalk');
const { toColonName, toColonPath, toDashPath } = require('./path-utils');
const { toColonName, toColonPath, toDashPath, toSuffixBasedName } = require('./path-utils');

/**
 * Generates command files for standalone tasks and tools
 */
class TaskToolCommandGenerator {
  /**
   * Generate task and tool commands from manifest CSVs
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {string} baseCommandsDir - Optional base commands directory (defaults to .claude/commands/bmad)
   * REMOVED: Old generateTaskToolCommands method that created nested structure.
   * This was causing bugs where files were written to wrong directories.
   * Use generateColonTaskToolCommands() or generateDashTaskToolCommands() instead.
   */
  async generateTaskToolCommands(projectDir, bmadDir, baseCommandsDir = null) {
    const tasks = await this.loadTaskManifest(bmadDir);
    const tools = await this.loadToolManifest(bmadDir);

    // Filter to only standalone items
    const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
    const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];

    // Base commands directory - use provided or default to Claude Code structure
    const commandsDir = baseCommandsDir || path.join(projectDir, '.claude', 'commands', 'bmad');

    let generatedCount = 0;

    // Generate command files for tasks
    for (const task of standaloneTasks) {
      const moduleTasksDir = path.join(commandsDir, task.module, 'tasks');
      await fs.ensureDir(moduleTasksDir);

      const commandContent = this.generateCommandContent(task, 'task');
      const commandPath = path.join(moduleTasksDir, `${task.name}.md`);

      await fs.writeFile(commandPath, commandContent);
      generatedCount++;
    }

    // Generate command files for tools
    for (const tool of standaloneTools) {
      const moduleToolsDir = path.join(commandsDir, tool.module, 'tools');
      await fs.ensureDir(moduleToolsDir);

      const commandContent = this.generateCommandContent(tool, 'tool');
      const commandPath = path.join(moduleToolsDir, `${tool.name}.md`);

      await fs.writeFile(commandPath, commandContent);
      generatedCount++;
    }

    return {
      generated: generatedCount,
      tasks: standaloneTasks.length,
      tools: standaloneTools.length,
    };
  }

  /**
   * Generate command content for a task or tool
   */
  generateCommandContent(item, type) {
    const description = item.description || `Execute ${item.displayName || item.name}`;

    // Convert path to use {project-root} placeholder
    let itemPath = item.path;
    if (itemPath.startsWith('bmad/')) {
      itemPath = `{project-root}/${itemPath}`;
    }

    return `---
description: '${description.replaceAll("'", "''")}'
---

# ${item.displayName || item.name}

LOAD and execute the ${type} at: ${itemPath}

Follow all instructions in the ${type} file exactly as written.
`;
  }

  /**
   * Load task manifest CSV

@@ -93,10 +25,16 @@ Follow all instructions in the ${type} file exactly as written.
    }

    const csvContent = await fs.readFile(manifestPath, 'utf8');
    return csv.parse(csvContent, {
    const tasks = csv.parse(csvContent, {
      columns: true,
      skip_empty_lines: true,
    });

    // Filter out README files
    return tasks.filter((task) => {
      const nameLower = task.name.toLowerCase();
      return !nameLower.includes('readme') && task.name !== 'README';
    });
  }

  /**

@@ -110,10 +48,16 @@ Follow all instructions in the ${type} file exactly as written.
    }

    const csvContent = await fs.readFile(manifestPath, 'utf8');
    return csv.parse(csvContent, {
    const tools = csv.parse(csvContent, {
      columns: true,
      skip_empty_lines: true,
    });

    // Filter out README files
    return tools.filter((tool) => {
      const nameLower = tool.name.toLowerCase();
      return !nameLower.includes('readme') && tool.name !== 'README';
    });
  }

  /**

@@ -123,9 +67,10 @@ Follow all instructions in the ${type} file exactly as written.
   * @param {string} projectDir - Project directory
   * @param {string} bmadDir - BMAD installation directory
   * @param {string} baseCommandsDir - Base commands directory for the IDE
   * @param {string} [fileExtension='.md'] - File extension including dot (e.g., '.md', '.toml')
   * @returns {Object} Generation results
   */
|
||||
async generateColonTaskToolCommands(projectDir, bmadDir, baseCommandsDir) {
|
||||
async generateColonTaskToolCommands(projectDir, bmadDir, baseCommandsDir, fileExtension = '.md') {
|
||||
const tasks = await this.loadTaskManifest(bmadDir);
|
||||
const tools = await this.loadToolManifest(bmadDir);
|
||||
|
||||
|
|
@ -133,14 +78,20 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
|
||||
// Determine format based on file extension
|
||||
const format = fileExtension === '.toml' ? 'toml' : 'yaml';
|
||||
let generatedCount = 0;
|
||||
|
||||
// DEBUG: Log parameters
|
||||
console.log(`[DEBUG generateColonTaskToolCommands] baseCommandsDir: ${baseCommandsDir}, format=${format}`);
|
||||
|
||||
// Generate command files for tasks
|
||||
for (const task of standaloneTasks) {
|
||||
const commandContent = this.generateCommandContent(task, 'task');
|
||||
// Use underscore format: bmad_bmm_name.md
|
||||
const flatName = toColonName(task.module, 'tasks', task.name);
|
||||
const commandContent = this.generateCommandContent(task, 'task', format);
|
||||
// Use underscore format: bmad_bmm_name.<ext>
|
||||
const flatName = toColonName(task.module, 'tasks', task.name, fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, flatName);
|
||||
console.log(`[DEBUG generateColonTaskToolCommands] Writing task ${task.name} to: ${commandPath}`);
|
||||
await fs.ensureDir(path.dirname(commandPath));
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
generatedCount++;
|
||||
|
|
@ -148,9 +99,9 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
|
||||
// Generate command files for tools
|
||||
for (const tool of standaloneTools) {
|
||||
const commandContent = this.generateCommandContent(tool, 'tool');
|
||||
// Use underscore format: bmad_bmm_name.md
|
||||
const flatName = toColonName(tool.module, 'tools', tool.name);
|
||||
const commandContent = this.generateCommandContent(tool, 'tool', format);
|
||||
// Use underscore format: bmad_bmm_name.<ext>
|
||||
const flatName = toColonName(tool.module, 'tools', tool.name, fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, flatName);
|
||||
await fs.ensureDir(path.dirname(commandPath));
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
|
|
@ -165,15 +116,16 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
}
|
||||
|
||||
/**
|
||||
* Generate task and tool commands using underscore format (Windows-compatible)
|
||||
* Creates flat files like: bmad_bmm_bmad-help.md
|
||||
* Generate task and tool commands using dash format
|
||||
* Creates flat files like: bmad-bmm-bmad-help.md
|
||||
*
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {string} baseCommandsDir - Base commands directory for the IDE
|
||||
* @param {string} [fileExtension='.md'] - File extension including dot (e.g., '.md', '.toml')
|
||||
* @returns {Object} Generation results
|
||||
*/
|
||||
async generateDashTaskToolCommands(projectDir, bmadDir, baseCommandsDir) {
|
||||
async generateDashTaskToolCommands(projectDir, bmadDir, baseCommandsDir, fileExtension = '.md') {
|
||||
const tasks = await this.loadTaskManifest(bmadDir);
|
||||
const tools = await this.loadToolManifest(bmadDir);
|
||||
|
||||
|
|
@ -181,13 +133,15 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
|
||||
// Determine format based on file extension
|
||||
const format = fileExtension === '.toml' ? 'toml' : 'yaml';
|
||||
let generatedCount = 0;
|
||||
|
||||
// Generate command files for tasks
|
||||
for (const task of standaloneTasks) {
|
||||
const commandContent = this.generateCommandContent(task, 'task');
|
||||
// Use underscore format: bmad_bmm_name.md
|
||||
const flatName = toDashPath(`${task.module}/tasks/${task.name}.md`);
|
||||
const commandContent = this.generateCommandContent(task, 'task', format);
|
||||
// Use dash format: bmad-bmm-task-name.<ext>
|
||||
const flatName = toDashPath(`${task.module}/tasks/${task.name}.md`, fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, flatName);
|
||||
await fs.ensureDir(path.dirname(commandPath));
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
|
|
@ -196,9 +150,9 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
|
||||
// Generate command files for tools
|
||||
for (const tool of standaloneTools) {
|
||||
const commandContent = this.generateCommandContent(tool, 'tool');
|
||||
// Use underscore format: bmad_bmm_name.md
|
||||
const flatName = toDashPath(`${tool.module}/tools/${tool.name}.md`);
|
||||
const commandContent = this.generateCommandContent(tool, 'tool', format);
|
||||
// Use dash format: bmad-bmm-tool-name.<ext>
|
||||
const flatName = toDashPath(`${tool.module}/tools/${tool.name}.md`, fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, flatName);
|
||||
await fs.ensureDir(path.dirname(commandPath));
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
|
|
@ -263,6 +217,163 @@ Follow all instructions in the ${type} file exactly as written.
|
|||
|
||||
return writtenCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate task and tool commands using suffix-based format (NEW UNIVERSAL STANDARD)
|
||||
* Creates flat files like: bmad-bmm-create-story.task.md
|
||||
*
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {string} baseCommandsDir - Base commands directory for the IDE
|
||||
* @param {string} [fileExtension='.md'] - File extension including dot (e.g., '.md', '.toml')
|
||||
* @param {string} [templateContent] - Frontmatter template content (from platform-codes.yaml)
|
||||
* @param {string} [frontmatterTemplate] - Frontmatter template filename
|
||||
* @param {boolean} [skipExisting=false] - Skip if file already exists
|
||||
* @returns {Object} Generation results
|
||||
*/
|
||||
async generateSuffixBasedTaskToolCommands(
|
||||
projectDir,
|
||||
bmadDir,
|
||||
baseCommandsDir,
|
||||
fileExtension = '.md',
|
||||
templateContent = null,
|
||||
frontmatterTemplate = 'common-yaml.md',
|
||||
skipExisting = false,
|
||||
) {
|
||||
const tasks = await this.loadTaskManifest(bmadDir);
|
||||
const tools = await this.loadToolManifest(bmadDir);
|
||||
|
||||
// Filter to only standalone items
|
||||
const standaloneTasks = tasks ? tasks.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
const standaloneTools = tools ? tools.filter((t) => t.standalone === 'true' || t.standalone === true) : [];
|
||||
|
||||
let generatedCount = 0;
|
||||
let skippedCount = 0;
|
||||
|
||||
// Generate command files for tasks
|
||||
for (const task of standaloneTasks) {
|
||||
const commandContent = this.generateCommandContent(task, 'task', templateContent, frontmatterTemplate);
|
||||
// Use suffix-based format: bmad-bmm-create-story.task.md
|
||||
const relativePath = `${task.module}/tasks/${task.name}.md`;
|
||||
const suffixName = toSuffixBasedName(relativePath, 'task', fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, suffixName);
|
||||
|
||||
// Skip if already exists
|
||||
if (skipExisting && (await fs.pathExists(commandPath))) {
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
await fs.ensureDir(baseCommandsDir);
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
generatedCount++;
|
||||
}
|
||||
|
||||
// Generate command files for tools
|
||||
for (const tool of standaloneTools) {
|
||||
const commandContent = this.generateCommandContent(tool, 'tool', templateContent, frontmatterTemplate);
|
||||
// Use suffix-based format: bmad-bmm-file-ops.tool.md
|
||||
const relativePath = `${tool.module}/tools/${tool.name}.md`;
|
||||
const suffixName = toSuffixBasedName(relativePath, 'tool', fileExtension);
|
||||
const commandPath = path.join(baseCommandsDir, suffixName);
|
||||
|
||||
// Skip if already exists
|
||||
if (skipExisting && (await fs.pathExists(commandPath))) {
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
await fs.ensureDir(baseCommandsDir);
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
generatedCount++;
|
||||
}
|
||||
|
||||
if (skippedCount > 0) {
|
||||
console.log(chalk.dim(` Skipped ${skippedCount} existing task/tool files`));
|
||||
}
|
||||
|
||||
return {
|
||||
generated: generatedCount,
|
||||
tasks: standaloneTasks.length,
|
||||
tools: standaloneTools.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate command content for a task or tool
|
||||
* @param {Object} item - Task or tool item from manifest
|
||||
* @param {string} type - 'task' or 'tool'
|
||||
* @param {string|Object|null} [templateOrFormat] - Template content or format string ('yaml'/'toml') for backward compat
|
||||
* @param {string} [frontmatterTemplate] - Template filename (for format detection)
|
||||
*/
|
||||
generateCommandContent(item, type, templateOrFormat = null, frontmatterTemplate = null) {
|
||||
const description = item.description || `Execute ${item.displayName || item.name}`;
|
||||
|
||||
// Convert path to use {project-root} placeholder
|
||||
let itemPath = item.path;
|
||||
if (itemPath.startsWith('bmad/')) {
|
||||
itemPath = `{project-root}/${itemPath}`;
|
||||
}
|
||||
|
||||
const content = `# ${item.displayName || item.name}
|
||||
|
||||
LOAD and execute the ${type} at: ${itemPath}
|
||||
|
||||
Follow all instructions in the ${type} file exactly as written.
|
||||
`;
|
||||
|
||||
// Handle old calling convention: (item, type, format) where format is 'yaml' or 'toml'
|
||||
if (typeof templateOrFormat === 'string' && (templateOrFormat === 'yaml' || templateOrFormat === 'toml')) {
|
||||
if (templateOrFormat === 'toml') {
|
||||
// TOML format
|
||||
const escapedContent = content.replaceAll('"""', String.raw`\"\"\"`);
|
||||
return `description = "${description}"
|
||||
prompt = """
|
||||
${escapedContent}
|
||||
"""
|
||||
`;
|
||||
}
|
||||
// Default YAML format
|
||||
return `---
|
||||
description: '${description.replaceAll("'", "''")}'
|
||||
---
|
||||
|
||||
${content}`;
|
||||
}
|
||||
|
||||
// New calling convention with template content
|
||||
const templateContent = templateOrFormat;
|
||||
if (!templateContent || frontmatterTemplate === 'none' || (templateContent === null && frontmatterTemplate === null)) {
|
||||
// Default YAML
|
||||
return `---
|
||||
description: '${description.replaceAll("'", "''")}'
|
||||
---
|
||||
|
||||
${content}`;
|
||||
}
|
||||
|
||||
// Apply template variables
|
||||
const variables = {
|
||||
name: item.name,
|
||||
displayName: item.displayName || item.name,
|
||||
description,
|
||||
content,
|
||||
icon: '🤖',
|
||||
};
|
||||
|
||||
let result = templateContent;
|
||||
for (const [key, value] of Object.entries(variables)) {
|
||||
result = result.replaceAll(`{{${key}}}`, value);
|
||||
}
|
||||
|
||||
// Handle TOML templates specially
|
||||
if (frontmatterTemplate && frontmatterTemplate.includes('toml')) {
|
||||
const escapedContent = content.replaceAll('"""', String.raw`\"\"\"`);
|
||||
result = result.replace(/prompt = """/, `prompt = """\n${escapedContent}`);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { TaskToolCommandGenerator };
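For reference, a small sketch of the two calling conventions generateCommandContent now accepts, per its JSDoc above; the item object below is hypothetical and only carries the fields the method actually reads (name, displayName, description, path):

const { TaskToolCommandGenerator } = require('./task-tool-command-generator');

const gen = new TaskToolCommandGenerator();
const item = {
  name: 'create-story',
  displayName: 'Create Story',
  description: 'Create a story from the backlog',
  path: 'bmad/bmm/tasks/create-story.md',
};

// Backward-compatible convention: third argument is a format string
const yamlCommand = gen.generateCommandContent(item, 'task', 'yaml'); // YAML frontmatter + body
const tomlCommand = gen.generateCommandContent(item, 'task', 'toml'); // description/prompt TOML block

// New convention: third argument is template content with {{placeholders}}, fourth is the template filename
const template = "---\ndescription: '{{description}}'\n---\n\n{{content}}";
const templated = gen.generateCommandContent(item, 'task', template, 'common-yaml.md');
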
@ -0,0 +1,375 @@
|
|||
/**
|
||||
* Unified BMAD Installer for all IDEs
|
||||
*
|
||||
* ALL IDE configuration comes from platform-codes.yaml
|
||||
* NO IDE-specific code in this file - just loads and applies templates
|
||||
*/
|
||||
|
||||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./agent-command-generator');
|
||||
const { WorkflowCommandGenerator } = require('./workflow-command-generator');
|
||||
const { TaskToolCommandGenerator } = require('./task-tool-command-generator');
|
||||
const { toColonPath, toDashPath, toSuffixBasedName, getArtifactSuffix } = require('./path-utils');
|
||||
|
||||
/**
|
||||
* Naming styles
|
||||
* @deprecated Use 'suffix-based' for all new installations
|
||||
*/
|
||||
const NamingStyle = {
|
||||
FLAT_COLON: 'flat-colon',
|
||||
FLAT_DASH: 'flat-dash',
|
||||
NESTED: 'nested',
|
||||
SUFFIX_BASED: 'suffix-based',
|
||||
};
|
||||
|
||||
/**
|
||||
* Unified installer configuration
|
||||
* @typedef {Object} UnifiedInstallConfig
|
||||
* @property {string} targetDir - Full path to target directory
|
||||
* @property {NamingStyle} namingStyle - How to name files
|
||||
* @property {string} [frontmatterTemplate] - Frontmatter template filename (from platform-codes.yaml)
|
||||
* @property {string} [fileExtension='.md'] - File extension including dot
|
||||
* @property {boolean} includeNestedStructure - For NESTED style, create subdirectories
|
||||
* @property {Function} [customTemplateFn] - Optional custom template function
|
||||
*/
|
||||
|
||||
/**
|
||||
* Unified BMAD Installer
|
||||
*
|
||||
* Driven entirely by platform-codes.yaml configuration
|
||||
* Frontmatter templates are loaded from templates/frontmatter/ directory
|
||||
*/
|
||||
class UnifiedInstaller {
|
||||
constructor(bmadFolderName = 'bmad') {
|
||||
this.bmadFolderName = bmadFolderName;
|
||||
this.templateDir = path.join(__dirname, '../templates/frontmatter');
|
||||
}
|
||||
|
||||
/**
|
||||
* Install BMAD artifacts for an IDE
|
||||
*
|
||||
* @param {string} projectDir - Project root directory
|
||||
* @param {string} bmadDir - BMAD installation directory (_bmad)
|
||||
* @param {UnifiedInstallConfig} config - Installation configuration
|
||||
* @param {Array<string>} selectedModules - Modules to install
|
||||
* @returns {Promise<Object>} Installation result with counts
|
||||
*/
|
||||
async install(projectDir, bmadDir, config, selectedModules = []) {
|
||||
const {
|
||||
targetDir,
|
||||
namingStyle = NamingStyle.SUFFIX_BASED,
|
||||
frontmatterTemplate = 'common-yaml.md',
|
||||
fileExtension = '.md',
|
||||
includeNestedStructure = false,
|
||||
customTemplateFn = null,
|
||||
skipExisting = false,
|
||||
artifactTypes = null,
|
||||
} = config;
|
||||
|
||||
// Clean up any existing BMAD files in target directory (unless skipExisting)
|
||||
if (!skipExisting) {
|
||||
await this.cleanupBmadFiles(targetDir, fileExtension);
|
||||
}
|
||||
|
||||
// Ensure target directory exists
|
||||
await fs.ensureDir(targetDir);
|
||||
|
||||
// Count results
|
||||
const counts = {
|
||||
agents: 0,
|
||||
workflows: 0,
|
||||
tasks: 0,
|
||||
tools: 0,
|
||||
total: 0,
|
||||
};
|
||||
|
||||
// Check if we should install agents
|
||||
const installAgents = !artifactTypes || artifactTypes.includes('agents');
|
||||
const installWorkflows = !artifactTypes || artifactTypes.includes('workflows');
|
||||
const installTasks = !artifactTypes || artifactTypes.includes('tasks');
|
||||
const installTools = !artifactTypes || artifactTypes.includes('tools');
|
||||
|
||||
// Load frontmatter template once (if not 'none')
|
||||
let templateContent = null;
|
||||
if (frontmatterTemplate && frontmatterTemplate !== 'none') {
|
||||
templateContent = await this.loadFrontmatterTemplate(frontmatterTemplate);
|
||||
}
|
||||
|
||||
// 1. Install Agents
|
||||
if (installAgents) {
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, selectedModules);
|
||||
counts.agents = await this.writeArtifacts(
|
||||
agentArtifacts,
|
||||
targetDir,
|
||||
namingStyle,
|
||||
templateContent,
|
||||
frontmatterTemplate,
|
||||
fileExtension,
|
||||
customTemplateFn,
|
||||
'agent',
|
||||
skipExisting,
|
||||
);
|
||||
}
|
||||
|
||||
// 2. Install Workflows (filter out README artifacts)
|
||||
if (installWorkflows) {
|
||||
const workflowGen = new WorkflowCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: workflowArtifacts } = await workflowGen.collectWorkflowArtifacts(bmadDir);
|
||||
const workflowArtifactsFiltered = workflowArtifacts.filter((a) => {
|
||||
const name = path.basename(a.relativePath || '');
|
||||
return name.toLowerCase() !== 'readme.md' && !name.toLowerCase().startsWith('readme-');
|
||||
});
|
||||
counts.workflows = await this.writeArtifacts(
|
||||
workflowArtifactsFiltered,
|
||||
targetDir,
|
||||
namingStyle,
|
||||
templateContent,
|
||||
frontmatterTemplate,
|
||||
fileExtension,
|
||||
customTemplateFn,
|
||||
'workflow',
|
||||
skipExisting,
|
||||
);
|
||||
}
|
||||
|
||||
// 3. Install Tasks and Tools from manifest CSV
|
||||
if (installTasks || installTools) {
|
||||
const ttGen = new TaskToolCommandGenerator();
|
||||
|
||||
// Use suffix-based naming if specified
|
||||
if (namingStyle === NamingStyle.SUFFIX_BASED) {
|
||||
const taskToolResult = await ttGen.generateSuffixBasedTaskToolCommands(
|
||||
projectDir,
|
||||
bmadDir,
|
||||
targetDir,
|
||||
fileExtension,
|
||||
templateContent,
|
||||
frontmatterTemplate,
|
||||
skipExisting,
|
||||
);
|
||||
counts.tasks = taskToolResult.tasks || 0;
|
||||
counts.tools = taskToolResult.tools || 0;
|
||||
} else if (namingStyle === NamingStyle.FLAT_DASH) {
|
||||
const taskToolResult = await ttGen.generateDashTaskToolCommands(projectDir, bmadDir, targetDir, fileExtension);
|
||||
counts.tasks = taskToolResult.tasks || 0;
|
||||
counts.tools = taskToolResult.tools || 0;
|
||||
} else {
|
||||
const taskToolResult = await ttGen.generateColonTaskToolCommands(projectDir, bmadDir, targetDir, fileExtension);
|
||||
counts.tasks = taskToolResult.tasks || 0;
|
||||
counts.tools = taskToolResult.tools || 0;
|
||||
}
|
||||
}
|
||||
|
||||
counts.total = counts.agents + counts.workflows + counts.tasks + counts.tools;
|
||||
|
||||
return counts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load frontmatter template from file
|
||||
* @param {string} templateFile - Template filename
|
||||
* @returns {Promise<string|null>} Template content or null if not found
|
||||
*/
|
||||
async loadFrontmatterTemplate(templateFile) {
|
||||
const templatePath = path.join(this.templateDir, templateFile);
|
||||
try {
|
||||
return await fs.readFile(templatePath, 'utf8');
|
||||
} catch {
|
||||
console.warn(chalk.yellow(`Warning: Could not load template ${templateFile}, using default`));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply frontmatter template to content
|
||||
* @param {Object} artifact - Artifact with metadata
|
||||
* @param {string} content - Original content
|
||||
* @param {string} templateContent - Template content
|
||||
* @param {string} templateFile - Template filename (for special handling)
|
||||
* @returns {string} Content with frontmatter applied
|
||||
*/
|
||||
applyFrontmatterTemplate(artifact, content, templateContent, templateFile) {
|
||||
if (!templateContent) {
|
||||
return content;
|
||||
}
|
||||
|
||||
// Extract existing frontmatter if present
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
const contentWithoutFrontmatter = content.replace(frontmatterRegex, '').trim();
|
||||
|
||||
// Get artifact metadata for template substitution
|
||||
const name = artifact.name || artifact.displayName || 'workflow';
|
||||
const title = this.formatTitle(name);
|
||||
const iconMatch = content.match(/icon="([^"]+)"/);
|
||||
const icon = iconMatch ? iconMatch[1] : '🤖';
|
||||
|
||||
// Use artifact's description if available, otherwise generate fallback
|
||||
const description = artifact.description || `Activates the ${name} ${artifact.type || 'workflow'}.`;
|
||||
|
||||
// Template variables
|
||||
const variables = {
|
||||
name,
|
||||
title,
|
||||
displayName: name,
|
||||
description,
|
||||
icon,
|
||||
content: contentWithoutFrontmatter,
|
||||
|
||||
// Special variables for certain templates
|
||||
autoExecMode: this.getAutoExecMode(artifact),
|
||||
tools: JSON.stringify(this.getCopilotTools()),
|
||||
};
|
||||
|
||||
// Apply template substitutions
|
||||
let result = templateContent;
|
||||
for (const [key, value] of Object.entries(variables)) {
|
||||
result = result.replaceAll(`{{${key}}}`, value);
|
||||
}
|
||||
|
||||
// Append content after frontmatter (for TOML templates with prompt field)
|
||||
if (templateFile.includes('toml') && !result.includes('{{content}}')) {
|
||||
const escapedContent = contentWithoutFrontmatter.replaceAll('"""', String.raw`\"\"\"`);
|
||||
result = result.replace(/prompt = """/, `prompt = """\n${escapedContent}`);
|
||||
}
|
||||
|
||||
return result.trim() + '\n\n' + contentWithoutFrontmatter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get auto_execution_mode for Windsurf based on artifact type
|
||||
*/
|
||||
getAutoExecMode(artifact) {
|
||||
if (artifact.type === 'agent') return '3';
|
||||
if (artifact.type === 'task' || artifact.type === 'tool') return '2';
|
||||
return '1'; // default for workflows
|
||||
}
|
||||
|
||||
/**
|
||||
* Get GitHub Copilot tools array
|
||||
*/
|
||||
getCopilotTools() {
|
||||
return [
|
||||
'changes',
|
||||
'edit',
|
||||
'fetch',
|
||||
'githubRepo',
|
||||
'problems',
|
||||
'runCommands',
|
||||
'runTasks',
|
||||
'runTests',
|
||||
'search',
|
||||
'runSubagent',
|
||||
'testFailure',
|
||||
'todos',
|
||||
'usages',
|
||||
];
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up any existing BMAD files in target directory
|
||||
*/
|
||||
async cleanupBmadFiles(targetDir, fileExtension = '.md') {
|
||||
if (!(await fs.pathExists(targetDir))) {
|
||||
return;
|
||||
}
|
||||
|
||||
const entries = await fs.readdir(targetDir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.name.startsWith('bmad') && entry.name.endsWith(fileExtension)) {
|
||||
const entryPath = path.join(targetDir, entry.name);
|
||||
await fs.remove(entryPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write artifacts with specified naming style and template
|
||||
*/
|
||||
async writeArtifacts(
|
||||
artifacts,
|
||||
targetDir,
|
||||
namingStyle,
|
||||
templateContent,
|
||||
templateFile,
|
||||
fileExtension,
|
||||
customTemplateFn,
|
||||
artifactType,
|
||||
skipExisting = false,
|
||||
) {
|
||||
let written = 0;
|
||||
let skipped = 0;
|
||||
|
||||
for (const artifact of artifacts) {
|
||||
// Determine target path based on naming style
|
||||
let targetPath;
|
||||
let content = artifact.content;
|
||||
|
||||
switch (namingStyle) {
|
||||
case NamingStyle.SUFFIX_BASED: {
|
||||
const suffixName = toSuffixBasedName(artifact.relativePath, artifactType, fileExtension);
|
||||
targetPath = path.join(targetDir, suffixName);
|
||||
|
||||
break;
|
||||
}
|
||||
case NamingStyle.FLAT_COLON: {
|
||||
const flatName = toColonPath(artifact.relativePath, fileExtension);
|
||||
targetPath = path.join(targetDir, flatName);
|
||||
|
||||
break;
|
||||
}
|
||||
case NamingStyle.FLAT_DASH: {
|
||||
const flatName = toDashPath(artifact.relativePath, fileExtension);
|
||||
targetPath = path.join(targetDir, flatName);
|
||||
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
const flatName = toColonPath(artifact.relativePath, fileExtension);
|
||||
targetPath = path.join(targetDir, flatName);
|
||||
}
|
||||
}
|
||||
|
||||
// Skip if file already exists
|
||||
if (skipExisting && (await fs.pathExists(targetPath))) {
|
||||
skipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Apply template transformations
|
||||
if (customTemplateFn) {
|
||||
content = customTemplateFn(artifact, content, templateFile);
|
||||
} else if (templateFile !== 'none') {
|
||||
content = this.applyFrontmatterTemplate(artifact, content, templateContent, templateFile);
|
||||
}
|
||||
|
||||
await fs.ensureDir(targetDir);
|
||||
await fs.writeFile(targetPath, content, 'utf8');
|
||||
written++;
|
||||
}
|
||||
|
||||
if (skipped > 0) {
|
||||
console.log(chalk.dim(` Skipped ${skipped} existing files`));
|
||||
}
|
||||
|
||||
return written;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format name as title
|
||||
*/
|
||||
formatTitle(name) {
|
||||
return name
|
||||
.split('-')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
UnifiedInstaller,
|
||||
NamingStyle,
|
||||
};
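
A sketch of how an IDE handler might drive this module, based only on the install() signature and config fields shown above; the require path and directory values are placeholders, not confirmed locations:

const path = require('node:path');
const { UnifiedInstaller, NamingStyle } = require('./unified-installer');

async function installForIde(projectDir, bmadDir) {
  const installer = new UnifiedInstaller('bmad');
  const counts = await installer.install(projectDir, bmadDir, {
    targetDir: path.join(projectDir, '.claude', 'commands', 'bmad'),
    namingStyle: NamingStyle.SUFFIX_BASED,
    frontmatterTemplate: 'common-yaml.md',
    fileExtension: '.md',
    skipExisting: false,
  });
  return counts; // { agents, workflows, tasks, tools, total }
}
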
@ -14,44 +14,10 @@ class WorkflowCommandGenerator {
|
|||
}
|
||||
|
||||
/**
|
||||
* Generate workflow commands from the manifest CSV
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* REMOVED: Old generateWorkflowCommands method that created nested structure.
|
||||
* This was hardcoded to .claude/commands/bmad and caused bugs.
|
||||
* Use collectWorkflowArtifacts() + writeColonArtifacts/writeDashArtifacts() instead.
|
||||
*/
|
||||
async generateWorkflowCommands(projectDir, bmadDir) {
|
||||
const workflows = await this.loadWorkflowManifest(bmadDir);
|
||||
|
||||
if (!workflows) {
|
||||
console.log(chalk.yellow('Workflow manifest not found. Skipping command generation.'));
|
||||
return { generated: 0 };
|
||||
}
|
||||
|
||||
// ALL workflows now generate commands - no standalone filtering
|
||||
const allWorkflows = workflows;
|
||||
|
||||
// Base commands directory
|
||||
const baseCommandsDir = path.join(projectDir, '.claude', 'commands', 'bmad');
|
||||
|
||||
let generatedCount = 0;
|
||||
|
||||
// Generate a command file for each workflow, organized by module
|
||||
for (const workflow of allWorkflows) {
|
||||
const moduleWorkflowsDir = path.join(baseCommandsDir, workflow.module, 'workflows');
|
||||
await fs.ensureDir(moduleWorkflowsDir);
|
||||
|
||||
const commandContent = await this.generateCommandContent(workflow, bmadDir);
|
||||
const commandPath = path.join(moduleWorkflowsDir, `${workflow.name}.md`);
|
||||
|
||||
await fs.writeFile(commandPath, commandContent);
|
||||
generatedCount++;
|
||||
}
|
||||
|
||||
// Also create a workflow launcher README in each module
|
||||
const groupedWorkflows = this.groupWorkflowsByModule(allWorkflows);
|
||||
await this.createModuleWorkflowLaunchers(baseCommandsDir, groupedWorkflows);
|
||||
|
||||
return { generated: generatedCount };
|
||||
}
|
||||
|
||||
async collectWorkflowArtifacts(bmadDir) {
|
||||
const workflows = await this.loadWorkflowManifest(bmadDir);
|
||||
|
|
@ -69,6 +35,9 @@ class WorkflowCommandGenerator {
|
|||
const commandContent = await this.generateCommandContent(workflow, bmadDir);
|
||||
artifacts.push({
|
||||
type: 'workflow-command',
|
||||
name: workflow.name,
|
||||
displayName: workflow.displayName || workflow.name,
|
||||
description: workflow.description,
|
||||
module: workflow.module,
|
||||
relativePath: path.join(workflow.module, 'workflows', `${workflow.name}.md`),
|
||||
content: commandContent,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ description: '{{description}}'
You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.

<agent-activation CRITICAL="TRUE">
1. LOAD the FULL agent file from @_bmad/{{module}}/agents/{{path}}
1. LOAD the FULL agent file from @_bmad/{{relativePath}}
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
3. Execute ALL activation steps exactly as written in the agent file
4. Follow the agent's persona and menu system precisely

@ -0,0 +1,15 @@
---
name: '{{name}}'
description: '{{description}}'
---

You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.

<agent-activation CRITICAL="TRUE">
1. LOAD the FULL agent file from @_bmad/{{relativePath}}
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
3. FOLLOW every step in the <activation> section precisely
4. DISPLAY the welcome/greeting as instructed
5. PRESENT the numbered menu
6. WAIT for user input before proceeding
</agent-activation>

@ -0,0 +1,8 @@
---
name: '{{name}}'
description: '{{description}}'
---

{{activationHeader}}

Run @_bmad/{{relativePath}} to load the full agent.

@ -0,0 +1,4 @@
description = "{{description}}"
prompt = """
{{content}}
"""

@ -0,0 +1,4 @@
---
name: '{{name}}'
description: '{{description}}'
---

@ -0,0 +1,7 @@
---
description: "{{description}}"
tools: {{tools}}
---

# {{title}}

@ -0,0 +1,4 @@
---
description: "{{description}}"
tools: {{tools}}
---

@ -0,0 +1,5 @@
---
name: '{{name}}'
description: 'BMAD {{name}} agent'
mode: 'primary'
---

@ -0,0 +1,4 @@
---
name: '{{name}}'
description: 'BMAD {{name}} command'
---

@ -0,0 +1,4 @@
---
name: '{{icon}} {{title}}'
description: 'Use for {{title}} tasks'
---

@ -0,0 +1,4 @@
---
description: "{{name}}"
always: true
---

@ -0,0 +1,4 @@
---
description: {{name}}
auto_execution_mode: {{autoExecMode}}
---

@ -1,4 +1,5 @@
---
name: '{{name}}'
description: '{{description}}'
---

@ -1,313 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const fs = require('fs-extra');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
|
||||
/**
|
||||
* Trae IDE setup handler
|
||||
*/
|
||||
class TraeSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('trae', 'Trae');
|
||||
this.configDir = '.trae';
|
||||
this.rulesDir = 'rules';
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Trae IDE configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Create .trae/rules directory
|
||||
const traeDir = path.join(projectDir, this.configDir);
|
||||
const rulesDir = path.join(traeDir, this.rulesDir);
|
||||
|
||||
await this.ensureDir(rulesDir);
|
||||
|
||||
// Clean up any existing BMAD files before reinstalling
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Get tasks, tools, and workflows (standalone only)
|
||||
const tasks = await this.getTasks(bmadDir, true);
|
||||
const tools = await this.getTools(bmadDir, true);
|
||||
const workflows = await this.getWorkflows(bmadDir, true);
|
||||
|
||||
// Process agents as rules with bmad- prefix
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const processedContent = await this.createAgentRule(artifact, bmadDir, projectDir);
|
||||
|
||||
// Use bmad- prefix: bmad-agent-{module}-{name}.md
|
||||
const targetPath = path.join(rulesDir, `bmad-agent-${artifact.module}-${artifact.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
agentCount++;
|
||||
}
|
||||
|
||||
// Process tasks as rules with bmad- prefix
|
||||
let taskCount = 0;
|
||||
for (const task of tasks) {
|
||||
const content = await this.readFile(task.path);
|
||||
const processedContent = this.createTaskRule(task, content);
|
||||
|
||||
// Use bmad- prefix: bmad-task-{module}-{name}.md
|
||||
const targetPath = path.join(rulesDir, `bmad-task-${task.module}-${task.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
taskCount++;
|
||||
}
|
||||
|
||||
// Process tools as rules with bmad- prefix
|
||||
let toolCount = 0;
|
||||
for (const tool of tools) {
|
||||
const content = await this.readFile(tool.path);
|
||||
const processedContent = this.createToolRule(tool, content);
|
||||
|
||||
// Use bmad- prefix: bmad-tool-{module}-{name}.md
|
||||
const targetPath = path.join(rulesDir, `bmad-tool-${tool.module}-${tool.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
toolCount++;
|
||||
}
|
||||
|
||||
// Process workflows as rules with bmad- prefix
|
||||
let workflowCount = 0;
|
||||
for (const workflow of workflows) {
|
||||
const content = await this.readFile(workflow.path);
|
||||
const processedContent = this.createWorkflowRule(workflow, content);
|
||||
|
||||
// Use bmad- prefix: bmad-workflow-{module}-{name}.md
|
||||
const targetPath = path.join(rulesDir, `bmad-workflow-${workflow.module}-${workflow.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
workflowCount++;
|
||||
}
|
||||
|
||||
const totalRules = agentCount + taskCount + toolCount + workflowCount;
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agent rules created`));
|
||||
console.log(chalk.dim(` - ${taskCount} task rules created`));
|
||||
console.log(chalk.dim(` - ${toolCount} tool rules created`));
|
||||
console.log(chalk.dim(` - ${workflowCount} workflow rules created`));
|
||||
console.log(chalk.dim(` - Total: ${totalRules} rules`));
|
||||
console.log(chalk.dim(` - Rules directory: ${path.relative(projectDir, rulesDir)}`));
|
||||
console.log(chalk.dim(` - Agents can be activated with @{agent-name}`));
|
||||
|
||||
return {
|
||||
success: true,
|
||||
rules: totalRules,
|
||||
agents: agentCount,
|
||||
tasks: taskCount,
|
||||
tools: toolCount,
|
||||
workflows: workflowCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create rule content for an agent
|
||||
*/
|
||||
async createAgentRule(artifact, bmadDir, projectDir) {
|
||||
// Strip frontmatter from launcher
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
const contentWithoutFrontmatter = artifact.content.replace(frontmatterRegex, '').trim();
|
||||
|
||||
// Extract metadata from launcher content
|
||||
const titleMatch = artifact.content.match(/description:\s*"([^"]+)"/);
|
||||
const title = titleMatch ? titleMatch[1] : this.formatTitle(artifact.name);
|
||||
|
||||
// Calculate relative path for reference
|
||||
const relativePath = path.relative(projectDir, artifact.sourcePath).replaceAll('\\', '/');
|
||||
|
||||
let ruleContent = `# ${title} Agent Rule
|
||||
|
||||
This rule is triggered when the user types \`@${artifact.name}\` and activates the ${title} agent persona.
|
||||
|
||||
## Agent Activation
|
||||
|
||||
${contentWithoutFrontmatter}
|
||||
|
||||
## File Reference
|
||||
|
||||
The full agent definition is located at: \`${relativePath}\`
|
||||
`;
|
||||
|
||||
return ruleContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create rule content for a task
|
||||
*/
|
||||
createTaskRule(task, content) {
|
||||
// Extract task name from content
|
||||
const nameMatch = content.match(/name="([^"]+)"/);
|
||||
const taskName = nameMatch ? nameMatch[1] : this.formatTitle(task.name);
|
||||
|
||||
let ruleContent = `# ${taskName} Task Rule
|
||||
|
||||
This rule defines the ${taskName} task workflow.
|
||||
|
||||
## Task Definition
|
||||
|
||||
When this task is triggered, execute the following workflow:
|
||||
|
||||
${content}
|
||||
|
||||
## Usage
|
||||
|
||||
Reference this task with \`@task-${task.name}\` to execute the defined workflow.
|
||||
|
||||
## Module
|
||||
|
||||
Part of the BMAD ${task.module.toUpperCase()} module.
|
||||
`;
|
||||
|
||||
return ruleContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create rule content for a tool
|
||||
*/
|
||||
createToolRule(tool, content) {
|
||||
// Extract tool name from content
|
||||
const nameMatch = content.match(/name="([^"]+)"/);
|
||||
const toolName = nameMatch ? nameMatch[1] : this.formatTitle(tool.name);
|
||||
|
||||
let ruleContent = `# ${toolName} Tool Rule
|
||||
|
||||
This rule defines the ${toolName} tool.
|
||||
|
||||
## Tool Definition
|
||||
|
||||
When this tool is triggered, execute the following:
|
||||
|
||||
${content}
|
||||
|
||||
## Usage
|
||||
|
||||
Reference this tool with \`@tool-${tool.name}\` to execute it.
|
||||
|
||||
## Module
|
||||
|
||||
Part of the BMAD ${tool.module.toUpperCase()} module.
|
||||
`;
|
||||
|
||||
return ruleContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create rule content for a workflow
|
||||
*/
|
||||
createWorkflowRule(workflow, content) {
|
||||
let ruleContent = `# ${workflow.name} Workflow Rule
|
||||
|
||||
This rule defines the ${workflow.name} workflow.
|
||||
|
||||
## Workflow Description
|
||||
|
||||
${workflow.description || 'No description provided'}
|
||||
|
||||
## Workflow Definition
|
||||
|
||||
${content}
|
||||
|
||||
## Usage
|
||||
|
||||
Reference this workflow with \`@workflow-${workflow.name}\` to execute the guided workflow.
|
||||
|
||||
## Module
|
||||
|
||||
Part of the BMAD ${workflow.module.toUpperCase()} module.
|
||||
`;
|
||||
|
||||
return ruleContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format agent/task name as title
|
||||
*/
|
||||
formatTitle(name) {
|
||||
return name
|
||||
.split('-')
|
||||
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup Trae configuration - surgically remove only BMAD files
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
const rulesPath = path.join(projectDir, this.configDir, this.rulesDir);
|
||||
|
||||
if (await fs.pathExists(rulesPath)) {
|
||||
// Remove any bmad* files (cleans up old bmad- and bmad: formats)
|
||||
const files = await fs.readdir(rulesPath);
|
||||
let removed = 0;
|
||||
|
||||
for (const file of files) {
|
||||
if (file.startsWith('bmad') && file.endsWith('.md')) {
|
||||
await fs.remove(path.join(rulesPath, file));
|
||||
removed++;
|
||||
}
|
||||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(chalk.dim(` Cleaned up ${removed} existing BMAD rules`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for Trae
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object} Installation result
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const traeDir = path.join(projectDir, this.configDir);
|
||||
const rulesDir = path.join(traeDir, this.rulesDir);
|
||||
|
||||
// Create .trae/rules directory if it doesn't exist
|
||||
await fs.ensureDir(rulesDir);
|
||||
|
||||
// Create custom agent launcher
|
||||
const launcherContent = `# ${agentName} Custom Agent
|
||||
|
||||
**⚠️ IMPORTANT**: Run @${agentPath} first to load the complete agent!
|
||||
|
||||
This is a launcher for the custom BMAD agent "${agentName}".
|
||||
|
||||
## Usage
|
||||
1. First run: \`${agentPath}\` to load the complete agent
|
||||
2. Then use this rule to activate ${agentName}
|
||||
|
||||
The agent will follow the persona and instructions from the main agent file.
|
||||
|
||||
---
|
||||
|
||||
*Generated by BMAD Method*`;
|
||||
|
||||
const fileName = `bmad-agent-custom-${agentName.toLowerCase()}.md`;
|
||||
const launcherPath = path.join(rulesDir, fileName);
|
||||
|
||||
// Write the launcher file
|
||||
await fs.writeFile(launcherPath, launcherContent, 'utf8');
|
||||
|
||||
return {
|
||||
ide: 'trae',
|
||||
path: path.relative(projectDir, launcherPath),
|
||||
command: agentName,
|
||||
type: 'custom-agent-launcher',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { TraeSetup };
|
||||
|
|
@ -1,258 +0,0 @@
|
|||
const path = require('node:path');
|
||||
const { BaseIdeSetup } = require('./_base-ide');
|
||||
const chalk = require('chalk');
|
||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||
|
||||
/**
|
||||
* Windsurf IDE setup handler
|
||||
*/
|
||||
class WindsurfSetup extends BaseIdeSetup {
|
||||
constructor() {
|
||||
super('windsurf', 'Windsurf', true); // preferred IDE
|
||||
this.configDir = '.windsurf';
|
||||
this.workflowsDir = 'workflows';
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Windsurf IDE configuration
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} bmadDir - BMAD installation directory
|
||||
* @param {Object} options - Setup options
|
||||
*/
|
||||
async setup(projectDir, bmadDir, options = {}) {
|
||||
console.log(chalk.cyan(`Setting up ${this.name}...`));
|
||||
|
||||
// Create .windsurf/workflows/bmad directory structure
|
||||
const windsurfDir = path.join(projectDir, this.configDir);
|
||||
const workflowsDir = path.join(windsurfDir, this.workflowsDir);
|
||||
const bmadWorkflowsDir = path.join(workflowsDir, 'bmad');
|
||||
|
||||
await this.ensureDir(bmadWorkflowsDir);
|
||||
|
||||
// Clean up any existing BMAD workflows before reinstalling
|
||||
await this.cleanup(projectDir);
|
||||
|
||||
// Generate agent launchers
|
||||
const agentGen = new AgentCommandGenerator(this.bmadFolderName);
|
||||
const { artifacts: agentArtifacts } = await agentGen.collectAgentArtifacts(bmadDir, options.selectedModules || []);
|
||||
|
||||
// Convert artifacts to agent format for module organization
|
||||
const agents = agentArtifacts.map((a) => ({ module: a.module, name: a.name }));
|
||||
|
||||
// Get tasks, tools, and workflows (standalone only)
|
||||
const tasks = await this.getTasks(bmadDir, true);
|
||||
const tools = await this.getTools(bmadDir, true);
|
||||
const workflows = await this.getWorkflows(bmadDir, true);
|
||||
|
||||
// Create directories for each module under bmad/
|
||||
const modules = new Set();
|
||||
for (const item of [...agents, ...tasks, ...tools, ...workflows]) modules.add(item.module);
|
||||
|
||||
for (const module of modules) {
|
||||
await this.ensureDir(path.join(bmadWorkflowsDir, module));
|
||||
await this.ensureDir(path.join(bmadWorkflowsDir, module, 'agents'));
|
||||
await this.ensureDir(path.join(bmadWorkflowsDir, module, 'tasks'));
|
||||
await this.ensureDir(path.join(bmadWorkflowsDir, module, 'tools'));
|
||||
await this.ensureDir(path.join(bmadWorkflowsDir, module, 'workflows'));
|
||||
}
|
||||
|
||||
// Process agent launchers as workflows with organized structure
|
||||
let agentCount = 0;
|
||||
for (const artifact of agentArtifacts) {
|
||||
const processedContent = this.createWorkflowContent({ module: artifact.module, name: artifact.name }, artifact.content);
|
||||
|
||||
// Organized path: bmad/module/agents/agent-name.md
|
||||
const targetPath = path.join(bmadWorkflowsDir, artifact.module, 'agents', `${artifact.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
agentCount++;
|
||||
}
|
||||
|
||||
// Process tasks as workflows with organized structure
|
||||
let taskCount = 0;
|
||||
for (const task of tasks) {
|
||||
const content = await this.readFile(task.path);
|
||||
const processedContent = this.createTaskWorkflowContent(task, content);
|
||||
|
||||
// Organized path: bmad/module/tasks/task-name.md
|
||||
const targetPath = path.join(bmadWorkflowsDir, task.module, 'tasks', `${task.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
taskCount++;
|
||||
}
|
||||
|
||||
// Process tools as workflows with organized structure
|
||||
let toolCount = 0;
|
||||
for (const tool of tools) {
|
||||
const content = await this.readFile(tool.path);
|
||||
const processedContent = this.createToolWorkflowContent(tool, content);
|
||||
|
||||
// Organized path: bmad/module/tools/tool-name.md
|
||||
const targetPath = path.join(bmadWorkflowsDir, tool.module, 'tools', `${tool.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
toolCount++;
|
||||
}
|
||||
|
||||
// Process workflows with organized structure
|
||||
let workflowCount = 0;
|
||||
for (const workflow of workflows) {
|
||||
const content = await this.readFile(workflow.path);
|
||||
const processedContent = this.createWorkflowWorkflowContent(workflow, content);
|
||||
|
||||
// Organized path: bmad/module/workflows/workflow-name.md
|
||||
const targetPath = path.join(bmadWorkflowsDir, workflow.module, 'workflows', `${workflow.name}.md`);
|
||||
await this.writeFile(targetPath, processedContent);
|
||||
workflowCount++;
|
||||
}
|
||||
|
||||
console.log(chalk.green(`✓ ${this.name} configured:`));
|
||||
console.log(chalk.dim(` - ${agentCount} agents installed`));
|
||||
console.log(chalk.dim(` - ${taskCount} tasks installed`));
|
||||
console.log(chalk.dim(` - ${toolCount} tools installed`));
|
||||
console.log(chalk.dim(` - ${workflowCount} workflows installed`));
|
||||
console.log(chalk.dim(` - Organized in modules: ${[...modules].join(', ')}`));
|
||||
console.log(chalk.dim(` - Workflows directory: ${path.relative(projectDir, workflowsDir)}`));
|
||||
|
||||
// Provide additional configuration hints
|
||||
if (options.showHints !== false) {
|
||||
console.log(chalk.dim('\n Windsurf workflow settings:'));
|
||||
console.log(chalk.dim(' - auto_execution_mode: 3 (recommended for agents)'));
|
||||
console.log(chalk.dim(' - auto_execution_mode: 2 (recommended for tasks/tools)'));
|
||||
console.log(chalk.dim(' - auto_execution_mode: 1 (recommended for workflows)'));
|
||||
console.log(chalk.dim(' - Workflows can be triggered via the Windsurf menu'));
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
agents: agentCount,
|
||||
tasks: taskCount,
|
||||
tools: toolCount,
|
||||
workflows: workflowCount,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow content for an agent
|
||||
*/
|
||||
createWorkflowContent(agent, content) {
|
||||
// Strip existing frontmatter from launcher
|
||||
const frontmatterRegex = /^---\s*\n[\s\S]*?\n---\s*\n/;
|
||||
const contentWithoutFrontmatter = content.replace(frontmatterRegex, '');
|
||||
|
||||
// Create simple Windsurf frontmatter matching original format
|
||||
let workflowContent = `---
|
||||
description: ${agent.name}
|
||||
auto_execution_mode: 3
|
||||
---
|
||||
|
||||
${contentWithoutFrontmatter}`;
|
||||
|
||||
return workflowContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow content for a task
|
||||
*/
|
||||
createTaskWorkflowContent(task, content) {
|
||||
// Create simple Windsurf frontmatter matching original format
|
||||
let workflowContent = `---
|
||||
description: task-${task.name}
|
||||
auto_execution_mode: 2
|
||||
---
|
||||
|
||||
${content}`;
|
||||
|
||||
return workflowContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow content for a tool
|
||||
*/
|
||||
createToolWorkflowContent(tool, content) {
|
||||
// Create simple Windsurf frontmatter matching original format
|
||||
let workflowContent = `---
|
||||
description: tool-${tool.name}
|
||||
auto_execution_mode: 2
|
||||
---
|
||||
|
||||
${content}`;
|
||||
|
||||
return workflowContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create workflow content for a workflow
|
||||
*/
|
||||
createWorkflowWorkflowContent(workflow, content) {
|
||||
// Create simple Windsurf frontmatter matching original format
|
||||
let workflowContent = `---
|
||||
description: ${workflow.name}
|
||||
auto_execution_mode: 1
|
||||
---
|
||||
|
||||
${content}`;
|
||||
|
||||
return workflowContent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup Windsurf configuration - surgically remove only BMAD files
|
||||
*/
|
||||
async cleanup(projectDir) {
|
||||
const fs = require('fs-extra');
|
||||
const bmadPath = path.join(projectDir, this.configDir, this.workflowsDir, 'bmad');
|
||||
|
||||
if (await fs.pathExists(bmadPath)) {
|
||||
// Remove the entire bmad folder - this is our territory
|
||||
await fs.remove(bmadPath);
|
||||
console.log(chalk.dim(` Cleaned up existing BMAD workflows`));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install a custom agent launcher for Windsurf
|
||||
* @param {string} projectDir - Project directory
|
||||
* @param {string} agentName - Agent name (e.g., "fred-commit-poet")
|
||||
* @param {string} agentPath - Path to compiled agent (relative to project root)
|
||||
* @param {Object} metadata - Agent metadata
|
||||
* @returns {Object|null} Info about created command
|
||||
*/
|
||||
async installCustomAgentLauncher(projectDir, agentName, agentPath, metadata) {
|
||||
const fs = require('fs-extra');
|
||||
const customAgentsDir = path.join(projectDir, this.configDir, this.workflowsDir, 'bmad', 'custom', 'agents');
|
||||
|
||||
if (!(await this.exists(path.join(projectDir, this.configDir)))) {
|
||||
return null; // IDE not configured for this project
|
||||
}
|
||||
|
||||
await this.ensureDir(customAgentsDir);
|
||||
|
||||
const launcherContent = `You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command.
|
||||
|
||||
<agent-activation CRITICAL="TRUE">
|
||||
1. LOAD the FULL agent file from @${agentPath}
|
||||
2. READ its entire contents - this contains the complete agent persona, menu, and instructions
|
||||
3. FOLLOW every step in the <activation> section precisely
|
||||
4. DISPLAY the welcome/greeting as instructed
|
||||
5. PRESENT the numbered menu
|
||||
6. WAIT for user input before proceeding
|
||||
</agent-activation>
|
||||
`;
|
||||
|
||||
// Windsurf uses workflow format with frontmatter
|
||||
const workflowContent = `---
|
||||
description: ${metadata.title || agentName}
|
||||
auto_execution_mode: 3
|
||||
---
|
||||
|
||||
${launcherContent}`;
|
||||
|
||||
const launcherPath = path.join(customAgentsDir, `${agentName}.md`);
|
||||
await fs.writeFile(launcherPath, workflowContent);
|
||||
|
||||
return {
|
||||
path: launcherPath,
|
||||
command: `bmad/custom/agents/${agentName}`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { WindsurfSetup };
|
||||
|
|
@ -9,7 +9,7 @@ const { getProjectRoot } = require('./project-root');
 */
class PlatformCodes {
  constructor() {
    this.configPath = path.join(getProjectRoot(), 'tools', 'platform-codes.yaml');
    this.configPath = path.join(getProjectRoot(), 'tools/cli/installers/lib/ide/platform-codes.yaml');
    this.loadConfig();
  }

@ -363,8 +363,8 @@ class UI {
    const { IdeManager } = require('../installers/lib/ide/manager');
    const ideManager = new IdeManager();

    const preferredIdes = ideManager.getPreferredIdes();
    const otherIdes = ideManager.getOtherIdes();
    const preferredIdes = await ideManager.getPreferredIdes();
    const otherIdes = await ideManager.getOtherIdes();

    // Build grouped options object for groupMultiselect
    const groupedOptions = {};

@ -0,0 +1,51 @@
import { defineConfig } from 'vitest/config';

export default defineConfig({
  test: {
    // Test file patterns
    include: ['test/unit/**/*.test.js', 'test/integration/**/*.test.js'],
    exclude: ['test/test-*.js', 'node_modules/**'],

    // Timeouts
    testTimeout: 10_000, // 10s for unit tests
    hookTimeout: 30_000, // 30s for setup/teardown

    // Parallel execution for speed
    threads: true,
    maxThreads: 4,

    // Coverage configuration (using V8)
    coverage: {
      provider: 'v8',
      reporter: ['text', 'html', 'lcov', 'json-summary'],

      // Files to include in coverage
      include: ['tools/**/*.js', 'src/**/*.js'],

      // Files to exclude from coverage
      exclude: [
        'test/**',
        'tools/flattener/**', // Separate concern
        'tools/bmad-npx-wrapper.js', // Entry point
        'tools/build-docs.js', // Documentation tools
        'tools/check-doc-links.js', // Documentation tools
        '**/*.config.js', // Configuration files
      ],

      // Include all files for accurate coverage
      all: true,

      // Coverage thresholds (fail if below these)
      statements: 85,
      branches: 80,
      functions: 85,
      lines: 85,
    },

    // Global setup file
    setupFiles: ['./test/setup.js'],

    // Environment
    environment: 'node',
  },
});
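
With this config in place, a typical local run would go through the standard Vitest CLI, for example "npx vitest run --coverage". A minimal unit test matching the include pattern above might look like the following sketch; the path to the module under test is a placeholder, not a confirmed location in the repo:

// test/unit/path-utils.test.js
import { describe, it, expect } from 'vitest';
import { createRequire } from 'node:module';

const require = createRequire(import.meta.url);
const { getArtifactSuffix } = require('../../tools/cli/installers/lib/ide/shared/path-utils');

describe('getArtifactSuffix', () => {
  it('falls back to an empty string for unknown artifact types', () => {
    expect(getArtifactSuffix('not-a-real-type')).toBe('');
  });
});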