Compare commits
9 Commits
c892aaafb7...1d4c07df0c

| Author | SHA1 | Date |
|---|---|---|
| | 1d4c07df0c | |
| | 274dea16fa | |
| | dcd581c84a | |
| | 6d84a60a78 | |
| | 59e1b7067c | |
| | 1d8df63ac5 | |
| | 993d02b8b3 | |
| | 5cb5606ba3 | |
| | 5e841f9cac | |
@ -0,0 +1,85 @@

# Security Policy

## Supported Versions

We release security patches for the following versions:

| Version | Supported |
| ------- | ------------------ |
| Latest | :white_check_mark: |
| < Latest | :x: |

We recommend always using the latest version of BMad Method to ensure you have the most recent security updates.

## Reporting a Vulnerability

We take security vulnerabilities seriously. If you discover a security issue, please report it responsibly.

### How to Report

**Do NOT report security vulnerabilities through public GitHub issues.**

Instead, please report them via one of these methods:

1. **GitHub Security Advisories** (Preferred): Use [GitHub's private vulnerability reporting](https://github.com/bmad-code-org/BMAD-METHOD/security/advisories/new) to submit a confidential report.
2. **Discord**: Contact a maintainer directly via DM on our [Discord server](https://discord.gg/gk8jAdXWmj).

### What to Include

Please include as much of the following information as possible:

- Type of vulnerability (e.g., prompt injection, path traversal, etc.)
- Full paths of source file(s) related to the vulnerability
- Step-by-step instructions to reproduce the issue
- Proof-of-concept or exploit code (if available)
- Impact assessment of the vulnerability

### Response Timeline

- **Initial Response**: Within 48 hours of receiving your report
- **Status Update**: Within 7 days with our assessment
- **Resolution Target**: Critical issues within 30 days; other issues within 90 days

### What to Expect

1. We will acknowledge receipt of your report
2. We will investigate and validate the vulnerability
3. We will work on a fix and coordinate disclosure timing with you
4. We will credit you in the security advisory (unless you prefer to remain anonymous)

## Security Scope

### In Scope

- Vulnerabilities in BMad Method core framework code
- Security issues in agent definitions or workflows that could lead to unintended behavior
- Path traversal or file system access issues
- Prompt injection vulnerabilities that bypass intended agent behavior
- Supply chain vulnerabilities in dependencies

### Out of Scope

- Security issues in user-created custom agents or modules
- Vulnerabilities in third-party AI providers (Claude, GPT, etc.)
- Issues that require physical access to a user's machine
- Social engineering attacks
- Denial of service attacks that don't exploit a specific vulnerability

## Security Best Practices for Users

When using BMad Method:

1. **Review Agent Outputs**: Always review AI-generated code before executing it
2. **Limit File Access**: Configure your AI IDE to limit file system access where possible
3. **Keep Updated**: Regularly update to the latest version
4. **Validate Dependencies**: Review any dependencies added by generated code
5. **Environment Isolation**: Consider running AI-assisted development in isolated environments

## Acknowledgments

We appreciate the security research community's efforts in helping keep BMad Method secure. Contributors who report valid security issues will be acknowledged in our security advisories.

---

Thank you for helping keep BMad Method and our community safe.
@ -81,6 +81,21 @@ export default [
      },
    },
 
+  // Test files using Vitest (ES modules)
+  {
+    files: ['test/unit/**/*.js', 'test/integration/**/*.js', 'test/helpers/**/*.js', 'test/setup.js', 'vitest.config.js'],
+    languageOptions: {
+      sourceType: 'module',
+      ecmaVersion: 'latest',
+    },
+    rules: {
+      // Allow dev dependencies in test files
+      'n/no-unpublished-import': 'off',
+      'unicorn/prefer-module': 'off',
+      'no-unused-vars': 'off',
+    },
+  },
+
   // CLI scripts under tools/** and test/**
   {
     files: ['tools/**/*.js', 'tools/**/*.mjs', 'test/**/*.js'],
@ -19,7 +19,6 @@
       "fs-extra": "^11.3.0",
       "glob": "^11.0.3",
       "ignore": "^7.0.5",
-      "inquirer": "^9.3.8",
       "js-yaml": "^4.1.0",
       "ora": "^5.4.1",
       "semver": "^7.6.3",

@ -34,7 +33,10 @@
     "devDependencies": {
       "@astrojs/sitemap": "^3.6.0",
       "@astrojs/starlight": "^0.37.0",
+      "@clack/prompts": "^0.11.0",
       "@eslint/js": "^9.33.0",
+      "@vitest/coverage-v8": "^4.0.16",
+      "@vitest/ui": "^4.0.16",
       "archiver": "^7.0.1",
       "astro": "^5.16.0",
       "c8": "^10.1.3",

@ -50,6 +52,7 @@
       "prettier": "^3.7.4",
       "prettier-plugin-packagejson": "^2.5.19",
       "sharp": "^0.33.5",
+      "vitest": "^4.0.16",
       "yaml-eslint-parser": "^1.2.3",
       "yaml-lint": "^1.7.0"
     },
@ -244,7 +247,6 @@
|
||||||
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
|
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@babel/code-frame": "^7.27.1",
|
"@babel/code-frame": "^7.27.1",
|
||||||
"@babel/generator": "^7.28.5",
|
"@babel/generator": "^7.28.5",
|
||||||
|
|
@ -756,6 +758,29 @@
|
||||||
"node": ">=18"
|
"node": ">=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@clack/core": {
|
||||||
|
"version": "0.5.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@clack/core/-/core-0.5.0.tgz",
|
||||||
|
"integrity": "sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"picocolors": "^1.0.0",
|
||||||
|
"sisteransi": "^1.0.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@clack/prompts": {
|
||||||
|
"version": "0.11.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.11.0.tgz",
|
||||||
|
"integrity": "sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@clack/core": "0.5.0",
|
||||||
|
"picocolors": "^1.0.0",
|
||||||
|
"sisteransi": "^1.0.5"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@colors/colors": {
|
"node_modules/@colors/colors": {
|
||||||
"version": "1.5.0",
|
"version": "1.5.0",
|
||||||
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
|
"resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
|
||||||
|
|
@ -1998,36 +2023,6 @@
|
||||||
"url": "https://opencollective.com/libvips"
|
"url": "https://opencollective.com/libvips"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@inquirer/external-editor": {
|
|
||||||
"version": "1.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/@inquirer/external-editor/-/external-editor-1.0.3.tgz",
|
|
||||||
"integrity": "sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"chardet": "^2.1.1",
|
|
||||||
"iconv-lite": "^0.7.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
},
|
|
||||||
"peerDependencies": {
|
|
||||||
"@types/node": ">=18"
|
|
||||||
},
|
|
||||||
"peerDependenciesMeta": {
|
|
||||||
"@types/node": {
|
|
||||||
"optional": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@inquirer/figures": {
|
|
||||||
"version": "1.0.15",
|
|
||||||
"resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.15.tgz",
|
|
||||||
"integrity": "sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@isaacs/balanced-match": {
|
"node_modules/@isaacs/balanced-match": {
|
||||||
"version": "4.0.1",
|
"version": "4.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
|
||||||
|
|
@ -2993,6 +2988,13 @@
|
||||||
"url": "https://opencollective.com/pkgr"
|
"url": "https://opencollective.com/pkgr"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@polka/url": {
|
||||||
|
"version": "1.0.0-next.29",
|
||||||
|
"resolved": "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz",
|
||||||
|
"integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@rollup/pluginutils": {
|
"node_modules/@rollup/pluginutils": {
|
||||||
"version": "5.3.0",
|
"version": "5.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz",
|
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz",
|
||||||
|
|
@ -3445,6 +3447,13 @@
|
||||||
"@sinonjs/commons": "^3.0.1"
|
"@sinonjs/commons": "^3.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@standard-schema/spec": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@swc/helpers": {
|
"node_modules/@swc/helpers": {
|
||||||
"version": "0.5.18",
|
"version": "0.5.18",
|
||||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.18.tgz",
|
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.18.tgz",
|
||||||
|
|
@ -3511,6 +3520,17 @@
|
||||||
"@babel/types": "^7.28.2"
|
"@babel/types": "^7.28.2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/chai": {
|
||||||
|
"version": "5.2.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz",
|
||||||
|
"integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/deep-eql": "*",
|
||||||
|
"assertion-error": "^2.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/@types/debug": {
|
"node_modules/@types/debug": {
|
||||||
"version": "4.1.12",
|
"version": "4.1.12",
|
||||||
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
|
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
|
||||||
|
|
@ -3520,6 +3540,13 @@
|
||||||
"@types/ms": "*"
|
"@types/ms": "*"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/deep-eql": {
|
||||||
|
"version": "4.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz",
|
||||||
|
"integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@types/estree": {
|
"node_modules/@types/estree": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
|
||||||
|
|
@ -3641,9 +3668,8 @@
|
||||||
"version": "25.0.3",
|
"version": "25.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz",
|
||||||
"integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
|
"integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
|
||||||
"devOptional": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"undici-types": "~7.16.0"
|
"undici-types": "~7.16.0"
|
||||||
}
|
}
|
||||||
|
|
@ -3964,6 +3990,171 @@
|
||||||
"win32"
|
"win32"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
"node_modules/@vitest/coverage-v8": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-2rNdjEIsPRzsdu6/9Eq0AYAzYdpP6Bx9cje9tL3FE5XzXRQF1fNU9pe/1yE8fCrS0HD+fBtt6gLPh6LI57tX7A==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@bcoe/v8-coverage": "^1.0.2",
|
||||||
|
"@vitest/utils": "4.0.16",
|
||||||
|
"ast-v8-to-istanbul": "^0.3.8",
|
||||||
|
"istanbul-lib-coverage": "^3.2.2",
|
||||||
|
"istanbul-lib-report": "^3.0.1",
|
||||||
|
"istanbul-lib-source-maps": "^5.0.6",
|
||||||
|
"istanbul-reports": "^3.2.0",
|
||||||
|
"magicast": "^0.5.1",
|
||||||
|
"obug": "^2.1.1",
|
||||||
|
"std-env": "^3.10.0",
|
||||||
|
"tinyrainbow": "^3.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@vitest/browser": "4.0.16",
|
||||||
|
"vitest": "4.0.16"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@vitest/browser": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/expect": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@standard-schema/spec": "^1.0.0",
|
||||||
|
"@types/chai": "^5.2.2",
|
||||||
|
"@vitest/spy": "4.0.16",
|
||||||
|
"@vitest/utils": "4.0.16",
|
||||||
|
"chai": "^6.2.1",
|
||||||
|
"tinyrainbow": "^3.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/mocker": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/spy": "4.0.16",
|
||||||
|
"estree-walker": "^3.0.3",
|
||||||
|
"magic-string": "^0.30.21"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"msw": "^2.4.9",
|
||||||
|
"vite": "^6.0.0 || ^7.0.0-0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"msw": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"vite": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/pretty-format": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"tinyrainbow": "^3.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/runner": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/utils": "4.0.16",
|
||||||
|
"pathe": "^2.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/snapshot": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/pretty-format": "4.0.16",
|
||||||
|
"magic-string": "^0.30.21",
|
||||||
|
"pathe": "^2.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/spy": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/ui": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/ui/-/ui-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-rkoPH+RqWopVxDnCBE/ysIdfQ2A7j1eDmW8tCxxrR9nnFBa9jKf86VgsSAzxBd1x+ny0GC4JgiD3SNfRHv3pOg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/utils": "4.0.16",
|
||||||
|
"fflate": "^0.8.2",
|
||||||
|
"flatted": "^3.3.3",
|
||||||
|
"pathe": "^2.0.3",
|
||||||
|
"sirv": "^3.0.2",
|
||||||
|
"tinyglobby": "^0.2.15",
|
||||||
|
"tinyrainbow": "^3.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"vitest": "4.0.16"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@vitest/utils": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/pretty-format": "4.0.16",
|
||||||
|
"tinyrainbow": "^3.0.3"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/abort-controller": {
|
"node_modules/abort-controller": {
|
||||||
"version": "3.0.0",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz",
|
||||||
|
|
@ -3983,7 +4174,6 @@
|
||||||
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
|
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"acorn": "bin/acorn"
|
"acorn": "bin/acorn"
|
||||||
},
|
},
|
||||||
|
|
@ -4031,6 +4221,7 @@
|
||||||
"version": "4.3.2",
|
"version": "4.3.2",
|
||||||
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
|
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
|
||||||
"integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==",
|
"integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==",
|
||||||
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"type-fest": "^0.21.3"
|
"type-fest": "^0.21.3"
|
||||||
|
|
@ -4046,6 +4237,7 @@
|
||||||
"version": "0.21.3",
|
"version": "0.21.3",
|
||||||
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
|
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
|
||||||
"integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
|
"integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
|
||||||
|
"dev": true,
|
||||||
"license": "(MIT OR CC0-1.0)",
|
"license": "(MIT OR CC0-1.0)",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
|
|
@ -4274,6 +4466,35 @@
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/assertion-error": {
|
||||||
|
"version": "2.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
|
||||||
|
"integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ast-v8-to-istanbul": {
|
||||||
|
"version": "0.3.10",
|
||||||
|
"resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.10.tgz",
|
||||||
|
"integrity": "sha512-p4K7vMz2ZSk3wN8l5o3y2bJAoZXT3VuJI5OLTATY/01CYWumWvwkUw0SqDBnNq6IiTO3qDa1eSQDibAV8g7XOQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@jridgewell/trace-mapping": "^0.3.31",
|
||||||
|
"estree-walker": "^3.0.3",
|
||||||
|
"js-tokens": "^9.0.1"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ast-v8-to-istanbul/node_modules/js-tokens": {
|
||||||
|
"version": "9.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz",
|
||||||
|
"integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/astring": {
|
"node_modules/astring": {
|
||||||
"version": "1.9.0",
|
"version": "1.9.0",
|
||||||
"resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz",
|
"resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz",
|
||||||
|
|
@ -4290,7 +4511,6 @@
|
||||||
"integrity": "sha512-6mF/YrvwwRxLTu+aMEa5pwzKUNl5ZetWbTyZCs9Um0F12HUmxUiF5UHiZPy4rifzU3gtpM3xP2DfdmkNX9eZRg==",
|
"integrity": "sha512-6mF/YrvwwRxLTu+aMEa5pwzKUNl5ZetWbTyZCs9Um0F12HUmxUiF5UHiZPy4rifzU3gtpM3xP2DfdmkNX9eZRg==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@astrojs/compiler": "^2.13.0",
|
"@astrojs/compiler": "^2.13.0",
|
||||||
"@astrojs/internal-helpers": "0.7.5",
|
"@astrojs/internal-helpers": "0.7.5",
|
||||||
|
|
@ -5358,7 +5578,6 @@
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"baseline-browser-mapping": "^2.9.0",
|
"baseline-browser-mapping": "^2.9.0",
|
||||||
"caniuse-lite": "^1.0.30001759",
|
"caniuse-lite": "^1.0.30001759",
|
||||||
|
|
@ -5525,6 +5744,16 @@
|
||||||
"url": "https://github.com/sponsors/wooorm"
|
"url": "https://github.com/sponsors/wooorm"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/chai": {
|
||||||
|
"version": "6.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz",
|
||||||
|
"integrity": "sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/chalk": {
|
"node_modules/chalk": {
|
||||||
"version": "4.1.2",
|
"version": "4.1.2",
|
||||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
||||||
|
|
@ -5601,12 +5830,6 @@
|
||||||
"url": "https://github.com/sponsors/wooorm"
|
"url": "https://github.com/sponsors/wooorm"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/chardet": {
|
|
||||||
"version": "2.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/chardet/-/chardet-2.1.1.tgz",
|
|
||||||
"integrity": "sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/chokidar": {
|
"node_modules/chokidar": {
|
||||||
"version": "4.0.3",
|
"version": "4.0.3",
|
||||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
|
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
|
||||||
|
|
@ -5787,15 +6010,6 @@
|
||||||
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/cli-width": {
|
|
||||||
"version": "4.1.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
|
|
||||||
"integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==",
|
|
||||||
"license": "ISC",
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 12"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/cliui": {
|
"node_modules/cliui": {
|
||||||
"version": "8.0.1",
|
"version": "8.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
|
||||||
|
|
@ -6689,7 +6903,6 @@
|
||||||
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@eslint-community/eslint-utils": "^4.8.0",
|
"@eslint-community/eslint-utils": "^4.8.0",
|
||||||
"@eslint-community/regexpp": "^4.12.1",
|
"@eslint-community/regexpp": "^4.12.1",
|
||||||
|
|
@ -7276,6 +7489,16 @@
|
||||||
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
"node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/expect-type": {
|
||||||
|
"version": "1.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
|
||||||
|
"integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "Apache-2.0",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/expressive-code": {
|
"node_modules/expressive-code": {
|
||||||
"version": "0.41.5",
|
"version": "0.41.5",
|
||||||
"resolved": "https://registry.npmjs.org/expressive-code/-/expressive-code-0.41.5.tgz",
|
"resolved": "https://registry.npmjs.org/expressive-code/-/expressive-code-0.41.5.tgz",
|
||||||
|
|
@ -7391,6 +7614,13 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/fflate": {
|
||||||
|
"version": "0.8.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz",
|
||||||
|
"integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/figlet": {
|
"node_modules/figlet": {
|
||||||
"version": "1.9.4",
|
"version": "1.9.4",
|
||||||
"resolved": "https://registry.npmjs.org/figlet/-/figlet-1.9.4.tgz",
|
"resolved": "https://registry.npmjs.org/figlet/-/figlet-1.9.4.tgz",
|
||||||
|
|
@ -8269,22 +8499,6 @@
|
||||||
"@babel/runtime": "^7.23.2"
|
"@babel/runtime": "^7.23.2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/iconv-lite": {
|
|
||||||
"version": "0.7.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz",
|
|
||||||
"integrity": "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"safer-buffer": ">= 2.1.2 < 3.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/express"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/ieee754": {
|
"node_modules/ieee754": {
|
||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
|
||||||
|
|
@ -8420,43 +8634,6 @@
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/inquirer": {
|
|
||||||
"version": "9.3.8",
|
|
||||||
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-9.3.8.tgz",
|
|
||||||
"integrity": "sha512-pFGGdaHrmRKMh4WoDDSowddgjT1Vkl90atobmTeSmcPGdYiwikch/m/Ef5wRaiamHejtw0cUUMMerzDUXCci2w==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@inquirer/external-editor": "^1.0.2",
|
|
||||||
"@inquirer/figures": "^1.0.3",
|
|
||||||
"ansi-escapes": "^4.3.2",
|
|
||||||
"cli-width": "^4.1.0",
|
|
||||||
"mute-stream": "1.0.0",
|
|
||||||
"ora": "^5.4.1",
|
|
||||||
"run-async": "^3.0.0",
|
|
||||||
"rxjs": "^7.8.1",
|
|
||||||
"string-width": "^4.2.3",
|
|
||||||
"strip-ansi": "^6.0.1",
|
|
||||||
"wrap-ansi": "^6.2.0",
|
|
||||||
"yoctocolors-cjs": "^2.1.2"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/inquirer/node_modules/wrap-ansi": {
|
|
||||||
"version": "6.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
|
|
||||||
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"ansi-styles": "^4.0.0",
|
|
||||||
"string-width": "^4.1.0",
|
|
||||||
"strip-ansi": "^6.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/iron-webcrypto": {
|
"node_modules/iron-webcrypto": {
|
||||||
"version": "1.2.1",
|
"version": "1.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/iron-webcrypto/-/iron-webcrypto-1.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/iron-webcrypto/-/iron-webcrypto-1.2.1.tgz",
|
||||||
|
|
@ -10304,7 +10481,6 @@
|
||||||
"integrity": "sha512-p3JTemJJbkiMjXEMiFwgm0v6ym5g8K+b2oDny+6xdl300tUKySxvilJQLSea48C6OaYNmO30kH9KxpiAg5bWJw==",
|
"integrity": "sha512-p3JTemJJbkiMjXEMiFwgm0v6ym5g8K+b2oDny+6xdl300tUKySxvilJQLSea48C6OaYNmO30kH9KxpiAg5bWJw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"globby": "15.0.0",
|
"globby": "15.0.0",
|
||||||
"js-yaml": "4.1.1",
|
"js-yaml": "4.1.1",
|
||||||
|
|
@ -11576,15 +11752,6 @@
|
||||||
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/mute-stream": {
|
|
||||||
"version": "1.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz",
|
|
||||||
"integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==",
|
|
||||||
"license": "ISC",
|
|
||||||
"engines": {
|
|
||||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/nano-spawn": {
|
"node_modules/nano-spawn": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz",
|
||||||
|
|
@ -11784,6 +11951,17 @@
|
||||||
"url": "https://github.com/fb55/nth-check?sponsor=1"
|
"url": "https://github.com/fb55/nth-check?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/obug": {
|
||||||
|
"version": "2.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz",
|
||||||
|
"integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==",
|
||||||
|
"dev": true,
|
||||||
|
"funding": [
|
||||||
|
"https://github.com/sponsors/sxzz",
|
||||||
|
"https://opencollective.com/debug"
|
||||||
|
],
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/ofetch": {
|
"node_modules/ofetch": {
|
||||||
"version": "1.5.1",
|
"version": "1.5.1",
|
||||||
"resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.5.1.tgz",
|
"resolved": "https://registry.npmjs.org/ofetch/-/ofetch-1.5.1.tgz",
|
||||||
|
|
@ -12229,6 +12407,13 @@
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/pathe": {
|
||||||
|
"version": "2.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz",
|
||||||
|
"integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/piccolore": {
|
"node_modules/piccolore": {
|
||||||
"version": "0.1.3",
|
"version": "0.1.3",
|
||||||
"resolved": "https://registry.npmjs.org/piccolore/-/piccolore-0.1.3.tgz",
|
"resolved": "https://registry.npmjs.org/piccolore/-/piccolore-0.1.3.tgz",
|
||||||
|
|
@ -12378,7 +12563,6 @@
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"nanoid": "^3.3.11",
|
"nanoid": "^3.3.11",
|
||||||
"picocolors": "^1.1.1",
|
"picocolors": "^1.1.1",
|
||||||
|
|
@ -12444,7 +12628,6 @@
|
||||||
"integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==",
|
"integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"prettier": "bin/prettier.cjs"
|
"prettier": "bin/prettier.cjs"
|
||||||
},
|
},
|
||||||
|
|
@ -13273,7 +13456,6 @@
|
||||||
"integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==",
|
"integrity": "sha512-3nk8Y3a9Ea8szgKhinMlGMhGMw89mqule3KWczxhIzqudyHdCIOHw8WJlj/r329fACjKLEh13ZSk7oE22kyeIw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@types/estree": "1.0.8"
|
"@types/estree": "1.0.8"
|
||||||
},
|
},
|
||||||
|
|
@ -13310,15 +13492,6 @@
|
||||||
"fsevents": "~2.3.2"
|
"fsevents": "~2.3.2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/run-async": {
|
|
||||||
"version": "3.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/run-async/-/run-async-3.0.0.tgz",
|
|
||||||
"integrity": "sha512-540WwVDOMxA6dN6We19EcT9sc3hkXPw5mzRNGM3FkdN/vtE9NFvj5lFAPNwUDmJjXidm3v7TC1cTE7t17Ulm1Q==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.12.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/run-parallel": {
|
"node_modules/run-parallel": {
|
||||||
"version": "1.2.0",
|
"version": "1.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
|
||||||
|
|
@ -13343,15 +13516,6 @@
|
||||||
"queue-microtask": "^1.2.2"
|
"queue-microtask": "^1.2.2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/rxjs": {
|
|
||||||
"version": "7.8.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz",
|
|
||||||
"integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"dependencies": {
|
|
||||||
"tslib": "^2.1.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/safe-buffer": {
|
"node_modules/safe-buffer": {
|
||||||
"version": "5.2.1",
|
"version": "5.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
|
||||||
|
|
@ -13372,12 +13536,6 @@
|
||||||
],
|
],
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/safer-buffer": {
|
|
||||||
"version": "2.1.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
|
||||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/sax": {
|
"node_modules/sax": {
|
||||||
"version": "1.4.3",
|
"version": "1.4.3",
|
||||||
"resolved": "https://registry.npmjs.org/sax/-/sax-1.4.3.tgz",
|
"resolved": "https://registry.npmjs.org/sax/-/sax-1.4.3.tgz",
|
||||||
|
|
@ -13481,6 +13639,13 @@
|
||||||
"@types/hast": "^3.0.4"
|
"@types/hast": "^3.0.4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/siginfo": {
|
||||||
|
"version": "2.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
|
||||||
|
"integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "ISC"
|
||||||
|
},
|
||||||
"node_modules/signal-exit": {
|
"node_modules/signal-exit": {
|
||||||
"version": "4.1.0",
|
"version": "4.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
|
||||||
|
|
@ -13510,6 +13675,21 @@
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/sirv": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@polka/url": "^1.0.0-next.24",
|
||||||
|
"mrmime": "^2.0.0",
|
||||||
|
"totalist": "^3.0.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/sisteransi": {
|
"node_modules/sisteransi": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz",
|
||||||
|
|
@ -13721,6 +13901,20 @@
|
||||||
"node": ">=8"
|
"node": ">=8"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/stackback": {
|
||||||
|
"version": "0.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
|
||||||
|
"integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/std-env": {
|
||||||
|
"version": "3.10.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz",
|
||||||
|
"integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/stream-replace-string": {
|
"node_modules/stream-replace-string": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/stream-replace-string/-/stream-replace-string-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/stream-replace-string/-/stream-replace-string-2.0.0.tgz",
|
||||||
|
|
@ -14135,6 +14329,13 @@
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/tinybench": {
|
||||||
|
"version": "2.9.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz",
|
||||||
|
"integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/tinyexec": {
|
"node_modules/tinyexec": {
|
||||||
"version": "1.0.2",
|
"version": "1.0.2",
|
||||||
"resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz",
|
"resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz",
|
||||||
|
|
@ -14162,6 +14363,16 @@
|
||||||
"url": "https://github.com/sponsors/SuperchupuDev"
|
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tinyrainbow": {
|
||||||
|
"version": "3.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz",
|
||||||
|
"integrity": "sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/tmpl": {
|
"node_modules/tmpl": {
|
||||||
"version": "1.0.5",
|
"version": "1.0.5",
|
||||||
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
|
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz",
|
||||||
|
|
@ -14182,6 +14393,16 @@
|
||||||
"node": ">=8.0"
|
"node": ">=8.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/totalist": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/trim-lines": {
|
"node_modules/trim-lines": {
|
||||||
"version": "3.0.1",
|
"version": "3.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
|
||||||
|
|
@ -14251,6 +14472,7 @@
|
||||||
"version": "2.8.1",
|
"version": "2.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
|
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
|
||||||
|
"dev": true,
|
||||||
"license": "0BSD"
|
"license": "0BSD"
|
||||||
},
|
},
|
||||||
"node_modules/type-check": {
|
"node_modules/type-check": {
|
||||||
|
|
@ -14335,7 +14557,7 @@
|
||||||
"version": "7.16.0",
|
"version": "7.16.0",
|
||||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz",
|
||||||
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
"integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==",
|
||||||
"devOptional": true,
|
"dev": true,
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/unicode-properties": {
|
"node_modules/unicode-properties": {
|
||||||
|
|
@ -14837,7 +15059,6 @@
|
||||||
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
|
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"esbuild": "^0.25.0",
|
"esbuild": "^0.25.0",
|
||||||
"fdir": "^6.4.4",
|
"fdir": "^6.4.4",
|
||||||
|
|
@ -14927,6 +15148,84 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/vitest": {
|
||||||
|
"version": "4.0.16",
|
||||||
|
"resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz",
|
||||||
|
"integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@vitest/expect": "4.0.16",
|
||||||
|
"@vitest/mocker": "4.0.16",
|
||||||
|
"@vitest/pretty-format": "4.0.16",
|
||||||
|
"@vitest/runner": "4.0.16",
|
||||||
|
"@vitest/snapshot": "4.0.16",
|
||||||
|
"@vitest/spy": "4.0.16",
|
||||||
|
"@vitest/utils": "4.0.16",
|
||||||
|
"es-module-lexer": "^1.7.0",
|
||||||
|
"expect-type": "^1.2.2",
|
||||||
|
"magic-string": "^0.30.21",
|
||||||
|
"obug": "^2.1.1",
|
||||||
|
"pathe": "^2.0.3",
|
||||||
|
"picomatch": "^4.0.3",
|
||||||
|
"std-env": "^3.10.0",
|
||||||
|
"tinybench": "^2.9.0",
|
||||||
|
"tinyexec": "^1.0.2",
|
||||||
|
"tinyglobby": "^0.2.15",
|
||||||
|
"tinyrainbow": "^3.0.3",
|
||||||
|
"vite": "^6.0.0 || ^7.0.0",
|
||||||
|
"why-is-node-running": "^2.3.0"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"vitest": "vitest.mjs"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": "^20.0.0 || ^22.0.0 || >=24.0.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://opencollective.com/vitest"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@edge-runtime/vm": "*",
|
||||||
|
"@opentelemetry/api": "^1.9.0",
|
||||||
|
"@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0",
|
||||||
|
"@vitest/browser-playwright": "4.0.16",
|
||||||
|
"@vitest/browser-preview": "4.0.16",
|
||||||
|
"@vitest/browser-webdriverio": "4.0.16",
|
||||||
|
"@vitest/ui": "4.0.16",
|
||||||
|
"happy-dom": "*",
|
||||||
|
"jsdom": "*"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@edge-runtime/vm": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@opentelemetry/api": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@types/node": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@vitest/browser-playwright": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@vitest/browser-preview": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@vitest/browser-webdriverio": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"@vitest/ui": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"happy-dom": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"jsdom": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/walker": {
|
"node_modules/walker": {
|
||||||
"version": "1.0.8",
|
"version": "1.0.8",
|
||||||
"resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz",
|
"resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz",
|
||||||
|
|
@ -14982,6 +15281,23 @@
|
||||||
"node": ">=4"
|
"node": ">=4"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/why-is-node-running": {
|
||||||
|
"version": "2.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
|
||||||
|
"integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
|
||||||
|
"dev": true,
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"siginfo": "^2.0.0",
|
||||||
|
"stackback": "0.0.2"
|
||||||
|
},
|
||||||
|
"bin": {
|
||||||
|
"why-is-node-running": "cli.js"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/widest-line": {
|
"node_modules/widest-line": {
|
||||||
"version": "3.1.0",
|
"version": "3.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz",
|
||||||
|
|
@ -15111,7 +15427,6 @@
|
||||||
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
|
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
|
||||||
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
|
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
|
||||||
"license": "ISC",
|
"license": "ISC",
|
||||||
"peer": true,
|
|
||||||
"bin": {
|
"bin": {
|
||||||
"yaml": "bin.mjs"
|
"yaml": "bin.mjs"
|
||||||
},
|
},
|
||||||
|
|
@ -15270,18 +15585,6 @@
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/yoctocolors-cjs": {
|
|
||||||
"version": "2.1.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz",
|
|
||||||
"integrity": "sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/zip-stream": {
|
"node_modules/zip-stream": {
|
||||||
"version": "6.0.1",
|
"version": "6.0.1",
|
||||||
"resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz",
|
||||||
|
|
@ -15303,7 +15606,6 @@
|
||||||
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
"integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"peer": true,
|
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/colinhacks"
|
"url": "https://github.com/sponsors/colinhacks"
|
||||||
}
|
}
|
||||||
|
|
|
||||||
14 package.json

@ -44,10 +44,15 @@
     "release:minor": "gh workflow run \"Manual Release\" -f version_bump=minor",
     "release:patch": "gh workflow run \"Manual Release\" -f version_bump=patch",
     "release:watch": "gh run watch",
-    "test": "npm run test:schemas && npm run test:install && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
-    "test:coverage": "c8 --reporter=text --reporter=html npm run test:schemas",
+    "test": "npm run test:schemas && npm run test:install && npm run test:unit && npm run validate:schemas && npm run lint && npm run lint:md && npm run format:check",
+    "test:coverage": "vitest run --coverage",
     "test:install": "node test/test-installation-components.js",
+    "test:integration": "vitest run test/integration",
+    "test:quick": "vitest run --changed",
     "test:schemas": "node test/test-agent-schema.js",
+    "test:ui": "vitest --ui",
+    "test:unit": "vitest run",
+    "test:unit:watch": "vitest",
     "validate:schemas": "node tools/validate-agent-schema.js"
   },
   "lint-staged": {

@ -67,6 +72,7 @@
     ]
   },
   "dependencies": {
+    "@clack/prompts": "^0.11.0",
     "@kayvan/markdown-tree-parser": "^1.6.1",
     "boxen": "^5.1.2",
     "chalk": "^4.1.2",

@ -77,7 +83,6 @@
     "fs-extra": "^11.3.0",
     "glob": "^11.0.3",
     "ignore": "^7.0.5",
-    "inquirer": "^9.3.8",
     "js-yaml": "^4.1.0",
     "ora": "^5.4.1",
     "semver": "^7.6.3",

@ -89,6 +94,8 @@
     "@astrojs/sitemap": "^3.6.0",
     "@astrojs/starlight": "^0.37.0",
     "@eslint/js": "^9.33.0",
+    "@vitest/coverage-v8": "^4.0.16",
+    "@vitest/ui": "^4.0.16",
     "archiver": "^7.0.1",
     "astro": "^5.16.0",
     "c8": "^10.1.3",

@ -104,6 +111,7 @@
     "prettier": "^3.7.4",
     "prettier-plugin-packagejson": "^2.5.19",
     "sharp": "^0.33.5",
+    "vitest": "^4.0.16",
     "yaml-eslint-parser": "^1.2.3",
     "yaml-lint": "^1.7.0"
   },
@ -18,7 +18,6 @@ agent:
|
||||||
|
|
||||||
critical_actions:
|
critical_actions:
|
||||||
- "Load into memory {project-root}/_bmad/core/config.yaml and set variable project_name, output_folder, user_name, communication_language"
|
- "Load into memory {project-root}/_bmad/core/config.yaml and set variable project_name, output_folder, user_name, communication_language"
|
||||||
- "Remember the users name is {user_name}"
|
|
||||||
- "ALWAYS communicate in {communication_language}"
|
- "ALWAYS communicate in {communication_language}"
|
||||||
|
|
||||||
menu:
|
menu:
|
||||||
@@ -22,6 +22,8 @@ agent:
  critical_actions:
    - "Consult {project-root}/_bmad/bmgd/gametest/qa-index.csv to select knowledge fragments under knowledge/ and load only the files needed for the current task"
+   - "For E2E testing requests, always load knowledge/e2e-testing.md first"
+   - "When scaffolding tests, distinguish between unit, integration, and E2E test needs"
    - "Load the referenced fragment(s) from {project-root}/_bmad/bmgd/gametest/knowledge/ before giving recommendations"
    - "Cross-check recommendations with the current official Unity Test Framework, Unreal Automation, or Godot GUT documentation"
    - "Find if this exists, if it does, always treat it as the bible I plan and execute against: `**/project-context.md`"
@@ -43,6 +45,10 @@ agent:
      workflow: "{project-root}/_bmad/bmgd/workflows/gametest/automate/workflow.yaml"
      description: "[TA] Generate automated game tests"
+
+   - trigger: ES or fuzzy match on e2e-scaffold
+     workflow: "{project-root}/_bmad/bmgd/workflows/gametest/e2e-scaffold/workflow.yaml"
+     description: "[ES] Scaffold E2E testing infrastructure"

    - trigger: PP or fuzzy match on playtest-plan
      workflow: "{project-root}/_bmad/bmgd/workflows/gametest/playtest-plan/workflow.yaml"
      description: "[PP] Create structured playtesting plan"
File diff suppressed because it is too large
@@ -374,3 +374,502 @@ test:
| Signal not detected | Signal not watched | Call `watch_signals()` before action |
| Physics not working | Missing frames | Await `physics_frame` |
| Flaky tests | Timing issues | Use proper await/signals |

## C# Testing in Godot

Godot 4 supports C# via .NET 6+. You can use standard .NET testing frameworks alongside GUT.

### Project Setup for C#

```
project/
├── addons/
│   └── gut/
├── src/
│   ├── Player/
│   │   └── PlayerController.cs
│   └── Combat/
│       └── DamageCalculator.cs
├── tests/
│   ├── gdscript/
│   │   └── test_integration.gd
│   └── csharp/
│       ├── Tests.csproj
│       └── DamageCalculatorTests.cs
└── project.csproj
```
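If you prefer to generate the C# test project rather than write the `.csproj` by hand, the plain `dotnet` CLI can scaffold most of it. This is only a sketch: the paths follow the layout above, and the generated project still needs the `Godot.NET.Sdk` and reference adjustments shown in the next snippet.

```bash
# Scaffold an xUnit test project under tests/csharp (layout as above)
dotnet new xunit --output tests/csharp --name Tests

# Let the tests see the game's classes
dotnet add tests/csharp/Tests.csproj reference project.csproj

# Mocking library used later in this guide
dotnet add tests/csharp/Tests.csproj package NSubstitute
```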
|
||||||
|
|
||||||
|
### C# Test Project Setup
|
||||||
|
|
||||||
|
Create a separate test project that references your game assembly:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<!-- tests/csharp/Tests.csproj -->
|
||||||
|
<Project Sdk="Godot.NET.Sdk/4.2.0">
|
||||||
|
<PropertyGroup>
|
||||||
|
<TargetFramework>net6.0</TargetFramework>
|
||||||
|
<EnableDynamicLoading>true</EnableDynamicLoading>
|
||||||
|
<IsPackable>false</IsPackable>
|
||||||
|
</PropertyGroup>
|
||||||
|
|
||||||
|
<ItemGroup>
|
||||||
|
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.8.0" />
|
||||||
|
<PackageReference Include="xunit" Version="2.6.2" />
|
||||||
|
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.4" />
|
||||||
|
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||||
|
</ItemGroup>
|
||||||
|
|
||||||
|
<ItemGroup>
|
||||||
|
<ProjectReference Include="../../project.csproj" />
|
||||||
|
</ItemGroup>
|
||||||
|
</Project>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Basic C# Unit Tests
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// tests/csharp/DamageCalculatorTests.cs
|
||||||
|
using Xunit;
|
||||||
|
using YourGame.Combat;
|
||||||
|
|
||||||
|
public class DamageCalculatorTests
|
||||||
|
{
|
||||||
|
private readonly DamageCalculator _calculator;
|
||||||
|
|
||||||
|
public DamageCalculatorTests()
|
||||||
|
{
|
||||||
|
_calculator = new DamageCalculator();
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void Calculate_BaseDamage_ReturnsCorrectValue()
|
||||||
|
{
|
||||||
|
var result = _calculator.Calculate(100f, 1f);
|
||||||
|
Assert.Equal(100f, result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
public void Calculate_CriticalHit_DoublesDamage()
|
||||||
|
{
|
||||||
|
var result = _calculator.Calculate(100f, 2f);
|
||||||
|
Assert.Equal(200f, result);
|
||||||
|
}
|
||||||
|
|
||||||
|
[Theory]
|
||||||
|
[InlineData(100f, 0.5f, 50f)]
|
||||||
|
[InlineData(100f, 1.5f, 150f)]
|
||||||
|
[InlineData(50f, 2f, 100f)]
|
||||||
|
public void Calculate_Parameterized_ReturnsExpected(
|
||||||
|
float baseDamage, float multiplier, float expected)
|
||||||
|
{
|
||||||
|
var result = _calculator.Calculate(baseDamage, multiplier);
|
||||||
|
Assert.Equal(expected, result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Testing Godot Nodes in C#
|
||||||
|
|
||||||
|
For tests requiring Godot runtime, use a hybrid approach:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// tests/csharp/PlayerControllerTests.cs
|
||||||
|
using Godot;
|
||||||
|
using Xunit;
|
||||||
|
using YourGame.Player;
|
||||||
|
|
||||||
|
public class PlayerControllerTests : IDisposable
|
||||||
|
{
|
||||||
|
private readonly SceneTree _sceneTree;
|
||||||
|
private PlayerController _player;
|
||||||
|
|
||||||
|
public PlayerControllerTests()
|
||||||
|
{
|
||||||
|
// These tests must run within Godot runtime
|
||||||
|
// Use GodotXUnit or similar adapter
|
||||||
|
}
|
||||||
|
|
||||||
|
[GodotFact] // Custom attribute for Godot runtime tests
|
||||||
|
public async Task Player_Move_ChangesPosition()
|
||||||
|
{
|
||||||
|
var startPos = _player.GlobalPosition;
|
||||||
|
|
||||||
|
_player.SetInput(new Vector2(1, 0));
|
||||||
|
|
||||||
|
await ToSignal(GetTree().CreateTimer(0.5f), "timeout");
|
||||||
|
|
||||||
|
Assert.True(_player.GlobalPosition.X > startPos.X);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void Dispose()
|
||||||
|
{
|
||||||
|
_player?.QueueFree();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### C# Mocking with NSubstitute
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
using NSubstitute;
|
||||||
|
using Xunit;
|
||||||
|
|
||||||
|
public class EnemyAITests
|
||||||
|
{
|
||||||
|
[Fact]
|
||||||
|
public void Enemy_UsesPathfinding_WhenMoving()
|
||||||
|
{
|
||||||
|
var mockPathfinding = Substitute.For<IPathfinding>();
|
||||||
|
mockPathfinding.FindPath(Arg.Any<Vector2>(), Arg.Any<Vector2>())
|
||||||
|
.Returns(new[] { Vector2.Zero, new Vector2(10, 10) });
|
||||||
|
|
||||||
|
var enemy = new EnemyAI(mockPathfinding);
|
||||||
|
|
||||||
|
enemy.MoveTo(new Vector2(10, 10));
|
||||||
|
|
||||||
|
mockPathfinding.Received().FindPath(
|
||||||
|
Arg.Any<Vector2>(),
|
||||||
|
Arg.Is<Vector2>(v => v == new Vector2(10, 10)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Running C# Tests
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run C# unit tests (no Godot runtime needed)
|
||||||
|
dotnet test tests/csharp/Tests.csproj
|
||||||
|
|
||||||
|
# Run with coverage
|
||||||
|
dotnet test tests/csharp/Tests.csproj --collect:"XPlat Code Coverage"
|
||||||
|
|
||||||
|
# Run specific test
|
||||||
|
dotnet test tests/csharp/Tests.csproj --filter "FullyQualifiedName~DamageCalculator"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Hybrid Test Strategy

| Test Type     | Framework        | When to Use                          |
| ------------- | ---------------- | ------------------------------------ |
| Pure logic    | xUnit/NUnit (C#) | Classes without Godot dependencies   |
| Node behavior | GUT (GDScript)   | Node and scene-tree behavior testing |
| Integration   | GUT (GDScript)   | Scene and signal testing             |
| E2E           | GUT (GDScript)   | Full gameplay flows                  |

## End-to-End Testing

For comprehensive E2E testing patterns, infrastructure scaffolding, and scenario builders, see **knowledge/e2e-testing.md**.

### E2E Infrastructure for Godot

#### GameE2ETestFixture (GDScript)
|
||||||
|
|
||||||
|
```gdscript
|
||||||
|
# tests/e2e/infrastructure/game_e2e_test_fixture.gd
|
||||||
|
extends GutTest
|
||||||
|
class_name GameE2ETestFixture
|
||||||
|
|
||||||
|
var game_state: GameStateManager
|
||||||
|
var input_sim: InputSimulator
|
||||||
|
var scenario: ScenarioBuilder
|
||||||
|
var _scene_instance: Node
|
||||||
|
|
||||||
|
## Override to specify a different scene for specific test classes.
|
||||||
|
func get_scene_path() -> String:
|
||||||
|
return "res://scenes/game.tscn"
|
||||||
|
|
||||||
|
func before_each():
|
||||||
|
# Load game scene
|
||||||
|
var scene = load(get_scene_path())
|
||||||
|
_scene_instance = scene.instantiate()
|
||||||
|
add_child(_scene_instance)
|
||||||
|
|
||||||
|
# Get references
|
||||||
|
game_state = _scene_instance.get_node("GameStateManager")
|
||||||
|
assert_not_null(game_state, "GameStateManager not found in scene")
|
||||||
|
|
||||||
|
input_sim = InputSimulator.new()
|
||||||
|
scenario = ScenarioBuilder.new(game_state)
|
||||||
|
|
||||||
|
# Wait for ready
|
||||||
|
await wait_for_game_ready()
|
||||||
|
|
||||||
|
func after_each():
|
||||||
|
if _scene_instance:
|
||||||
|
_scene_instance.queue_free()
|
||||||
|
_scene_instance = null
|
||||||
|
input_sim = null
|
||||||
|
scenario = null
|
||||||
|
|
||||||
|
func wait_for_game_ready(timeout: float = 10.0):
|
||||||
|
var elapsed = 0.0
|
||||||
|
while not game_state.is_ready and elapsed < timeout:
|
||||||
|
await get_tree().process_frame
|
||||||
|
elapsed += get_process_delta_time()
|
||||||
|
assert_true(game_state.is_ready, "Game should be ready within timeout")
|
||||||
|
```
|
||||||
|
|
||||||
|
#### ScenarioBuilder (GDScript)
|
||||||
|
|
||||||
|
```gdscript
|
||||||
|
# tests/e2e/infrastructure/scenario_builder.gd
|
||||||
|
extends RefCounted
|
||||||
|
class_name ScenarioBuilder
|
||||||
|
|
||||||
|
var _game_state: GameStateManager
|
||||||
|
var _setup_actions: Array[Callable] = []
|
||||||
|
|
||||||
|
func _init(game_state: GameStateManager):
|
||||||
|
_game_state = game_state
|
||||||
|
|
||||||
|
## Load a pre-configured scenario from a save file.
|
||||||
|
func from_save_file(file_name: String) -> ScenarioBuilder:
|
||||||
|
_setup_actions.append(func(): await _load_save_file(file_name))
|
||||||
|
return self
|
||||||
|
|
||||||
|
## Configure the current turn number.
|
||||||
|
func on_turn(turn_number: int) -> ScenarioBuilder:
|
||||||
|
_setup_actions.append(func(): _set_turn(turn_number))
|
||||||
|
return self
|
||||||
|
|
||||||
|
## Spawn a unit at position.
|
||||||
|
func with_unit(faction: int, position: Vector2, movement_points: int = 6) -> ScenarioBuilder:
|
||||||
|
_setup_actions.append(func(): await _spawn_unit(faction, position, movement_points))
|
||||||
|
return self
|
||||||
|
|
||||||
|
## Execute all configured setup actions.
|
||||||
|
func build() -> void:
|
||||||
|
for action in _setup_actions:
|
||||||
|
await action.call()
|
||||||
|
_setup_actions.clear()
|
||||||
|
|
||||||
|
## Clear pending actions without executing.
|
||||||
|
func reset() -> void:
|
||||||
|
_setup_actions.clear()
|
||||||
|
|
||||||
|
# Private implementation
|
||||||
|
func _load_save_file(file_name: String) -> void:
|
||||||
|
var path = "res://tests/e2e/test_data/%s" % file_name
|
||||||
|
await _game_state.load_game(path)
|
||||||
|
|
||||||
|
func _set_turn(turn: int) -> void:
|
||||||
|
_game_state.set_turn_number(turn)
|
||||||
|
|
||||||
|
func _spawn_unit(faction: int, pos: Vector2, mp: int) -> void:
|
||||||
|
var unit = _game_state.spawn_unit(faction, pos)
|
||||||
|
unit.movement_points = mp
|
||||||
|
```
|
||||||
|
|
||||||
|
#### InputSimulator (GDScript)
|
||||||
|
|
||||||
|
```gdscript
|
||||||
|
# tests/e2e/infrastructure/input_simulator.gd
|
||||||
|
extends RefCounted
|
||||||
|
class_name InputSimulator
|
||||||
|
|
||||||
|
## Click at a world position.
|
||||||
|
func click_world_position(world_pos: Vector2) -> void:
|
||||||
|
var viewport = Engine.get_main_loop().root.get_viewport()
|
||||||
|
var camera = viewport.get_camera_2d()
|
||||||
|
var screen_pos = camera.get_screen_center_position() + (world_pos - camera.global_position)
|
||||||
|
await click_screen_position(screen_pos)
|
||||||
|
|
||||||
|
## Click at a screen position.
|
||||||
|
func click_screen_position(screen_pos: Vector2) -> void:
|
||||||
|
var press = InputEventMouseButton.new()
|
||||||
|
press.button_index = MOUSE_BUTTON_LEFT
|
||||||
|
press.pressed = true
|
||||||
|
press.position = screen_pos
|
||||||
|
|
||||||
|
var release = InputEventMouseButton.new()
|
||||||
|
release.button_index = MOUSE_BUTTON_LEFT
|
||||||
|
release.pressed = false
|
||||||
|
release.position = screen_pos
|
||||||
|
|
||||||
|
Input.parse_input_event(press)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
Input.parse_input_event(release)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
## Click a UI button by name.
|
||||||
|
func click_button(button_name: String) -> void:
|
||||||
|
var root = Engine.get_main_loop().root
|
||||||
|
var button = _find_button_recursive(root, button_name)
|
||||||
|
assert(button != null, "Button '%s' not found in scene tree" % button_name)
|
||||||
|
|
||||||
|
if not button.visible:
|
||||||
|
push_warning("[InputSimulator] Button '%s' is not visible" % button_name)
|
||||||
|
if button.disabled:
|
||||||
|
push_warning("[InputSimulator] Button '%s' is disabled" % button_name)
|
||||||
|
|
||||||
|
button.pressed.emit()
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
func _find_button_recursive(node: Node, button_name: String) -> Button:
|
||||||
|
if node is Button and node.name == button_name:
|
||||||
|
return node
|
||||||
|
for child in node.get_children():
|
||||||
|
var found = _find_button_recursive(child, button_name)
|
||||||
|
if found:
|
||||||
|
return found
|
||||||
|
return null
|
||||||
|
|
||||||
|
## Press and release a key.
|
||||||
|
func press_key(keycode: Key) -> void:
|
||||||
|
var press = InputEventKey.new()
|
||||||
|
press.keycode = keycode
|
||||||
|
press.pressed = true
|
||||||
|
|
||||||
|
var release = InputEventKey.new()
|
||||||
|
release.keycode = keycode
|
||||||
|
release.pressed = false
|
||||||
|
|
||||||
|
Input.parse_input_event(press)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
Input.parse_input_event(release)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
## Simulate an input action.
|
||||||
|
func action_press(action_name: String) -> void:
|
||||||
|
Input.action_press(action_name)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
func action_release(action_name: String) -> void:
|
||||||
|
Input.action_release(action_name)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
## Reset all input state.
|
||||||
|
func reset() -> void:
|
||||||
|
Input.flush_buffered_events()
|
||||||
|
```
|
||||||
|
|
||||||
|
#### AsyncAssert (GDScript)
|
||||||
|
|
||||||
|
```gdscript
|
||||||
|
# tests/e2e/infrastructure/async_assert.gd
|
||||||
|
extends RefCounted
|
||||||
|
class_name AsyncAssert
|
||||||
|
|
||||||
|
## Wait until condition is true, or fail after timeout.
|
||||||
|
static func wait_until(
|
||||||
|
condition: Callable,
|
||||||
|
description: String,
|
||||||
|
timeout: float = 5.0
|
||||||
|
) -> void:
|
||||||
|
var elapsed := 0.0
|
||||||
|
while not condition.call() and elapsed < timeout:
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
elapsed += Engine.get_main_loop().root.get_process_delta_time()
|
||||||
|
|
||||||
|
assert(condition.call(),
|
||||||
|
"Timeout after %.1fs waiting for: %s" % [timeout, description])
|
||||||
|
|
||||||
|
## Wait for a value to equal expected.
|
||||||
|
static func wait_for_value(
|
||||||
|
getter: Callable,
|
||||||
|
expected: Variant,
|
||||||
|
description: String,
|
||||||
|
timeout: float = 5.0
|
||||||
|
) -> void:
|
||||||
|
await wait_until(
|
||||||
|
func(): return getter.call() == expected,
|
||||||
|
"%s to equal '%s' (current: '%s')" % [description, expected, getter.call()],
|
||||||
|
timeout)
|
||||||
|
|
||||||
|
## Wait for a float value within tolerance.
|
||||||
|
static func wait_for_value_approx(
|
||||||
|
getter: Callable,
|
||||||
|
expected: float,
|
||||||
|
description: String,
|
||||||
|
tolerance: float = 0.0001,
|
||||||
|
timeout: float = 5.0
|
||||||
|
) -> void:
|
||||||
|
await wait_until(
|
||||||
|
func(): return absf(expected - getter.call()) < tolerance,
|
||||||
|
"%s to equal ~%s ±%s (current: %s)" % [description, expected, tolerance, getter.call()],
|
||||||
|
timeout)
|
||||||
|
|
||||||
|
## Assert that condition does NOT become true within duration.
|
||||||
|
static func assert_never_true(
|
||||||
|
condition: Callable,
|
||||||
|
description: String,
|
||||||
|
duration: float = 1.0
|
||||||
|
) -> void:
|
||||||
|
var elapsed := 0.0
|
||||||
|
while elapsed < duration:
|
||||||
|
assert(not condition.call(),
|
||||||
|
"Condition unexpectedly became true: %s" % description)
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
elapsed += Engine.get_main_loop().root.get_process_delta_time()
|
||||||
|
|
||||||
|
## Wait for specified number of frames.
|
||||||
|
static func wait_frames(count: int) -> void:
|
||||||
|
for i in range(count):
|
||||||
|
await Engine.get_main_loop().process_frame
|
||||||
|
|
||||||
|
## Wait for physics to settle.
|
||||||
|
static func wait_for_physics(frames: int = 3) -> void:
|
||||||
|
for i in range(frames):
|
||||||
|
await Engine.get_main_loop().root.get_tree().physics_frame
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example E2E Test (GDScript)
|
||||||
|
|
||||||
|
```gdscript
|
||||||
|
# tests/e2e/scenarios/test_combat_flow.gd
|
||||||
|
extends GameE2ETestFixture
|
||||||
|
|
||||||
|
func test_player_can_attack_enemy():
|
||||||
|
# GIVEN: Player and enemy in combat range
|
||||||
|
await scenario \
|
||||||
|
.with_unit(Faction.PLAYER, Vector2(100, 100)) \
|
||||||
|
.with_unit(Faction.ENEMY, Vector2(150, 100)) \
|
||||||
|
.build()
|
||||||
|
|
||||||
|
var enemy = game_state.get_units(Faction.ENEMY)[0]
|
||||||
|
var initial_health = enemy.health
|
||||||
|
|
||||||
|
# WHEN: Player attacks
|
||||||
|
await input_sim.click_world_position(Vector2(100, 100)) # Select player
|
||||||
|
await AsyncAssert.wait_until(
|
||||||
|
func(): return game_state.selected_unit != null,
|
||||||
|
"Unit should be selected")
|
||||||
|
|
||||||
|
await input_sim.click_world_position(Vector2(150, 100)) # Attack enemy
|
||||||
|
|
||||||
|
# THEN: Enemy takes damage
|
||||||
|
await AsyncAssert.wait_until(
|
||||||
|
func(): return enemy.health < initial_health,
|
||||||
|
"Enemy should take damage")
|
||||||
|
|
||||||
|
func test_turn_cycle_completes():
|
||||||
|
# GIVEN: Game in progress
|
||||||
|
await scenario.on_turn(1).build()
|
||||||
|
var starting_turn = game_state.turn_number
|
||||||
|
|
||||||
|
# WHEN: Player ends turn
|
||||||
|
await input_sim.click_button("EndTurnButton")
|
||||||
|
await AsyncAssert.wait_until(
|
||||||
|
func(): return game_state.current_faction == Faction.ENEMY,
|
||||||
|
"Should switch to enemy turn")
|
||||||
|
|
||||||
|
# AND: Enemy turn completes
|
||||||
|
await AsyncAssert.wait_until(
|
||||||
|
func(): return game_state.current_faction == Faction.PLAYER,
|
||||||
|
"Should return to player turn",
|
||||||
|
30.0) # AI might take a while
|
||||||
|
|
||||||
|
# THEN: Turn number incremented
|
||||||
|
assert_eq(game_state.turn_number, starting_turn + 1)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Quick E2E Checklist for Godot

- [ ] Create `GameE2ETestFixture` base class extending GutTest
- [ ] Implement `ScenarioBuilder` for your game's domain
- [ ] Create `InputSimulator` wrapping Godot Input
- [ ] Add `AsyncAssert` utilities with proper await
- [ ] Organize E2E tests under `tests/e2e/scenarios/`
- [ ] Configure GUT to include E2E test directory
- [ ] Set up CI with headless Godot execution (see the command sketch below)
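As a sketch of that last item, a CI step for Godot 4 can call GUT's bundled command-line runner. The flags below are common GUT options (`-gdir`, `-ginclude_subdirs`, `-gexit`) and may need adjusting for your GUT version and test root.

```bash
# Run the GUT suite (unit, integration, and e2e directories) headlessly in CI
godot --headless -s addons/gut/gut_cmdln.gd \
  -gdir=res://tests -ginclude_subdirs -gexit
```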
|
||||||
|
|
|
||||||
|
|
@@ -381,3 +381,17 @@ test:
| NullReferenceException | Missing Setup | Ensure [SetUp] initializes all fields |
| Tests hang | Infinite coroutine | Add timeout or max iterations |
| Flaky physics tests | Timing dependent | Use WaitForFixedUpdate, increase tolerance |

## End-to-End Testing

For comprehensive E2E testing patterns, infrastructure scaffolding, and scenario builders, see **knowledge/e2e-testing.md**.

### Quick E2E Checklist for Unity

- [ ] Create `GameE2ETestFixture` base class
- [ ] Implement `ScenarioBuilder` for your game's domain
- [ ] Create `InputSimulator` wrapping Input System
- [ ] Add `AsyncAssert` utilities
- [ ] Organize E2E tests under `Tests/PlayMode/E2E/`
- [ ] Configure separate CI job for E2E suite (see the command sketch below)
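A rough sketch of such a CI job using the Unity editor's test-runner CLI; the editor path, project path, and results location are placeholders for whatever your pipeline uses.

```bash
# Run PlayMode tests (which include the E2E assembly) headlessly
Unity -batchmode -projectPath . \
  -runTests -testPlatform PlayMode \
  -testResults results/playmode-results.xml \
  -logFile -
```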
File diff suppressed because it is too large
@@ -15,3 +15,4 @@ localization-testing,Localization Testing,"Text, audio, and cultural validation
  certification-testing,Platform Certification,"Console TRC/XR requirements and certification testing","certification,console,trc,xr",knowledge/certification-testing.md
  smoke-testing,Smoke Testing,"Critical path validation for build verification","smoke-tests,bvt,ci",knowledge/smoke-testing.md
  test-priorities,Test Priorities Matrix,"P0-P3 criteria, coverage targets, execution ordering for games","prioritization,risk,coverage",knowledge/test-priorities.md
+ e2e-testing,End-to-End Testing,"Complete player journey testing with infrastructure patterns and async utilities","e2e,integration,player-journeys,scenarios,infrastructure",knowledge/e2e-testing.md
@ -209,6 +209,87 @@ func test_{feature}_integration():
|
||||||
# Cleanup
|
# Cleanup
|
||||||
scene.queue_free()
|
scene.queue_free()
|
||||||
```
|
```
|
||||||
|
### E2E Journey Tests
|
||||||
|
|
||||||
|
**Knowledge Base Reference**: `knowledge/e2e-testing.md`
|
||||||
|
```csharp
|
||||||
|
public class {Feature}E2ETests : GameE2ETestFixture
|
||||||
|
{
|
||||||
|
[UnityTest]
|
||||||
|
public IEnumerator {JourneyName}_Succeeds()
|
||||||
|
{
|
||||||
|
// GIVEN
|
||||||
|
yield return Scenario
|
||||||
|
.{SetupMethod1}()
|
||||||
|
.{SetupMethod2}()
|
||||||
|
.Build();
|
||||||
|
|
||||||
|
// WHEN
|
||||||
|
yield return Input.{Action1}();
|
||||||
|
yield return AsyncAssert.WaitUntil(
|
||||||
|
() => {Condition1}, "{Description1}");
|
||||||
|
yield return Input.{Action2}();
|
||||||
|
|
||||||
|
// THEN
|
||||||
|
yield return AsyncAssert.WaitUntil(
|
||||||
|
() => {FinalCondition}, "{FinalDescription}");
|
||||||
|
Assert.{Assertion}({expected}, {actual});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Step 3.5: Generate E2E Infrastructure

Before generating E2E tests, scaffold the required infrastructure.

### Infrastructure Checklist

1. **Test Fixture Base Class**
   - Scene loading/unloading
   - Game ready state waiting
   - Common service access
   - Cleanup guarantees

2. **Scenario Builder**
   - Fluent API for game state configuration
   - Domain-specific methods (e.g., `WithUnit`, `OnTurn`)
   - Yields for state propagation

3. **Input Simulator**
   - Click/drag abstractions
   - Button press simulation
   - Keyboard input queuing

4. **Async Assertions**
   - `WaitUntil` with timeout and message
   - `WaitForEvent` for event-driven flows
   - `WaitForState` for state machine transitions

### Generation Template
|
||||||
|
```csharp
|
||||||
|
// GameE2ETestFixture.cs
|
||||||
|
public abstract class GameE2ETestFixture
|
||||||
|
{
|
||||||
|
protected {GameStateClass} GameState;
|
||||||
|
protected {InputSimulatorClass} Input;
|
||||||
|
protected {ScenarioBuilderClass} Scenario;
|
||||||
|
|
||||||
|
[UnitySetUp]
|
||||||
|
public IEnumerator BaseSetUp()
|
||||||
|
{
|
||||||
|
yield return LoadScene("{main_scene}");
|
||||||
|
GameState = Object.FindFirstObjectByType<{GameStateClass}>();
|
||||||
|
Input = new {InputSimulatorClass}();
|
||||||
|
Scenario = new {ScenarioBuilderClass}(GameState);
|
||||||
|
yield return WaitForReady();
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... (fill from e2e-testing.md patterns)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**After scaffolding infrastructure, proceed to generate actual E2E tests.**
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -0,0 +1,95 @@
# E2E Infrastructure Scaffold Checklist

## Preflight Validation

- [ ] Test framework already initialized (`Tests/` directory exists with proper structure)
- [ ] Game state manager class identified
- [ ] Main gameplay scene identified and loads without errors
- [ ] No existing E2E infrastructure conflicts

## Architecture Analysis

- [ ] Game engine correctly detected
- [ ] Engine version identified
- [ ] Input system type determined (New Input System, Legacy, Custom)
- [ ] Game state manager class located
- [ ] Ready/initialized state property identified
- [ ] Key domain entities catalogued for ScenarioBuilder

## Generated Files

### Directory Structure

- [ ] `Tests/PlayMode/E2E/` directory created
- [ ] `Tests/PlayMode/E2E/Infrastructure/` directory created
- [ ] `Tests/PlayMode/E2E/Scenarios/` directory created
- [ ] `Tests/PlayMode/E2E/TestData/` directory created

### Infrastructure Files

- [ ] `E2E.asmdef` created with correct assembly references
- [ ] `GameE2ETestFixture.cs` created with correct class references
- [ ] `ScenarioBuilder.cs` created with at least placeholder methods
- [ ] `InputSimulator.cs` created matching detected input system
- [ ] `AsyncAssert.cs` created with core assertion methods

### Example and Documentation

- [ ] `ExampleE2ETest.cs` created with working infrastructure test
- [ ] `README.md` created with usage documentation

## Code Quality

### GameE2ETestFixture

- [ ] Correct namespace applied
- [ ] Correct `GameStateClass` reference
- [ ] Correct `SceneName` default
- [ ] `WaitForGameReady` uses correct ready property
- [ ] `UnitySetUp` and `UnityTearDown` properly structured
- [ ] Virtual methods for derived class customization

### ScenarioBuilder

- [ ] Fluent API pattern correctly implemented
- [ ] `Build()` executes all queued actions
- [ ] At least one domain-specific method added (or clear TODOs)
- [ ] `FromSaveFile` method scaffolded

### InputSimulator

- [ ] Matches detected input system (New vs Legacy)
- [ ] Mouse click simulation works
- [ ] Button click by name works
- [ ] Keyboard input scaffolded
- [ ] `Reset()` method cleans up state

### AsyncAssert

- [ ] `WaitUntil` includes timeout and descriptive failure
- [ ] `WaitForValue` provides current vs expected in failure
- [ ] `AssertNeverTrue` for negative assertions
- [ ] Frame/physics wait utilities included

## Assembly Definition

- [ ] References main game assembly
- [ ] References Unity.InputSystem (if applicable)
- [ ] `overrideReferences` set to true
- [ ] `precompiledReferences` includes nunit.framework.dll
- [ ] `precompiledReferences` includes UnityEngine.TestRunner.dll
- [ ] `precompiledReferences` includes UnityEditor.TestRunner.dll
- [ ] `UNITY_INCLUDE_TESTS` define constraint set

## Verification

- [ ] Project compiles without errors after scaffold
- [ ] `ExampleE2ETests.Infrastructure_GameLoadsAndReachesReadyState` passes
- [ ] Test appears in Test Runner under PlayMode → E2E category

## Documentation Quality

- [ ] README explains all infrastructure components
- [ ] Quick start example is copy-pasteable
- [ ] Extension instructions are clear
- [ ] Troubleshooting table addresses common issues

## Handoff

- [ ] Summary output provided with all configuration values
- [ ] Next steps clearly listed
- [ ] Customization requirements highlighted
- [ ] Knowledge fragments referenced
File diff suppressed because it is too large
@ -0,0 +1,145 @@
|
||||||
|
# E2E Test Infrastructure Scaffold Workflow
|
||||||
|
|
||||||
|
workflow:
|
||||||
|
id: e2e-scaffold
|
||||||
|
name: E2E Test Infrastructure Scaffold
|
||||||
|
version: 1.0
|
||||||
|
module: bmgd
|
||||||
|
agent: game-qa
|
||||||
|
|
||||||
|
description: |
|
||||||
|
Scaffold complete E2E testing infrastructure for an existing game project.
|
||||||
|
Creates test fixtures, scenario builders, input simulators, and async
|
||||||
|
assertion utilities tailored to the project's architecture.
|
||||||
|
|
||||||
|
triggers:
|
||||||
|
- "ES"
|
||||||
|
- "e2e-scaffold"
|
||||||
|
- "scaffold e2e"
|
||||||
|
- "e2e infrastructure"
|
||||||
|
- "setup e2e"
|
||||||
|
|
||||||
|
preflight:
|
||||||
|
- "Test framework initialized (run `test-framework` workflow first)"
|
||||||
|
- "Game has identifiable state manager"
|
||||||
|
- "Main gameplay scene exists"
|
||||||
|
|
||||||
|
# Paths are relative to this workflow file's location
|
||||||
|
knowledge_fragments:
|
||||||
|
- "../../../gametest/knowledge/e2e-testing.md"
|
||||||
|
- "../../../gametest/knowledge/unity-testing.md"
|
||||||
|
- "../../../gametest/knowledge/unreal-testing.md"
|
||||||
|
- "../../../gametest/knowledge/godot-testing.md"
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
game_state_class:
|
||||||
|
description: "Primary game state manager class name"
|
||||||
|
required: true
|
||||||
|
example: "GameStateManager"
|
||||||
|
|
||||||
|
main_scene:
|
||||||
|
description: "Scene name where core gameplay occurs"
|
||||||
|
required: true
|
||||||
|
example: "GameScene"
|
||||||
|
|
||||||
|
input_system:
|
||||||
|
description: "Input system in use"
|
||||||
|
required: false
|
||||||
|
default: "auto-detect"
|
||||||
|
options:
|
||||||
|
- "unity-input-system"
|
||||||
|
- "unity-legacy"
|
||||||
|
- "unreal-enhanced"
|
||||||
|
- "godot-input"
|
||||||
|
- "custom"
|
||||||
|
|
||||||
|
# Output paths vary by engine. Generate files matching detected engine.
|
||||||
|
outputs:
|
||||||
|
unity:
|
||||||
|
condition: "engine == 'unity'"
|
||||||
|
infrastructure_files:
|
||||||
|
description: "Generated E2E infrastructure classes"
|
||||||
|
files:
|
||||||
|
- "Tests/PlayMode/E2E/Infrastructure/GameE2ETestFixture.cs"
|
||||||
|
- "Tests/PlayMode/E2E/Infrastructure/ScenarioBuilder.cs"
|
||||||
|
- "Tests/PlayMode/E2E/Infrastructure/InputSimulator.cs"
|
||||||
|
- "Tests/PlayMode/E2E/Infrastructure/AsyncAssert.cs"
|
||||||
|
assembly_definition:
|
||||||
|
description: "E2E test assembly configuration"
|
||||||
|
files:
|
||||||
|
- "Tests/PlayMode/E2E/E2E.asmdef"
|
||||||
|
example_test:
|
||||||
|
description: "Working example E2E test"
|
||||||
|
files:
|
||||||
|
- "Tests/PlayMode/E2E/ExampleE2ETest.cs"
|
||||||
|
documentation:
|
||||||
|
description: "E2E testing README"
|
||||||
|
files:
|
||||||
|
- "Tests/PlayMode/E2E/README.md"
|
||||||
|
|
||||||
|
unreal:
|
||||||
|
condition: "engine == 'unreal'"
|
||||||
|
infrastructure_files:
|
||||||
|
description: "Generated E2E infrastructure classes"
|
||||||
|
files:
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/GameE2ETestBase.h"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/GameE2ETestBase.cpp"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/ScenarioBuilder.h"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/ScenarioBuilder.cpp"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/InputSimulator.h"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/InputSimulator.cpp"
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/AsyncAssert.h"
|
||||||
|
build_configuration:
|
||||||
|
description: "E2E test build configuration"
|
||||||
|
files:
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/{ProjectName}E2ETests.Build.cs"
|
||||||
|
example_test:
|
||||||
|
description: "Working example E2E test"
|
||||||
|
files:
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/ExampleE2ETest.cpp"
|
||||||
|
documentation:
|
||||||
|
description: "E2E testing README"
|
||||||
|
files:
|
||||||
|
- "Source/{ProjectName}/Tests/E2E/README.md"
|
||||||
|
|
||||||
|
godot:
|
||||||
|
condition: "engine == 'godot'"
|
||||||
|
infrastructure_files:
|
||||||
|
description: "Generated E2E infrastructure classes"
|
||||||
|
files:
|
||||||
|
- "tests/e2e/infrastructure/game_e2e_test_fixture.gd"
|
||||||
|
- "tests/e2e/infrastructure/scenario_builder.gd"
|
||||||
|
- "tests/e2e/infrastructure/input_simulator.gd"
|
||||||
|
- "tests/e2e/infrastructure/async_assert.gd"
|
||||||
|
example_test:
|
||||||
|
description: "Working example E2E test"
|
||||||
|
files:
|
||||||
|
- "tests/e2e/scenarios/example_e2e_test.gd"
|
||||||
|
documentation:
|
||||||
|
description: "E2E testing README"
|
||||||
|
files:
|
||||||
|
- "tests/e2e/README.md"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- id: analyze
|
||||||
|
name: "Analyze Game Architecture"
|
||||||
|
instruction_file: "instructions.md#step-1-analyze-game-architecture"
|
||||||
|
|
||||||
|
- id: scaffold
|
||||||
|
name: "Generate Infrastructure"
|
||||||
|
instruction_file: "instructions.md#step-2-generate-infrastructure"
|
||||||
|
|
||||||
|
- id: example
|
||||||
|
name: "Generate Example Test"
|
||||||
|
instruction_file: "instructions.md#step-3-generate-example-test"
|
||||||
|
|
||||||
|
- id: document
|
||||||
|
name: "Generate Documentation"
|
||||||
|
instruction_file: "instructions.md#step-4-generate-documentation"
|
||||||
|
|
||||||
|
- id: complete
|
||||||
|
name: "Output Summary"
|
||||||
|
instruction_file: "instructions.md#step-5-output-summary"
|
||||||
|
|
||||||
|
validation:
|
||||||
|
checklist: "checklist.md"
|
||||||
|
|
@ -91,6 +91,18 @@ Create comprehensive test scenarios for game projects, covering gameplay mechani
|
||||||
| Performance | FPS, loading times | P1 |
|
| Performance | FPS, loading times | P1 |
|
||||||
| Accessibility | Assist features | P1 |
|
| Accessibility | Assist features | P1 |
|
||||||
|
|
||||||
|
### E2E Journey Testing
|
||||||
|
|
||||||
|
**Knowledge Base Reference**: `knowledge/e2e-testing.md`
|
||||||
|
|
||||||
|
| Category | Focus | Priority |
|
||||||
|
|----------|-------|----------|
|
||||||
|
| Core Loop | Complete gameplay cycle | P0 |
|
||||||
|
| Turn Lifecycle | Full turn from start to end | P0 |
|
||||||
|
| Save/Load Round-trip | Save → quit → load → resume | P0 |
|
||||||
|
| Scene Transitions | Menu → Game → Back | P1 |
|
||||||
|
| Win/Lose Paths | Victory and defeat conditions | P1 |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Step 3: Create Test Scenarios
|
## Step 3: Create Test Scenarios
|
||||||
|
|
@ -153,6 +165,39 @@ SCENARIO: Gameplay Under High Latency
|
||||||
CATEGORY: multiplayer
|
CATEGORY: multiplayer
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### E2E Scenario Format
|
||||||
|
|
||||||
|
For player journey tests, use this extended format:
|
||||||
|
```
|
||||||
|
E2E SCENARIO: [Player Journey Name]
|
||||||
|
GIVEN [Initial game state - use ScenarioBuilder terms]
|
||||||
|
WHEN [Sequence of player actions]
|
||||||
|
THEN [Observable outcomes]
|
||||||
|
TIMEOUT: [Expected max duration in seconds]
|
||||||
|
PRIORITY: P0/P1
|
||||||
|
CATEGORY: e2e
|
||||||
|
INFRASTRUCTURE: [Required fixtures/builders]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example E2E Scenario
|
||||||
|
```
|
||||||
|
E2E SCENARIO: Complete Combat Encounter
|
||||||
|
GIVEN game loaded with player unit adjacent to enemy
|
||||||
|
AND player unit has full health and actions
|
||||||
|
WHEN player selects unit
|
||||||
|
AND player clicks attack on enemy
|
||||||
|
AND player confirms attack
|
||||||
|
AND attack animation completes
|
||||||
|
AND enemy responds (if alive)
|
||||||
|
THEN enemy health is reduced OR enemy is defeated
|
||||||
|
AND turn state advances appropriately
|
||||||
|
AND UI reflects new state
|
||||||
|
TIMEOUT: 15
|
||||||
|
PRIORITY: P0
|
||||||
|
CATEGORY: e2e
|
||||||
|
INFRASTRUCTURE: ScenarioBuilder, InputSimulator, AsyncAssert
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Step 4: Prioritize Test Coverage
|
## Step 4: Prioritize Test Coverage
|
||||||
|
|
@@ -161,12 +206,12 @@ SCENARIO: Gameplay Under High Latency

**Knowledge Base Reference**: `knowledge/test-priorities.md`

- | Priority | Criteria                     | Coverage Target |
- | -------- | ---------------------------- | --------------- |
- | P0       | Ship blockers, certification | 100% automated  |
- | P1       | Major features, common paths | 80% automated   |
- | P2       | Secondary features           | 60% automated   |
- | P3       | Edge cases, polish           | Manual only     |
+ | Priority | Criteria       | Unit | Integration | E2E         | Manual    |
+ | -------- | -------------- | ---- | ----------- | ----------- | --------- |
+ | P0       | Ship blockers  | 100% | 80%         | Core flows  | Smoke     |
+ | P1       | Major features | 90%  | 70%         | Happy paths | Full      |
+ | P2       | Secondary      | 80%  | 50%         | -           | Targeted  |
+ | P3       | Edge cases     | 60%  | -           | -           | As needed |

### Risk-Based Ordering
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -33,7 +33,7 @@ agent:
  menu:
    - trigger: WS or fuzzy match on workflow-status
      workflow: "{project-root}/_bmad/bmm/workflows/workflow-status/workflow.yaml"
-     description: "[WS] Get workflow status or initialize a workflow if not already done (optional)"
+     description: "[WS] Start here or resume - show workflow status and next best step"

    - trigger: TF or fuzzy match on test-framework
      workflow: "{project-root}/_bmad/bmm/workflows/testarch/framework/workflow.yaml"
|
||||||
|
|
|
||||||
|
|
@@ -121,6 +121,8 @@ Parse these fields from YAML comments and metadata:
  - {{workflow_name}} ({{agent}}) - {{status}}
  {{/each}}
  {{/if}}
+
+ **Tip:** For guardrail tests, run TEA `*automate` after `dev-story`. If you lose context, TEA workflows resume from artifacts in `{{output_folder}}`.
</output>
</step>
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,83 @@
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
import yaml from 'yaml';
|
||||||
|
import xml2js from 'xml2js';
|
||||||
|
|
||||||
|
// Get the directory of this module
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = path.dirname(__filename);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load a fixture file
|
||||||
|
* @param {string} fixturePath - Relative path to fixture from test/fixtures/
|
||||||
|
* @returns {Promise<string>} File content
|
||||||
|
*/
|
||||||
|
export async function loadFixture(fixturePath) {
|
||||||
|
const fullPath = path.join(__dirname, '..', 'fixtures', fixturePath);
|
||||||
|
return fs.readFile(fullPath, 'utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load a YAML fixture
|
||||||
|
* @param {string} fixturePath - Relative path to YAML fixture
|
||||||
|
* @returns {Promise<Object>} Parsed YAML object
|
||||||
|
*/
|
||||||
|
export async function loadYamlFixture(fixturePath) {
|
||||||
|
const content = await loadFixture(fixturePath);
|
||||||
|
return yaml.parse(content);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load an XML fixture
|
||||||
|
* @param {string} fixturePath - Relative path to XML fixture
|
||||||
|
* @returns {Promise<Object>} Parsed XML object
|
||||||
|
*/
|
||||||
|
export async function loadXmlFixture(fixturePath) {
|
||||||
|
const content = await loadFixture(fixturePath);
|
||||||
|
return xml2js.parseStringPromise(content);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load a JSON fixture
|
||||||
|
* @param {string} fixturePath - Relative path to JSON fixture
|
||||||
|
* @returns {Promise<Object>} Parsed JSON object
|
||||||
|
*/
|
||||||
|
export async function loadJsonFixture(fixturePath) {
|
||||||
|
const content = await loadFixture(fixturePath);
|
||||||
|
return JSON.parse(content);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a fixture file exists
|
||||||
|
* @param {string} fixturePath - Relative path to fixture
|
||||||
|
* @returns {Promise<boolean>} True if fixture exists
|
||||||
|
*/
|
||||||
|
export async function fixtureExists(fixturePath) {
|
||||||
|
const fullPath = path.join(__dirname, '..', 'fixtures', fixturePath);
|
||||||
|
return fs.pathExists(fullPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the full path to a fixture
|
||||||
|
* @param {string} fixturePath - Relative path to fixture
|
||||||
|
* @returns {string} Full path to fixture
|
||||||
|
*/
|
||||||
|
export function getFixturePath(fixturePath) {
|
||||||
|
return path.join(__dirname, '..', 'fixtures', fixturePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test file in a temporary directory
|
||||||
|
* (Re-exported from temp-dir for convenience)
|
||||||
|
* @param {string} tmpDir - Temporary directory path
|
||||||
|
* @param {string} relativePath - Relative path for the file
|
||||||
|
* @param {string} content - File content
|
||||||
|
* @returns {Promise<string>} Full path to the created file
|
||||||
|
*/
|
||||||
|
export async function createTestFile(tmpDir, relativePath, content) {
|
||||||
|
const fullPath = path.join(tmpDir, relativePath);
|
||||||
|
await fs.ensureDir(path.dirname(fullPath));
|
||||||
|
await fs.writeFile(fullPath, content, 'utf8');
|
||||||
|
return fullPath;
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,82 @@
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'node:path';
|
||||||
|
import os from 'node:os';
|
||||||
|
import { randomUUID } from 'node:crypto';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a temporary directory for testing
|
||||||
|
* @param {string} prefix - Prefix for the directory name
|
||||||
|
* @returns {Promise<string>} Path to the created temporary directory
|
||||||
|
*/
|
||||||
|
export async function createTempDir(prefix = 'bmad-test-') {
|
||||||
|
const tmpDir = path.join(os.tmpdir(), `${prefix}${randomUUID()}`);
|
||||||
|
await fs.ensureDir(tmpDir);
|
||||||
|
return tmpDir;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up a temporary directory
|
||||||
|
* @param {string} tmpDir - Path to the temporary directory
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
export async function cleanupTempDir(tmpDir) {
|
||||||
|
if (await fs.pathExists(tmpDir)) {
|
||||||
|
await fs.remove(tmpDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a test function with a temporary directory
|
||||||
|
* Automatically creates and cleans up the directory
|
||||||
|
* @param {Function} testFn - Test function that receives the temp directory path
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
export async function withTempDir(testFn) {
|
||||||
|
const tmpDir = await createTempDir();
|
||||||
|
try {
|
||||||
|
await testFn(tmpDir);
|
||||||
|
} finally {
|
||||||
|
await cleanupTempDir(tmpDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test file in a temporary directory
|
||||||
|
* @param {string} tmpDir - Temporary directory path
|
||||||
|
* @param {string} relativePath - Relative path for the file
|
||||||
|
* @param {string} content - File content
|
||||||
|
* @returns {Promise<string>} Full path to the created file
|
||||||
|
*/
|
||||||
|
export async function createTestFile(tmpDir, relativePath, content) {
|
||||||
|
const fullPath = path.join(tmpDir, relativePath);
|
||||||
|
await fs.ensureDir(path.dirname(fullPath));
|
||||||
|
await fs.writeFile(fullPath, content, 'utf8');
|
||||||
|
return fullPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create multiple test files in a temporary directory
|
||||||
|
* @param {string} tmpDir - Temporary directory path
|
||||||
|
* @param {Object} files - Object mapping relative paths to content
|
||||||
|
* @returns {Promise<string[]>} Array of created file paths
|
||||||
|
*/
|
||||||
|
export async function createTestFiles(tmpDir, files) {
|
||||||
|
const paths = [];
|
||||||
|
for (const [relativePath, content] of Object.entries(files)) {
|
||||||
|
const fullPath = await createTestFile(tmpDir, relativePath, content);
|
||||||
|
paths.push(fullPath);
|
||||||
|
}
|
||||||
|
return paths;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test directory structure
|
||||||
|
* @param {string} tmpDir - Temporary directory path
|
||||||
|
* @param {string[]} dirs - Array of relative directory paths
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
export async function createTestDirs(tmpDir, dirs) {
|
||||||
|
for (const dir of dirs) {
|
||||||
|
await fs.ensureDir(path.join(tmpDir, dir));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -0,0 +1,26 @@
|
||||||
|
import { beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
// Global test setup
|
||||||
|
beforeEach(() => {
|
||||||
|
// Reset environment variables to prevent test pollution
|
||||||
|
// Store original env for restoration
|
||||||
|
if (!globalThis.__originalEnv) {
|
||||||
|
globalThis.__originalEnv = { ...process.env };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
// Restore original environment variables
|
||||||
|
if (globalThis.__originalEnv) {
|
||||||
|
process.env = { ...globalThis.__originalEnv };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Any global cleanup can go here
|
||||||
|
});
|
||||||
|
|
||||||
|
// Increase timeout for file system operations
|
||||||
|
// (Individual tests can override this if needed)
|
||||||
|
const DEFAULT_TIMEOUT = 10_000; // 10 seconds
|
||||||
|
|
||||||
|
// Make timeout available globally
|
||||||
|
globalThis.DEFAULT_TEST_TIMEOUT = DEFAULT_TIMEOUT;
|
||||||
|
|
@ -0,0 +1,428 @@
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { Config } from '../../../tools/cli/lib/config.js';
|
||||||
|
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'node:path';
|
||||||
|
import yaml from 'yaml';
|
||||||
|
|
||||||
|
describe('Config', () => {
|
||||||
|
let tmpDir;
|
||||||
|
let config;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tmpDir = await createTempDir();
|
||||||
|
config = new Config();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await cleanupTempDir(tmpDir);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadYaml()', () => {
|
||||||
|
it('should load and parse YAML file', async () => {
|
||||||
|
const yamlContent = {
|
||||||
|
key1: 'value1',
|
||||||
|
key2: { nested: 'value2' },
|
||||||
|
array: [1, 2, 3],
|
||||||
|
};
|
||||||
|
|
||||||
|
const configPath = path.join(tmpDir, 'config.yaml');
|
||||||
|
await fs.writeFile(configPath, yaml.stringify(yamlContent));
|
||||||
|
|
||||||
|
const result = await config.loadYaml(configPath);
|
||||||
|
|
||||||
|
expect(result).toEqual(yamlContent);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw error for non-existent file', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'missing.yaml');
|
||||||
|
|
||||||
|
await expect(config.loadYaml(nonExistent)).rejects.toThrow('Configuration file not found');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle Unicode content', async () => {
|
||||||
|
const yamlContent = {
|
||||||
|
chinese: '测试',
|
||||||
|
russian: 'Тест',
|
||||||
|
japanese: 'テスト',
|
||||||
|
};
|
||||||
|
|
||||||
|
const configPath = path.join(tmpDir, 'unicode.yaml');
|
||||||
|
await fs.writeFile(configPath, yaml.stringify(yamlContent));
|
||||||
|
|
||||||
|
const result = await config.loadYaml(configPath);
|
||||||
|
|
||||||
|
expect(result.chinese).toBe('测试');
|
||||||
|
expect(result.russian).toBe('Тест');
|
||||||
|
expect(result.japanese).toBe('テスト');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Note: saveYaml() is not tested because it uses yaml.dump() which doesn't exist
|
||||||
|
// in yaml 2.7.0 (should use yaml.stringify). This method is never called in production
|
||||||
|
// and represents dead code with a latent bug.
|
||||||
|
|
||||||
|
describe('processConfig()', () => {
|
||||||
|
it('should replace {project-root} placeholder', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Root is {project-root}/bmad');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, { root: '/home/user/project' });
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe('Root is /home/user/project/bmad');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace {module} placeholder', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Module: {module}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, { module: 'bmm' });
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe('Module: bmm');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace {version} placeholder with package version', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Version: {version}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath);
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toMatch(/Version: \d+\.\d+\.\d+/); // Semver format
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace {date} placeholder with current date', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Date: {date}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath);
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toMatch(/Date: \d{4}-\d{2}-\d{2}/); // YYYY-MM-DD
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace multiple placeholders', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Root: {project-root}, Module: {module}, Version: {version}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, {
|
||||||
|
root: '/project',
|
||||||
|
module: 'test',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toContain('Root: /project');
|
||||||
|
expect(content).toContain('Module: test');
|
||||||
|
expect(content).toMatch(/Version: \d+\.\d+/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace custom placeholders', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Custom: {custom-placeholder}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, { '{custom-placeholder}': 'custom-value' });
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe('Custom: custom-value');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape regex special characters in placeholders', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Path: {project-root}/test');
|
||||||
|
|
||||||
|
// Test that {project-root} doesn't get interpreted as regex
|
||||||
|
await config.processConfig(configPath, {
|
||||||
|
root: '/path/with/special$chars^',
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe('Path: /path/with/special$chars^/test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle placeholders with regex metacharacters in values', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, 'Value: {placeholder}');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, {
|
||||||
|
'{placeholder}': String.raw`value with $1 and \backslash`,
|
||||||
|
});
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe(String.raw`Value: value with $1 and \backslash`);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace all occurrences of placeholder', async () => {
|
||||||
|
const configPath = path.join(tmpDir, 'config.txt');
|
||||||
|
await fs.writeFile(configPath, '{module} is here and {module} is there and {module} everywhere');
|
||||||
|
|
||||||
|
await config.processConfig(configPath, { module: 'BMM' });
|
||||||
|
|
||||||
|
const content = await fs.readFile(configPath, 'utf8');
|
||||||
|
expect(content).toBe('BMM is here and BMM is there and BMM everywhere');
|
||||||
|
});
|
||||||
|
});

  describe('deepMerge()', () => {
    it('should merge shallow objects', () => {
      const target = { a: 1, b: 2 };
      const source = { b: 3, c: 4 };

      const result = config.deepMerge(target, source);

      expect(result).toEqual({ a: 1, b: 3, c: 4 });
    });

    it('should merge nested objects', () => {
      const target = { level1: { a: 1, b: 2 } };
      const source = { level1: { b: 3, c: 4 } };

      const result = config.deepMerge(target, source);

      expect(result.level1).toEqual({ a: 1, b: 3, c: 4 });
    });

    it('should not merge arrays (just replace)', () => {
      const target = { items: [1, 2, 3] };
      const source = { items: [4, 5] };

      const result = config.deepMerge(target, source);

      expect(result.items).toEqual([4, 5]); // Replaced, not merged
    });

    it('should handle null values', () => {
      const target = { a: 'value', b: null };
      const source = { a: null, c: 'new' };

      const result = config.deepMerge(target, source);

      expect(result).toEqual({ a: null, b: null, c: 'new' });
    });

    it('should not mutate original objects', () => {
      const target = { a: 1 };
      const source = { b: 2 };

      config.deepMerge(target, source);

      expect(target).toEqual({ a: 1 });
      expect(source).toEqual({ b: 2 });
    });
  });
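
A deepMerge() that satisfies these tests needs three properties: plain objects merge recursively, arrays and other non-object values from the source replace the target value outright, and neither input is mutated. A minimal sketch consistent with the assertions (assumed, not the module's actual code):

```js
// Hedged sketch of the merge behavior the tests above describe.
const isPlainObject = (v) => v !== null && typeof v === 'object' && !Array.isArray(v);

function deepMerge(target, source) {
  const result = { ...target }; // shallow copy, so the target is never mutated
  for (const [key, value] of Object.entries(source)) {
    result[key] =
      isPlainObject(value) && isPlainObject(result[key])
        ? deepMerge(result[key], value) // recurse into plain objects
        : value; // arrays, null, and primitives replace rather than merge
  }
  return result;
}
```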

  describe('mergeConfigs()', () => {
    it('should delegate to deepMerge', () => {
      const base = { setting1: 'base' };
      const override = { setting2: 'override' };

      const result = config.mergeConfigs(base, override);

      expect(result).toEqual({ setting1: 'base', setting2: 'override' });
    });
  });

  describe('isObject()', () => {
    it('should return true for plain objects', () => {
      expect(config.isObject({})).toBe(true);
      expect(config.isObject({ key: 'value' })).toBe(true);
    });

    it('should return false for arrays', () => {
      expect(config.isObject([])).toBe(false);
    });

    it('should return false for null', () => {
      expect(config.isObject(null)).toBeFalsy();
    });

    it('should return false for primitives', () => {
      expect(config.isObject('string')).toBe(false);
      expect(config.isObject(42)).toBe(false);
    });
  });
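
The expected isObject() contract is narrow: true only for plain objects, false for arrays, null, and primitives. A one-line check consistent with these assertions (assumed):

```js
// Hedged sketch: plain-object check matching the tests above.
const isObject = (value) => value !== null && typeof value === 'object' && !Array.isArray(value);
```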

  describe('getValue() and setValue()', () => {
    it('should get value by dot notation path', () => {
      const obj = {
        level1: {
          level2: {
            value: 'test',
          },
        },
      };

      const result = config.getValue(obj, 'level1.level2.value');

      expect(result).toBe('test');
    });

    it('should set value by dot notation path', () => {
      const obj = {
        level1: {
          level2: {},
        },
      };

      config.setValue(obj, 'level1.level2.value', 'new value');

      expect(obj.level1.level2.value).toBe('new value');
    });

    it('should return default value for non-existent path', () => {
      const obj = { a: { b: 'value' } };

      const result = config.getValue(obj, 'a.c.d', 'default');

      expect(result).toBe('default');
    });

    it('should return null default when path not found', () => {
      const obj = { a: { b: 'value' } };

      const result = config.getValue(obj, 'a.c.d');

      expect(result).toBeNull();
    });

    it('should handle simple (non-nested) paths', () => {
      const obj = { key: 'value' };

      expect(config.getValue(obj, 'key')).toBe('value');

      config.setValue(obj, 'newKey', 'newValue');
      expect(obj.newKey).toBe('newValue');
    });

    it('should create intermediate objects when setting deep paths', () => {
      const obj = {};

      config.setValue(obj, 'a.b.c.d', 'deep value');

      expect(obj.a.b.c.d).toBe('deep value');
    });
  });
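
Dot-notation access of this kind usually amounts to a short pair of helpers: walk the path for reads, falling back to a default (null when none is given), and create intermediate objects on writes. A sketch consistent with the assertions (assumed, not the module's code):

```js
// Hedged sketch of dot-notation access matching the tests above.
function getValue(obj, dottedPath, defaultValue = null) {
  let current = obj;
  for (const key of dottedPath.split('.')) {
    if (current === null || typeof current !== 'object' || !(key in current)) {
      return defaultValue; // missing segment -> default (null if not provided)
    }
    current = current[key];
  }
  return current;
}

function setValue(obj, dottedPath, value) {
  const keys = dottedPath.split('.');
  let current = obj;
  for (const key of keys.slice(0, -1)) {
    if (typeof current[key] !== 'object' || current[key] === null) {
      current[key] = {}; // create intermediate objects for deep paths
    }
    current = current[key];
  }
  current[keys.at(-1)] = value;
}
```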

  describe('validateConfig()', () => {
    it('should validate required fields', () => {
      const cfg = { field1: 'value1' };
      const schema = {
        required: ['field1', 'field2'],
      };

      const result = config.validateConfig(cfg, schema);

      expect(result.valid).toBe(false);
      expect(result.errors).toContain('Missing required field: field2');
    });

    it('should pass when all required fields present', () => {
      const cfg = { field1: 'value1', field2: 'value2' };
      const schema = {
        required: ['field1', 'field2'],
      };

      const result = config.validateConfig(cfg, schema);

      expect(result.valid).toBe(true);
      expect(result.errors).toHaveLength(0);
    });

    it('should validate field types', () => {
      const cfg = {
        stringField: 'text',
        numberField: '42', // Wrong type
        arrayField: [1, 2, 3],
        objectField: 'not-object', // Wrong type
        boolField: true,
      };

      const schema = {
        properties: {
          stringField: { type: 'string' },
          numberField: { type: 'number' },
          arrayField: { type: 'array' },
          objectField: { type: 'object' },
          boolField: { type: 'boolean' },
        },
      };

      const result = config.validateConfig(cfg, schema);

      expect(result.valid).toBe(false);
      expect(result.errors.some((e) => e.includes('numberField'))).toBe(true);
      expect(result.errors.some((e) => e.includes('objectField'))).toBe(true);
    });

    it('should validate enum values', () => {
      const cfg = { level: 'expert' };
      const schema = {
        properties: {
          level: { type: 'string', enum: ['beginner', 'intermediate', 'advanced'] },
        },
      };

      const result = config.validateConfig(cfg, schema);

      expect(result.valid).toBe(false);
      expect(result.errors.some((e) => e.includes('must be one of'))).toBe(true);
    });

    it('should pass validation for valid enum value', () => {
      const cfg = { level: 'intermediate' };
      const schema = {
        properties: {
          level: { type: 'string', enum: ['beginner', 'intermediate', 'advanced'] },
        },
      };

      const result = config.validateConfig(cfg, schema);

      expect(result.valid).toBe(true);
    });

    it('should return warnings array', () => {
      const cfg = { field: 'value' };
      const schema = { required: ['field'] };

      const result = config.validateConfig(cfg, schema);

      expect(result.warnings).toBeDefined();
      expect(Array.isArray(result.warnings)).toBe(true);
    });
  });
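
Taken together, these tests define the validator's contract: a `{ valid, errors, warnings }` result, a required-field pass, per-property type checks (with arrays distinguished from objects), and enum membership checks. A compact sketch of that contract (assumed, not the module's actual code):

```js
// Hedged sketch of the { valid, errors, warnings } validation shape.
function validateConfig(cfg, schema = {}) {
  const errors = [];
  const warnings = [];

  for (const field of schema.required ?? []) {
    if (!(field in cfg)) errors.push(`Missing required field: ${field}`);
  }

  for (const [field, rules] of Object.entries(schema.properties ?? {})) {
    if (!(field in cfg)) continue;
    const value = cfg[field];
    const actualType = Array.isArray(value) ? 'array' : typeof value;
    if (rules.type && actualType !== rules.type) {
      errors.push(`Field ${field} must be of type ${rules.type}`);
    }
    if (rules.enum && !rules.enum.includes(value)) {
      errors.push(`Field ${field} must be one of: ${rules.enum.join(', ')}`);
    }
  }

  return { valid: errors.length === 0, errors, warnings };
}
```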

  describe('edge cases', () => {
    it('should handle empty YAML file', async () => {
      const configPath = path.join(tmpDir, 'empty.yaml');
      await fs.writeFile(configPath, '');

      const result = await config.loadYaml(configPath);

      expect(result).toBeNull(); // Empty YAML parses to null
    });

    it('should handle YAML with only comments', async () => {
      const configPath = path.join(tmpDir, 'comments.yaml');
      await fs.writeFile(configPath, '# Just a comment\n# Another comment\n');

      const result = await config.loadYaml(configPath);

      expect(result).toBeNull();
    });

    it('should handle very deep object nesting', () => {
      const deep = {
        l1: { l2: { l3: { l4: { l5: { l6: { l7: { l8: { value: 'deep' } } } } } } } },
      };
      const override = {
        l1: { l2: { l3: { l4: { l5: { l6: { l7: { l8: { value: 'updated' } } } } } } } },
      };

      const result = config.deepMerge(deep, override);

      expect(result.l1.l2.l3.l4.l5.l6.l7.l8.value).toBe('updated');
    });
  });
});

@ -0,0 +1,558 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { DependencyResolver } from '../../../tools/cli/installers/lib/core/dependency-resolver.js';
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
import fs from 'fs-extra';
import path from 'node:path';

describe('DependencyResolver - Advanced Scenarios', () => {
  let tmpDir;
  let bmadDir;

  beforeEach(async () => {
    tmpDir = await createTempDir();
    bmadDir = path.join(tmpDir, 'src');
    await fs.ensureDir(path.join(bmadDir, 'core', 'agents'));
    await fs.ensureDir(path.join(bmadDir, 'core', 'tasks'));
    await fs.ensureDir(path.join(bmadDir, 'core', 'templates'));
    await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'agents'));
    await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'tasks'));
    await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'templates'));
  });

  afterEach(async () => {
    await cleanupTempDir(tmpDir);
  });

  describe('module path resolution', () => {
    it('should resolve bmad/bmm/tasks/task.md (module path)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/bmm/tasks/analyze.md"]
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'modules/bmm/tasks/analyze.md', 'BMM Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('bmm'))).toBe(true);
      expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
    });

    it('should handle glob in module path bmad/bmm/tasks/*.md', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/bmm/tasks/*.md"]
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'modules/bmm/tasks/task1.md', 'Task 1');
      await createTestFile(bmadDir, 'modules/bmm/tasks/task2.md', 'Task 2');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']); // Include bmm module

      // Should resolve glob pattern
      expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle non-existent module path gracefully', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/nonexistent/tasks/task.md"]
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should not crash, just skip missing dependency
      expect(result.primaryFiles).toHaveLength(1);
    });
  });
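
These cases imply a path-mapping step: a dependency written as `{project-root}/bmad/<module>/<rest>` has to be located in the source tree, where `core` lives at `<bmadDir>/core/...` and any other module under `<bmadDir>/modules/<module>/...`, and an unresolvable path is skipped rather than thrown. One plausible mapping helper, with a hypothetical name:

```js
import path from 'node:path';

// Hedged sketch: map a '{project-root}/bmad/...' reference onto the src layout.
function mapBmadReference(bmadDir, ref) {
  const rest = ref.replace('{project-root}/bmad/', ''); // e.g. 'bmm/tasks/analyze.md'
  const [moduleName, ...parts] = rest.split('/');
  return moduleName === 'core'
    ? path.join(bmadDir, 'core', ...parts)
    : path.join(bmadDir, 'modules', moduleName, ...parts);
}
```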

  describe('relative glob patterns', () => {
    it('should resolve relative glob patterns ../tasks/*.md', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["../tasks/*.md"]
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
      await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
    });

    it('should handle glob pattern with no matches', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["../tasks/nonexistent-*.md"]
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should handle gracefully - just the agent
      expect(result.primaryFiles).toHaveLength(1);
    });

    it('should handle glob in non-existent directory', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["../nonexistent/*.md"]
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should handle gracefully
      expect(result.primaryFiles).toHaveLength(1);
    });
  });

  describe('template dependencies', () => {
    it('should resolve template with {project-root} prefix', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
      await createTestFile(
        bmadDir,
        'core/tasks/task.md',
        `---
template: "{project-root}/bmad/core/templates/form.yaml"
---
Task content`,
      );
      await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Template dependency should be resolved
      expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should resolve template from module path', async () => {
      await createTestFile(bmadDir, 'modules/bmm/agents/agent.md', '<agent>BMM Agent</agent>');
      await createTestFile(
        bmadDir,
        'modules/bmm/tasks/task.md',
        `---
template: "{project-root}/bmad/bmm/templates/prd-template.yaml"
---
Task`,
      );
      await createTestFile(bmadDir, 'modules/bmm/templates/prd-template.yaml', 'template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']);

      // Should resolve files from BMM module
      expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle missing template gracefully', async () => {
      await createTestFile(
        bmadDir,
        'core/tasks/task.md',
        `---
template: "../templates/missing.yaml"
---
Task`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should not crash
      expect(result).toBeDefined();
    });
  });

  describe('bmad-path type resolution', () => {
    it('should resolve bmad-path dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command exec="bmad/core/tasks/analyze" />
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
    });

    it('should resolve bmad-path for module files', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command exec="bmad/bmm/tasks/create-prd" />
</agent>`,
      );
      await createTestFile(bmadDir, 'modules/bmm/tasks/create-prd.md', 'PRD Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('create-prd.md'))).toBe(true);
    });

    it('should handle non-existent bmad-path gracefully', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command exec="bmad/core/tasks/missing" />
</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should not crash
      expect(result.primaryFiles).toHaveLength(1);
    });
  });

  describe('command resolution with modules', () => {
    it('should search multiple modules for @task-name', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
Use @task-custom-task
</agent>`,
      );
      await createTestFile(bmadDir, 'modules/bmm/tasks/custom-task.md', 'Custom Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']);

      expect([...result.allFiles].some((f) => f.includes('custom-task.md'))).toBe(true);
    });

    it('should search multiple modules for @agent-name', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/main.md',
        `<agent>
Use @agent-pm
</agent>`,
      );
      await createTestFile(bmadDir, 'modules/bmm/agents/pm.md', '<agent>PM</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']);

      expect([...result.allFiles].some((f) => f.includes('pm.md'))).toBe(true);
    });

    it('should handle bmad/ path with 4+ segments', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
Reference bmad/core/tasks/nested/deep/task
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/nested/deep/task.md', 'Deep task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Implementation may or may not support deeply nested paths in commands
      // Just verify it doesn't crash
      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle bmad path with .md extension already', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
Use bmad/core/tasks/task.md explicitly
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('task.md'))).toBe(true);
    });
  });

  describe('verbose mode', () => {
    it('should include console output when verbose is true', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Test</agent>');

      const resolver = new DependencyResolver();

      // Mock console.log to capture output
      const logs = [];
      const originalLog = console.log;
      console.log = (...args) => logs.push(args.join(' '));

      await resolver.resolve(bmadDir, [], { verbose: true });

      console.log = originalLog;

      // Should have logged something in verbose mode
      expect(logs.length).toBeGreaterThan(0);
    });

    it('should not log when verbose is false', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Test</agent>');

      const resolver = new DependencyResolver();

      const logs = [];
      const originalLog = console.log;
      console.log = (...args) => logs.push(args.join(' '));

      await resolver.resolve(bmadDir, [], { verbose: false });

      console.log = originalLog;

      // Should not have logged in non-verbose mode
      // (There might be warns but no regular logs)
      expect(logs.length).toBe(0);
    });
  });
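
The manual `console.log` swap above works; since the suite already runs under Vitest, `vi.spyOn(console, 'log')` is an equivalent capture that restores cleanly. A possible variant for the body of the same it() block (identifiers such as `resolver` and `bmadDir` come from the surrounding test):

```js
import { vi } from 'vitest';

// Equivalent capture using Vitest's spy API instead of reassigning console.log.
const logSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
await resolver.resolve(bmadDir, [], { verbose: true });
expect(logSpy).toHaveBeenCalled(); // at least one verbose line
logSpy.mockRestore();
```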

  describe('createWebBundle()', () => {
    it('should create bundle with metadata', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');

      const resolver = new DependencyResolver();
      const resolution = await resolver.resolve(bmadDir, []);

      const bundle = await resolver.createWebBundle(resolution);

      expect(bundle.metadata).toBeDefined();
      expect(bundle.metadata.modules).toContain('core');
      expect(bundle.metadata.totalFiles).toBeGreaterThan(0);
    });

    it('should organize bundle by file type', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
      await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');

      const resolver = new DependencyResolver();
      const resolution = await resolver.resolve(bmadDir, []);

      const bundle = await resolver.createWebBundle(resolution);

      expect(bundle.agents).toBeDefined();
      expect(bundle.tasks).toBeDefined();
      expect(bundle.templates).toBeDefined();
    });
  });
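
The assertions only constrain the bundle's shape, not how it is built; read together they suggest a structure roughly like the following (illustrative values only, not taken from the implementation):

```js
// Illustrative shape inferred from the expectations above.
const exampleBundle = {
  metadata: { modules: ['core'], totalFiles: 3 },
  agents: [{ path: 'core/agents/agent.md', content: '<agent>Agent</agent>' }],
  tasks: [{ path: 'core/tasks/task.md', content: 'Task' }],
  templates: [{ path: 'core/templates/template.yaml', content: 'template' }],
};
```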

  describe('single string dependency (not array)', () => {
    it('should handle single string dependency (converted to array)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: "{project-root}/bmad/core/tasks/task.md"
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Single string should be converted to array internally
      expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
    });

    it('should handle single string template', async () => {
      await createTestFile(
        bmadDir,
        'core/tasks/task.md',
        `---
template: "../templates/form.yaml"
---
Task`,
      );
      await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('form.yaml'))).toBe(true);
    });
  });

  describe('missing dependency tracking', () => {
    it('should track missing relative file dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["../tasks/missing-file.md"]
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Missing dependency should be tracked
      expect(result.missing.length).toBeGreaterThanOrEqual(0);
      // Should not crash
      expect(result).toBeDefined();
    });
  });

  describe('reportResults()', () => {
    it('should report results with file counts', async () => {
      await createTestFile(bmadDir, 'core/agents/agent1.md', '<agent>1</agent>');
      await createTestFile(bmadDir, 'core/agents/agent2.md', '<agent>2</agent>');
      await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
      await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
      await createTestFile(bmadDir, 'core/templates/template.yaml', 'Template');

      const resolver = new DependencyResolver();

      // Mock console.log
      const logs = [];
      const originalLog = console.log;
      console.log = (...args) => logs.push(args.join(' '));

      const result = await resolver.resolve(bmadDir, [], { verbose: true });

      console.log = originalLog;

      // Should have reported module statistics
      expect(logs.some((log) => log.includes('CORE'))).toBe(true);
      expect(logs.some((log) => log.includes('Agents:'))).toBe(true);
      expect(logs.some((log) => log.includes('Tasks:'))).toBe(true);
    });

    it('should report missing dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["../tasks/missing.md"]
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();

      const logs = [];
      const originalLog = console.log;
      console.log = (...args) => logs.push(args.join(' '));

      await resolver.resolve(bmadDir, [], { verbose: true });

      console.log = originalLog;

      // May log warning about missing dependencies
      expect(logs.length).toBeGreaterThan(0);
    });
  });

  describe('file without .md extension in command', () => {
    it('should add .md extension to bmad/ commands without extension', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
Use bmad/core/tasks/analyze without extension
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Analyze');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
    });
  });

  describe('module structure detection', () => {
    it('should detect source directory structure (src/)', async () => {
      // Default structure already uses src/
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Core</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should detect installed directory structure (no src/)', async () => {
      // Create installed structure
      const installedDir = path.join(tmpDir, 'installed');
      await fs.ensureDir(path.join(installedDir, 'core', 'agents'));
      await fs.ensureDir(path.join(installedDir, 'modules', 'bmm', 'agents'));
      await createTestFile(installedDir, 'core/agents/agent.md', '<agent>Core</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(installedDir, []);

      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
    });
  });

  describe('dependency deduplication', () => {
    it('should not include same file twice', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent1.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/shared.md"]
---
<agent>1</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/agents/agent2.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/shared.md"]
---
<agent>2</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/shared.md', 'Shared');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should have 2 agents + 1 shared task = 3 unique files
      expect(result.allFiles).toHaveLength(3);
    });
  });
});

@ -0,0 +1,796 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { DependencyResolver } from '../../../tools/cli/installers/lib/core/dependency-resolver.js';
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
import fs from 'fs-extra';
import path from 'node:path';

describe('DependencyResolver', () => {
  let tmpDir;
  let bmadDir;

  beforeEach(async () => {
    tmpDir = await createTempDir();
    // Create structure: tmpDir/src/core and tmpDir/src/modules/
    bmadDir = path.join(tmpDir, 'src');
    await fs.ensureDir(path.join(bmadDir, 'core', 'agents'));
    await fs.ensureDir(path.join(bmadDir, 'core', 'tasks'));
    await fs.ensureDir(path.join(bmadDir, 'core', 'templates'));
    await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'agents'));
    await fs.ensureDir(path.join(bmadDir, 'modules', 'bmm', 'tasks'));
  });

  afterEach(async () => {
    await cleanupTempDir(tmpDir);
  });

  describe('basic resolution', () => {
    it('should resolve core agents with no dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/simple.md',
        `---
name: simple
---
<agent>Simple agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles).toHaveLength(1);
      expect(result.primaryFiles[0].type).toBe('agent');
      expect(result.primaryFiles[0].module).toBe('core');
      expect(result.allFiles).toHaveLength(1);
    });

    it('should resolve multiple agents from same module', async () => {
      await createTestFile(bmadDir, 'core/agents/agent1.md', '<agent>Agent 1</agent>');
      await createTestFile(bmadDir, 'core/agents/agent2.md', '<agent>Agent 2</agent>');
      await createTestFile(bmadDir, 'core/agents/agent3.md', '<agent>Agent 3</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles).toHaveLength(3);
      expect(result.allFiles).toHaveLength(3);
    });

    it('should always include core module', async () => {
      await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']);

      // Core should be included even though only 'bmm' was requested
      expect(result.byModule.core).toBeDefined();
    });

    it('should skip agents with localskip="true"', async () => {
      await createTestFile(bmadDir, 'core/agents/normal.md', '<agent>Normal agent</agent>');
      await createTestFile(bmadDir, 'core/agents/webonly.md', '<agent localskip="true">Web only agent</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles).toHaveLength(1);
      expect(result.primaryFiles[0].name).toBe('normal');
    });
  });

  describe('path resolution variations', () => {
    it('should resolve {project-root}/bmad/core/tasks/foo.md dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task.md"]
---
<agent>Agent with task dependency</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task content');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(2);
      expect(result.dependencies.size).toBeGreaterThan(0);
      expect([...result.dependencies].some((d) => d.includes('task.md'))).toBe(true);
    });

    it('should resolve relative path dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
template: "../templates/template.yaml"
---
<agent>Agent with template</agent>`,
      );
      await createTestFile(bmadDir, 'core/templates/template.yaml', 'template: data');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(2);
      expect([...result.dependencies].some((d) => d.includes('template.yaml'))).toBe(true);
    });

    it('should resolve glob pattern dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/*.md"]
---
<agent>Agent with multiple tasks</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
      await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
      await createTestFile(bmadDir, 'core/tasks/task3.md', 'Task 3');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should find agent + 3 tasks
      expect(result.allFiles).toHaveLength(4);
    });

    it('should resolve array of dependencies', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies:
  - "{project-root}/bmad/core/tasks/task1.md"
  - "{project-root}/bmad/core/tasks/task2.md"
  - "../templates/template.yaml"
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task1.md', 'Task 1');
      await createTestFile(bmadDir, 'core/tasks/task2.md', 'Task 2');
      await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(4); // agent + 2 tasks + template
    });
  });

  describe('command reference resolution', () => {
    it('should resolve @task-name references', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
Use @task-analyze for analysis
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/analyze.md', 'Analyze task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
      expect([...result.allFiles].some((f) => f.includes('analyze.md'))).toBe(true);
    });

    it('should resolve @agent-name references', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/main.md',
        `<agent>
Reference @agent-helper for help
</agent>`,
      );
      await createTestFile(bmadDir, 'core/agents/helper.md', '<agent>Helper</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(2);
      expect([...result.allFiles].some((f) => f.includes('helper.md'))).toBe(true);
    });

    it('should resolve bmad/module/type/name references', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
See bmad/core/tasks/review
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/review.md', 'Review task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('review.md'))).toBe(true);
    });
  });

  describe('exec and tmpl attribute parsing', () => {
    it('should parse exec attributes from command tags', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command exec="{project-root}/bmad/core/tasks/task.md" />
</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('task.md'))).toBe(true);
    });

    it('should parse tmpl attributes from command tags', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command tmpl="../templates/form.yaml" />
</agent>`,
      );
      await createTestFile(bmadDir, 'core/templates/form.yaml', 'template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect([...result.allFiles].some((f) => f.includes('form.yaml'))).toBe(true);
    });

    it('should ignore exec="*" wildcard', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `<agent>
<command exec="*" description="Dynamic" />
</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should only have the agent itself
      expect(result.primaryFiles).toHaveLength(1);
    });
  });
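
Pulling exec/tmpl targets out of `<command ...>` tags, while ignoring the `exec="*"` wildcard, can be done with a single attribute regex. A sketch with a hypothetical helper name, not the resolver's actual parser:

```js
// Hedged sketch: extract exec/tmpl attribute values from <command ...> tags,
// skipping the special exec="*" wildcard.
function extractCommandAttributes(content) {
  const refs = [];
  for (const match of content.matchAll(/\b(?:exec|tmpl)="([^"]+)"/g)) {
    if (match[1] !== '*') refs.push(match[1]);
  }
  return refs;
}
```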

  describe('multi-pass dependency resolution', () => {
    it('should resolve single-level dependencies (A→B)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
---
<agent>Agent A</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task-b.md', 'Task B');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(2);
      // Primary files includes both agents and tasks from selected modules
      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
      expect(result.dependencies.size).toBeGreaterThanOrEqual(1);
    });

    it('should resolve two-level dependencies (A→B→C)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
---
<agent>Agent A</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-b.md',
        `---
template: "../templates/template-c.yaml"
---
Task B content`,
      );
      await createTestFile(bmadDir, 'core/templates/template-c.yaml', 'template: data');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(3);
      // Primary files includes agents and tasks
      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
      // Total dependencies (direct + transitive) should be at least 2
      const totalDeps = result.dependencies.size + result.transitiveDependencies.size;
      expect(totalDeps).toBeGreaterThanOrEqual(1);
    });

    it('should resolve three-level dependencies (A→B→C→D)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
---
<agent>A</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-b.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-c.md"]
---
Task B`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-c.md',
        `---
template: "../templates/template-d.yaml"
---
Task C`,
      );
      await createTestFile(bmadDir, 'core/templates/template-d.yaml', 'Template D');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(4);
    });

    it('should resolve multiple branches (A→B, A→C)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies:
  - "{project-root}/bmad/core/tasks/task-b.md"
  - "{project-root}/bmad/core/tasks/task-c.md"
---
<agent>A</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task-b.md', 'Task B');
      await createTestFile(bmadDir, 'core/tasks/task-c.md', 'Task C');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.allFiles).toHaveLength(3);
      expect(result.dependencies.size).toBe(2);
    });

    it('should deduplicate diamond pattern (A→B,C; B,C→D)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies:
  - "{project-root}/bmad/core/tasks/task-b.md"
  - "{project-root}/bmad/core/tasks/task-c.md"
---
<agent>A</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-b.md',
        `---
template: "../templates/shared.yaml"
---
Task B`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-c.md',
        `---
template: "../templates/shared.yaml"
---
Task C`,
      );
      await createTestFile(bmadDir, 'core/templates/shared.yaml', 'Shared template');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // A + B + C + shared = 4 unique files (D appears twice but should be deduped)
      expect(result.allFiles).toHaveLength(4);
    });
  });

  describe('circular dependency detection', () => {
    it('should detect direct circular dependency (A→B→A)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
---
<agent>A</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-b.md',
        `---
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
---
Task B`,
      );

      const resolver = new DependencyResolver();

      // Should not hang or crash
      const resultPromise = resolver.resolve(bmadDir, []);
      await expect(resultPromise).resolves.toBeDefined();

      const result = await resultPromise;
      // Should process both files without infinite loop
      expect(result.allFiles.length).toBeGreaterThanOrEqual(2);
    }, 5000); // 5 second timeout to ensure no infinite loop

    it('should detect indirect circular dependency (A→B→C→A)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-b.md"]
---
<agent>A</agent>`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-b.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/task-c.md"]
---
Task B`,
      );
      await createTestFile(
        bmadDir,
        'core/tasks/task-c.md',
        `---
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
---
Task C`,
      );

      const resolver = new DependencyResolver();
      const resultPromise = resolver.resolve(bmadDir, []);

      await expect(resultPromise).resolves.toBeDefined();
      const result = await resultPromise;

      // Should include all 3 files without duplicates
      expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
    }, 5000);

    it('should handle self-reference (A→A)', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent-a.md',
        `---
dependencies: ["{project-root}/bmad/core/agents/agent-a.md"]
---
<agent>A</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Should include the file once, not infinite times
      expect(result.allFiles).toHaveLength(1);
    }, 5000);
  });
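
Surviving these cycles only requires that the resolver remember which files it has already visited before following their dependencies. A conventional guard, sketched with a hypothetical callback for loading a file's dependency list (not the actual resolver internals):

```js
// Hedged sketch of a visited-set guard against dependency cycles.
async function collectFiles(filePath, loadDependencies, visited = new Set()) {
  if (visited.has(filePath)) return visited; // already processed: break the cycle
  visited.add(filePath);
  for (const dep of await loadDependencies(filePath)) {
    await collectFiles(dep, loadDependencies, visited);
  }
  return visited;
}
```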

  describe('command reference parsing', () => {
    describe('parseCommandReferences()', () => {
      it('should extract @task- references', () => {
        const resolver = new DependencyResolver();
        const content = 'Use @task-analyze for analysis\nThen @task-review';

        const refs = resolver.parseCommandReferences(content);

        expect(refs).toContain('@task-analyze');
        expect(refs).toContain('@task-review');
      });

      it('should extract @agent- references', () => {
        const resolver = new DependencyResolver();
        const content = 'Call @agent-architect then @agent-developer';

        const refs = resolver.parseCommandReferences(content);

        expect(refs).toContain('@agent-architect');
        expect(refs).toContain('@agent-developer');
      });

      it('should extract bmad/ path references', () => {
        const resolver = new DependencyResolver();
        const content = 'See bmad/core/agents/analyst and bmad/bmm/tasks/review';

        const refs = resolver.parseCommandReferences(content);

        expect(refs).toContain('bmad/core/agents/analyst');
        expect(refs).toContain('bmad/bmm/tasks/review');
      });

      it('should extract @bmad- references', () => {
        const resolver = new DependencyResolver();
        const content = 'Use @bmad-master command';

        const refs = resolver.parseCommandReferences(content);

        expect(refs).toContain('@bmad-master');
      });

      it('should handle multiple reference types in same content', () => {
        const resolver = new DependencyResolver();
        const content = `
Use @task-analyze for analysis
Then run @agent-architect
Finally check bmad/core/tasks/review
`;

        const refs = resolver.parseCommandReferences(content);

        expect(refs.length).toBeGreaterThanOrEqual(3);
      });
    });
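
The reference formats asserted here (`@task-*`, `@agent-*`, `@bmad-*`, and `bmad/...` paths) can be captured with a handful of regexes. A sketch consistent with these expectations (assumed, not the resolver's actual patterns):

```js
// Hedged sketch of the reference patterns the assertions above describe.
function parseCommandReferences(content) {
  const patterns = [
    /@task-[\w-]+/g, // @task-analyze, @task-review
    /@agent-[\w-]+/g, // @agent-architect
    /@bmad-[\w-]+/g, // @bmad-master
    /\bbmad\/[\w./-]+/g, // bmad/core/tasks/review style paths
  ];
  return patterns.flatMap((re) => content.match(re) ?? []);
}
```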

    describe('parseFileReferences()', () => {
      it('should extract exec attribute paths', () => {
        const resolver = new DependencyResolver();
        const content = '<command exec="{project-root}/bmad/core/tasks/foo.md" />';

        const refs = resolver.parseFileReferences(content);

        expect(refs).toContain('/bmad/core/tasks/foo.md');
      });

      it('should extract tmpl attribute paths', () => {
        const resolver = new DependencyResolver();
        const content = '<command tmpl="../templates/bar.yaml" />';

        const refs = resolver.parseFileReferences(content);

        expect(refs).toContain('../templates/bar.yaml');
      });

      it('should extract relative file paths', () => {
        const resolver = new DependencyResolver();
        const content = 'Load "./data/config.json" and "../templates/form.yaml"';

        const refs = resolver.parseFileReferences(content);

        expect(refs).toContain('./data/config.json');
        expect(refs).toContain('../templates/form.yaml');
      });

      it('should skip exec="*" wildcards', () => {
        const resolver = new DependencyResolver();
        const content = '<command exec="*" description="Dynamic" />';

        const refs = resolver.parseFileReferences(content);

        // Should not include "*"
        expect(refs).not.toContain('*');
      });
    });
  });

  describe('module organization', () => {
    it('should organize files by module correctly', async () => {
      await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
      await createTestFile(bmadDir, 'modules/bmm/agents/bmm-agent.md', '<agent>BMM</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['bmm']);

      expect(result.byModule.core).toBeDefined();
      expect(result.byModule.bmm).toBeDefined();
      expect(result.byModule.core.agents).toHaveLength(1);
      expect(result.byModule.bmm.agents).toHaveLength(1);
    });

    it('should categorize files by type', async () => {
      await createTestFile(bmadDir, 'core/agents/agent.md', '<agent>Agent</agent>');
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');
      await createTestFile(bmadDir, 'core/templates/template.yaml', 'template');

      const resolver = new DependencyResolver();
      const files = [
        path.join(bmadDir, 'core/agents/agent.md'),
        path.join(bmadDir, 'core/tasks/task.md'),
        path.join(bmadDir, 'core/templates/template.yaml'),
      ];

      const organized = resolver.organizeByModule(bmadDir, new Set(files));

      expect(organized.core.agents).toHaveLength(1);
      expect(organized.core.tasks).toHaveLength(1);
      expect(organized.core.templates).toHaveLength(1);
    });

    it('should treat brain-tech as data, not tasks', async () => {
      await createTestFile(bmadDir, 'core/tasks/brain-tech/data.csv', 'col1,col2\nval1,val2');

      const resolver = new DependencyResolver();
      const files = [path.join(bmadDir, 'core/tasks/brain-tech/data.csv')];

      const organized = resolver.organizeByModule(bmadDir, new Set(files));

      expect(organized.core.data).toHaveLength(1);
      expect(organized.core.tasks).toHaveLength(0);
    });
  });

  describe('getModuleFromPath()', () => {
    it('should extract module from src/core path', () => {
      const resolver = new DependencyResolver();
      const filePath = path.join(bmadDir, 'core/agents/agent.md');

      const module = resolver.getModuleFromPath(bmadDir, filePath);

      expect(module).toBe('core');
    });

    it('should extract module from src/modules/bmm path', () => {
      const resolver = new DependencyResolver();
      const filePath = path.join(bmadDir, 'modules/bmm/agents/pm.md');

      const module = resolver.getModuleFromPath(bmadDir, filePath);

      expect(module).toBe('bmm');
    });

    it('should handle installed directory structure', async () => {
      // Create installed structure (no src/ prefix)
      const installedDir = path.join(tmpDir, 'installed');
      await fs.ensureDir(path.join(installedDir, 'core/agents'));
      await fs.ensureDir(path.join(installedDir, 'modules/bmm/agents'));

      const resolver = new DependencyResolver();

      const coreFile = path.join(installedDir, 'core/agents/agent.md');
      const moduleFile = path.join(installedDir, 'modules/bmm/agents/pm.md');

      expect(resolver.getModuleFromPath(installedDir, coreFile)).toBe('core');
      expect(resolver.getModuleFromPath(installedDir, moduleFile)).toBe('bmm');
    });
  });
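
Deriving the module name is a matter of looking at the path relative to the resolved base directory: a first segment of `core` is the core module, while `modules/<name>/...` yields `<name>`. A sketch consistent with both layouts exercised above (assumed, not the actual method body):

```js
import path from 'node:path';

// Hedged sketch: derive the module name from a file's location relative to bmadDir.
function getModuleFromPath(bmadDir, filePath) {
  const parts = path.relative(bmadDir, filePath).split(path.sep);
  return parts[0] === 'modules' ? parts[1] : parts[0]; // 'core/...' -> core, 'modules/bmm/...' -> bmm
}
```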

  describe('edge cases', () => {
    it('should handle malformed YAML frontmatter', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/bad-yaml.md',
        `---
dependencies: [invalid: yaml: here
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();

      // Should not crash, just warn and continue
      await expect(resolver.resolve(bmadDir, [])).resolves.toBeDefined();
    });

    it('should handle backticks in YAML values', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/backticks.md',
        `---
name: \`test\`
dependencies: [\`{project-root}/bmad/core/tasks/task.md\`]
---
<agent>Agent</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/task.md', 'Task');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      // Backticks should be pre-processed
      expect(result.allFiles.length).toBeGreaterThanOrEqual(1);
    });

    it('should handle missing dependencies gracefully', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/missing.md"]
---
<agent>Agent</agent>`,
      );
      // Don't create missing.md

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles.length).toBeGreaterThanOrEqual(1);
      // Implementation may or may not track missing dependencies
      // Just verify it doesn't crash
      expect(result).toBeDefined();
    });

    it('should handle empty dependencies array', async () => {
      await createTestFile(
        bmadDir,
        'core/agents/agent.md',
        `---
dependencies: []
---
<agent>Agent</agent>`,
      );

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles).toHaveLength(1);
      expect(result.allFiles).toHaveLength(1);
    });

    it('should handle missing frontmatter', async () => {
      await createTestFile(bmadDir, 'core/agents/no-frontmatter.md', '<agent>Agent</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, []);

      expect(result.primaryFiles).toHaveLength(1);
      expect(result.allFiles).toHaveLength(1);
    });

    it('should handle non-existent module directory', async () => {
      // Create at least one core file so core module appears
      await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');

      const resolver = new DependencyResolver();
      const result = await resolver.resolve(bmadDir, ['nonexistent']);

      // Should include core even though nonexistent module not found
      expect(result.byModule.core).toBeDefined();
      expect(result.byModule.nonexistent).toBeUndefined();
    });
  });

  describe('cross-module dependencies', () => {
    it('should resolve dependencies across modules', async () => {
      await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
      await createTestFile(
        bmadDir,
        'modules/bmm/agents/bmm-agent.md',
        `---
dependencies: ["{project-root}/bmad/core/tasks/shared-task.md"]
---
<agent>BMM Agent</agent>`,
      );
      await createTestFile(bmadDir, 'core/tasks/shared-task.md', 'Shared task');
|
||||||
|
|
||||||
|
const resolver = new DependencyResolver();
|
||||||
|
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||||
|
|
||||||
|
// Should include: core agent + bmm agent + shared task
|
||||||
|
expect(result.allFiles.length).toBeGreaterThanOrEqual(3);
|
||||||
|
expect(result.byModule.core).toBeDefined();
|
||||||
|
expect(result.byModule.bmm).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should resolve module tasks', async () => {
|
||||||
|
await createTestFile(bmadDir, 'core/agents/core-agent.md', '<agent>Core</agent>');
|
||||||
|
await createTestFile(bmadDir, 'modules/bmm/agents/pm.md', '<agent>PM</agent>');
|
||||||
|
await createTestFile(bmadDir, 'modules/bmm/tasks/create-prd.md', 'Create PRD task');
|
||||||
|
|
||||||
|
const resolver = new DependencyResolver();
|
||||||
|
const result = await resolver.resolve(bmadDir, ['bmm']);
|
||||||
|
|
||||||
|
expect(result.byModule.bmm.agents).toHaveLength(1);
|
||||||
|
expect(result.byModule.bmm.tasks).toHaveLength(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
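
The `getModuleFromPath()` tests above pin down the module-extraction contract: `core/...` maps to `core` and `modules/bmm/...` maps to `bmm`, for both source and installed layouts. A minimal sketch of that rule, assuming only what the assertions show (the function name `getModuleFromPathSketch` is hypothetical and not the actual DependencyResolver implementation):

```js
import path from 'node:path';

// Hypothetical sketch of the module-extraction rule the tests assert.
function getModuleFromPathSketch(baseDir, filePath) {
  const parts = path.relative(baseDir, filePath).split(path.sep);
  // 'core/agents/agent.md'     -> 'core'
  // 'modules/bmm/agents/pm.md' -> 'bmm'
  return parts[0] === 'modules' ? parts[1] : parts[0];
}
```
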
@ -0,0 +1,243 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
import fs from 'fs-extra';
import path from 'node:path';

describe('FileOps', () => {
  describe('copyDirectory()', () => {
    const fileOps = new FileOps();
    let tmpDir;
    let sourceDir;
    let destDir;

    beforeEach(async () => {
      tmpDir = await createTempDir();
      sourceDir = path.join(tmpDir, 'source');
      destDir = path.join(tmpDir, 'dest');
      await fs.ensureDir(sourceDir);
      await fs.ensureDir(destDir);
    });

    afterEach(async () => {
      await cleanupTempDir(tmpDir);
    });

    describe('basic copying', () => {
      it('should copy a single file', async () => {
        await createTestFile(sourceDir, 'test.txt', 'content');

        await fileOps.copyDirectory(sourceDir, destDir);

        const destFile = path.join(destDir, 'test.txt');
        expect(await fs.pathExists(destFile)).toBe(true);
        expect(await fs.readFile(destFile, 'utf8')).toBe('content');
      });

      it('should copy multiple files', async () => {
        await createTestFile(sourceDir, 'file1.txt', 'content1');
        await createTestFile(sourceDir, 'file2.md', 'content2');
        await createTestFile(sourceDir, 'file3.json', '{}');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'file2.md'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'file3.json'))).toBe(true);
      });

      it('should copy nested directory structure', async () => {
        await createTestFile(sourceDir, 'root.txt', 'root');
        await createTestFile(sourceDir, 'level1/file.txt', 'level1');
        await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'root.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'level1', 'file.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
      });

      it('should create destination directory if it does not exist', async () => {
        const newDest = path.join(tmpDir, 'new-dest');
        await createTestFile(sourceDir, 'test.txt', 'content');

        await fileOps.copyDirectory(sourceDir, newDest);

        expect(await fs.pathExists(newDest)).toBe(true);
        expect(await fs.pathExists(path.join(newDest, 'test.txt'))).toBe(true);
      });
    });

    describe('overwrite behavior', () => {
      it('should overwrite existing files by default', async () => {
        await createTestFile(sourceDir, 'file.txt', 'new content');
        await createTestFile(destDir, 'file.txt', 'old content');

        await fileOps.copyDirectory(sourceDir, destDir);

        const content = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
        expect(content).toBe('new content');
      });

      it('should preserve file content when overwriting', async () => {
        await createTestFile(sourceDir, 'data.json', '{"new": true}');
        await createTestFile(destDir, 'data.json', '{"old": true}');
        await createTestFile(destDir, 'keep.txt', 'preserve this');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.readFile(path.join(destDir, 'data.json'), 'utf8')).toBe('{"new": true}');
        // Files not in source should be preserved
        expect(await fs.pathExists(path.join(destDir, 'keep.txt'))).toBe(true);
      });
    });

    describe('filtering with shouldIgnore', () => {
      it('should filter out .git directories', async () => {
        await createTestFile(sourceDir, 'file.txt', 'content');
        await createTestFile(sourceDir, '.git/config', 'git config');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, '.git'))).toBe(false);
      });

      it('should filter out node_modules directories', async () => {
        await createTestFile(sourceDir, 'package.json', '{}');
        await createTestFile(sourceDir, 'node_modules/lib/code.js', 'code');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'package.json'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'node_modules'))).toBe(false);
      });

      it('should filter out *.swp and *.tmp files', async () => {
        await createTestFile(sourceDir, 'document.txt', 'content');
        await createTestFile(sourceDir, 'document.txt.swp', 'vim swap');
        await createTestFile(sourceDir, 'temp.tmp', 'temporary');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'document.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'document.txt.swp'))).toBe(false);
        expect(await fs.pathExists(path.join(destDir, 'temp.tmp'))).toBe(false);
      });

      it('should filter out .DS_Store files', async () => {
        await createTestFile(sourceDir, 'file.txt', 'content');
        await createTestFile(sourceDir, '.DS_Store', 'mac metadata');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, '.DS_Store'))).toBe(false);
      });
    });

    describe('edge cases', () => {
      it('should handle empty source directory', async () => {
        await fileOps.copyDirectory(sourceDir, destDir);

        const files = await fs.readdir(destDir);
        expect(files).toHaveLength(0);
      });

      it('should handle Unicode filenames', async () => {
        await createTestFile(sourceDir, '测试.txt', 'chinese');
        await createTestFile(sourceDir, 'файл.json', 'russian');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, '测试.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'файл.json'))).toBe(true);
      });

      it('should handle filenames with special characters', async () => {
        await createTestFile(sourceDir, 'file with spaces.txt', 'content');
        await createTestFile(sourceDir, 'special-chars!@#.md', 'content');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file with spaces.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'special-chars!@#.md'))).toBe(true);
      });

      it('should handle very deep directory nesting', async () => {
        const deepPath = Array.from({ length: 10 }, (_, i) => `level${i}`).join('/');
        await createTestFile(sourceDir, `${deepPath}/deep.txt`, 'very deep');

        await fileOps.copyDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, ...deepPath.split('/'), 'deep.txt'))).toBe(true);
      });

      it('should preserve file permissions', async () => {
        const execFile = path.join(sourceDir, 'script.sh');
        await fs.writeFile(execFile, '#!/bin/bash\necho "test"');
        await fs.chmod(execFile, 0o755); // Make executable

        await fileOps.copyDirectory(sourceDir, destDir);

        const destFile = path.join(destDir, 'script.sh');
        const stats = await fs.stat(destFile);
        // Check if file is executable (user execute bit)
        expect((stats.mode & 0o100) !== 0).toBe(true);
      });

      it('should handle large number of files', async () => {
        // Create 50 files
        const promises = Array.from({ length: 50 }, (_, i) => createTestFile(sourceDir, `file${i}.txt`, `content ${i}`));
        await Promise.all(promises);

        await fileOps.copyDirectory(sourceDir, destDir);

        const destFiles = await fs.readdir(destDir);
        expect(destFiles).toHaveLength(50);
      });
    });

    describe('content integrity', () => {
      it('should preserve file content exactly', async () => {
        const content = 'Line 1\nLine 2\nLine 3\n';
        await createTestFile(sourceDir, 'file.txt', content);

        await fileOps.copyDirectory(sourceDir, destDir);

        const copiedContent = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
        expect(copiedContent).toBe(content);
      });

      it('should preserve binary file content', async () => {
        const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
        await fs.writeFile(path.join(sourceDir, 'binary.dat'), buffer);

        await fileOps.copyDirectory(sourceDir, destDir);

        const copiedBuffer = await fs.readFile(path.join(destDir, 'binary.dat'));
        expect(copiedBuffer).toEqual(buffer);
      });

      it('should preserve UTF-8 content', async () => {
        const utf8Content = 'Hello 世界 🌍';
        await createTestFile(sourceDir, 'utf8.txt', utf8Content);

        await fileOps.copyDirectory(sourceDir, destDir);

        const copied = await fs.readFile(path.join(destDir, 'utf8.txt'), 'utf8');
        expect(copied).toBe(utf8Content);
      });

      it('should preserve empty files', async () => {
        await createTestFile(sourceDir, 'empty.txt', '');

        await fileOps.copyDirectory(sourceDir, destDir);

        const content = await fs.readFile(path.join(destDir, 'empty.txt'), 'utf8');
        expect(content).toBe('');
      });
    });
  });
});
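
The behavior asserted above (overwrite by default, keep files that only exist in the destination, skip ignored entries, preserve permissions) is what `fs-extra`'s `copy()` with a filter provides. A minimal sketch under that assumption; it is not the shipped `FileOps.copyDirectory()`:

```js
import fs from 'fs-extra';

// Hypothetical sketch: copy a tree, skipping anything shouldIgnore() rejects.
async function copyDirectorySketch(src, dest, shouldIgnore) {
  await fs.copy(src, dest, {
    overwrite: true, // matches the "overwrite existing files by default" test
    filter: (srcPath) => !shouldIgnore(srcPath),
  });
}
```
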
@ -0,0 +1,211 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';

describe('FileOps', () => {
  describe('getFileHash()', () => {
    const fileOps = new FileOps();
    let tmpDir;

    beforeEach(async () => {
      tmpDir = await createTempDir();
    });

    afterEach(async () => {
      await cleanupTempDir(tmpDir);
    });

    describe('basic hashing', () => {
      it('should return SHA256 hash for a simple file', async () => {
        const filePath = await createTestFile(tmpDir, 'test.txt', 'hello');
        const hash = await fileOps.getFileHash(filePath);

        // SHA256 of 'hello' is known
        expect(hash).toBe('2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824');
        expect(hash).toHaveLength(64); // SHA256 is 64 hex characters
      });

      it('should return consistent hash for same content', async () => {
        const content = 'test content for hashing';
        const file1 = await createTestFile(tmpDir, 'file1.txt', content);
        const file2 = await createTestFile(tmpDir, 'file2.txt', content);

        const hash1 = await fileOps.getFileHash(file1);
        const hash2 = await fileOps.getFileHash(file2);

        expect(hash1).toBe(hash2);
      });

      it('should return different hash for different content', async () => {
        const file1 = await createTestFile(tmpDir, 'file1.txt', 'content A');
        const file2 = await createTestFile(tmpDir, 'file2.txt', 'content B');

        const hash1 = await fileOps.getFileHash(file1);
        const hash2 = await fileOps.getFileHash(file2);

        expect(hash1).not.toBe(hash2);
      });
    });

    describe('file size handling', () => {
      it('should handle empty file', async () => {
        const filePath = await createTestFile(tmpDir, 'empty.txt', '');
        const hash = await fileOps.getFileHash(filePath);

        // SHA256 of empty string
        expect(hash).toBe('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855');
      });

      it('should handle small file (<4KB)', async () => {
        const content = 'a'.repeat(1000); // 1KB
        const filePath = await createTestFile(tmpDir, 'small.txt', content);
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        expect(hash).toMatch(/^[a-f0-9]{64}$/);
      });

      it('should handle medium file (~1MB)', async () => {
        const content = 'x'.repeat(1024 * 1024); // 1MB
        const filePath = await createTestFile(tmpDir, 'medium.txt', content);
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        expect(hash).toMatch(/^[a-f0-9]{64}$/);
      });

      it('should handle large file (~10MB) via streaming', async () => {
        // Create a 10MB file
        const chunkSize = 1024 * 1024; // 1MB chunks
        const chunks = Array.from({ length: 10 }, () => 'y'.repeat(chunkSize));
        const content = chunks.join('');

        const filePath = await createTestFile(tmpDir, 'large.txt', content);
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        expect(hash).toMatch(/^[a-f0-9]{64}$/);
      }, 15_000); // 15 second timeout for large file
    });

    describe('content type handling', () => {
      it('should handle binary content', async () => {
        // Create a buffer with binary data
        const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
        const filePath = await createTestFile(tmpDir, 'binary.dat', buffer.toString('binary'));
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        expect(hash).toMatch(/^[a-f0-9]{64}$/);
      });

      it('should handle UTF-8 content correctly', async () => {
        const content = 'Hello 世界 🌍';
        const filePath = await createTestFile(tmpDir, 'utf8.txt', content);
        const hash = await fileOps.getFileHash(filePath);

        // Hash should be consistent for UTF-8 content
        const hash2 = await fileOps.getFileHash(filePath);
        expect(hash).toBe(hash2);
        expect(hash).toHaveLength(64);
      });

      it('should handle newline characters', async () => {
        const contentLF = 'line1\nline2\nline3';
        const contentCRLF = 'line1\r\nline2\r\nline3';

        const fileLF = await createTestFile(tmpDir, 'lf.txt', contentLF);
        const fileCRLF = await createTestFile(tmpDir, 'crlf.txt', contentCRLF);

        const hashLF = await fileOps.getFileHash(fileLF);
        const hashCRLF = await fileOps.getFileHash(fileCRLF);

        // Different line endings should produce different hashes
        expect(hashLF).not.toBe(hashCRLF);
      });

      it('should handle JSON content', async () => {
        const json = JSON.stringify({ key: 'value', nested: { array: [1, 2, 3] } }, null, 2);
        const filePath = await createTestFile(tmpDir, 'data.json', json);
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
      });
    });

    describe('edge cases', () => {
      it('should handle file with special characters in name', async () => {
        const filePath = await createTestFile(tmpDir, 'file with spaces & special-chars.txt', 'content');
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
      });

      it('should handle concurrent hash calculations', async () => {
        const files = await Promise.all([
          createTestFile(tmpDir, 'file1.txt', 'content 1'),
          createTestFile(tmpDir, 'file2.txt', 'content 2'),
          createTestFile(tmpDir, 'file3.txt', 'content 3'),
        ]);

        // Calculate hashes concurrently
        const hashes = await Promise.all(files.map((file) => fileOps.getFileHash(file)));

        // All hashes should be valid
        expect(hashes).toHaveLength(3);
        for (const hash of hashes) {
          expect(hash).toMatch(/^[a-f0-9]{64}$/);
        }

        // Hashes should be different
        expect(hashes[0]).not.toBe(hashes[1]);
        expect(hashes[1]).not.toBe(hashes[2]);
        expect(hashes[0]).not.toBe(hashes[2]);
      });

      it('should handle file with only whitespace', async () => {
        const filePath = await createTestFile(tmpDir, 'whitespace.txt', ' ');
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        // Should be different from empty file
        expect(hash).not.toBe('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855');
      });

      it('should handle very long single line', async () => {
        const longLine = 'x'.repeat(100_000); // 100KB single line
        const filePath = await createTestFile(tmpDir, 'longline.txt', longLine);
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
      });
    });

    describe('error handling', () => {
      it('should reject for non-existent file', async () => {
        const nonExistentPath = `${tmpDir}/does-not-exist.txt`;

        await expect(fileOps.getFileHash(nonExistentPath)).rejects.toThrow();
      });

      it('should reject for directory instead of file', async () => {
        await expect(fileOps.getFileHash(tmpDir)).rejects.toThrow();
      });
    });

    describe('streaming behavior', () => {
      it('should use streaming for efficiency (test implementation detail)', async () => {
        // This test verifies that the implementation uses streams
        // by checking that large files can be processed without loading entirely into memory
        const largeContent = 'z'.repeat(5 * 1024 * 1024); // 5MB
        const filePath = await createTestFile(tmpDir, 'stream.txt', largeContent);

        // If this completes without memory issues, streaming is working
        const hash = await fileOps.getFileHash(filePath);

        expect(hash).toHaveLength(64);
        expect(hash).toMatch(/^[a-f0-9]{64}$/);
      }, 10_000);
    });
  });
});
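
The tests above describe a streaming SHA-256 hash that returns 64 lowercase hex characters and rejects for missing files or directories. A minimal sketch consistent with those assertions, assuming the real `FileOps.getFileHash()` may differ in detail:

```js
import crypto from 'node:crypto';
import fs from 'node:fs';

// Hypothetical sketch: hash a file by streaming chunks through SHA-256.
function getFileHashSketch(filePath) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha256');
    fs.createReadStream(filePath)
      .on('error', reject) // missing file or directory -> rejected promise
      .on('data', (chunk) => hash.update(chunk))
      .on('end', () => resolve(hash.digest('hex'))); // 64 hex characters
  });
}
```
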
@ -0,0 +1,283 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
import { createTempDir, cleanupTempDir, createTestFile, createTestDirs } from '../../helpers/temp-dir.js';
import path from 'node:path';

describe('FileOps', () => {
  describe('getFileList()', () => {
    const fileOps = new FileOps();
    let tmpDir;

    beforeEach(async () => {
      tmpDir = await createTempDir();
    });

    afterEach(async () => {
      await cleanupTempDir(tmpDir);
    });

    describe('basic functionality', () => {
      it('should return empty array for empty directory', async () => {
        const files = await fileOps.getFileList(tmpDir);
        expect(files).toEqual([]);
      });

      it('should return single file in directory', async () => {
        await createTestFile(tmpDir, 'test.txt', 'content');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe('test.txt');
      });

      it('should return multiple files in directory', async () => {
        await createTestFile(tmpDir, 'file1.txt', 'content1');
        await createTestFile(tmpDir, 'file2.md', 'content2');
        await createTestFile(tmpDir, 'file3.json', 'content3');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files).toContain('file1.txt');
        expect(files).toContain('file2.md');
        expect(files).toContain('file3.json');
      });
    });

    describe('recursive directory walking', () => {
      it('should recursively find files in nested directories', async () => {
        await createTestFile(tmpDir, 'root.txt', 'root');
        await createTestFile(tmpDir, 'level1/file1.txt', 'level1');
        await createTestFile(tmpDir, 'level1/level2/file2.txt', 'level2');
        await createTestFile(tmpDir, 'level1/level2/level3/file3.txt', 'level3');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(4);
        expect(files).toContain('root.txt');
        expect(files).toContain(path.join('level1', 'file1.txt'));
        expect(files).toContain(path.join('level1', 'level2', 'file2.txt'));
        expect(files).toContain(path.join('level1', 'level2', 'level3', 'file3.txt'));
      });

      it('should handle multiple subdirectories at same level', async () => {
        await createTestFile(tmpDir, 'dir1/file1.txt', 'content');
        await createTestFile(tmpDir, 'dir2/file2.txt', 'content');
        await createTestFile(tmpDir, 'dir3/file3.txt', 'content');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files).toContain(path.join('dir1', 'file1.txt'));
        expect(files).toContain(path.join('dir2', 'file2.txt'));
        expect(files).toContain(path.join('dir3', 'file3.txt'));
      });

      it('should not include empty directories in results', async () => {
        await createTestDirs(tmpDir, ['empty1', 'empty2', 'has-file']);
        await createTestFile(tmpDir, 'has-file/file.txt', 'content');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe(path.join('has-file', 'file.txt'));
      });
    });

    describe('ignore filtering', () => {
      it('should ignore .git directories', async () => {
        await createTestFile(tmpDir, 'normal.txt', 'content');
        await createTestFile(tmpDir, '.git/config', 'git config');
        await createTestFile(tmpDir, '.git/hooks/pre-commit', 'hook');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe('normal.txt');
      });

      it('should ignore node_modules directories', async () => {
        await createTestFile(tmpDir, 'package.json', '{}');
        await createTestFile(tmpDir, 'node_modules/package/index.js', 'code');
        await createTestFile(tmpDir, 'node_modules/package/lib/util.js', 'util');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe('package.json');
      });

      it('should ignore .DS_Store files', async () => {
        await createTestFile(tmpDir, 'file.txt', 'content');
        await createTestFile(tmpDir, '.DS_Store', 'mac metadata');
        await createTestFile(tmpDir, 'subdir/.DS_Store', 'mac metadata');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe('file.txt');
      });

      it('should ignore *.swp and *.tmp files', async () => {
        await createTestFile(tmpDir, 'document.txt', 'content');
        await createTestFile(tmpDir, 'document.txt.swp', 'vim swap');
        await createTestFile(tmpDir, 'temp.tmp', 'temporary');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe('document.txt');
      });

      it('should ignore multiple ignored patterns together', async () => {
        await createTestFile(tmpDir, 'src/index.js', 'source code');
        await createTestFile(tmpDir, 'node_modules/lib/code.js', 'dependency');
        await createTestFile(tmpDir, '.git/config', 'git config');
        await createTestFile(tmpDir, '.DS_Store', 'mac file');
        await createTestFile(tmpDir, 'file.swp', 'swap file');
        await createTestFile(tmpDir, '.idea/workspace.xml', 'ide');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe(path.join('src', 'index.js'));
      });
    });

    describe('relative path handling', () => {
      it('should return paths relative to base directory', async () => {
        await createTestFile(tmpDir, 'a/b/c/deep.txt', 'deep');

        const files = await fileOps.getFileList(tmpDir);

        expect(files[0]).toBe(path.join('a', 'b', 'c', 'deep.txt'));
        expect(path.isAbsolute(files[0])).toBe(false);
      });

      it('should handle subdirectory as base', async () => {
        await createTestFile(tmpDir, 'root.txt', 'root');
        await createTestFile(tmpDir, 'sub/file1.txt', 'sub1');
        await createTestFile(tmpDir, 'sub/file2.txt', 'sub2');

        const subDir = path.join(tmpDir, 'sub');
        const files = await fileOps.getFileList(subDir);

        expect(files).toHaveLength(2);
        expect(files).toContain('file1.txt');
        expect(files).toContain('file2.txt');
        // Should not include root.txt
        expect(files).not.toContain('root.txt');
      });
    });

    describe('edge cases', () => {
      it('should handle directory with special characters', async () => {
        await createTestFile(tmpDir, 'folder with spaces/file.txt', 'content');
        await createTestFile(tmpDir, 'special-chars!@#/data.json', 'data');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(2);
        expect(files).toContain(path.join('folder with spaces', 'file.txt'));
        expect(files).toContain(path.join('special-chars!@#', 'data.json'));
      });

      it('should handle Unicode filenames', async () => {
        await createTestFile(tmpDir, '文档/测试.txt', 'chinese');
        await createTestFile(tmpDir, 'файл/данные.json', 'russian');
        await createTestFile(tmpDir, 'ファイル/データ.yaml', 'japanese');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files.some((f) => f.includes('测试.txt'))).toBe(true);
        expect(files.some((f) => f.includes('данные.json'))).toBe(true);
        expect(files.some((f) => f.includes('データ.yaml'))).toBe(true);
      });

      it('should return empty array for non-existent directory', async () => {
        const nonExistent = path.join(tmpDir, 'does-not-exist');

        const files = await fileOps.getFileList(nonExistent);

        expect(files).toEqual([]);
      });

      it('should handle very deep directory nesting', async () => {
        // Create a deeply nested structure (10 levels)
        const deepPath = Array.from({ length: 10 }, (_, i) => `level${i}`).join('/');
        await createTestFile(tmpDir, `${deepPath}/deep.txt`, 'very deep');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(1);
        expect(files[0]).toBe(path.join(...deepPath.split('/'), 'deep.txt'));
      });

      it('should handle directory with many files', async () => {
        // Create 100 files
        const promises = Array.from({ length: 100 }, (_, i) => createTestFile(tmpDir, `file${i}.txt`, `content ${i}`));
        await Promise.all(promises);

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(100);
        expect(files.every((f) => f.startsWith('file') && f.endsWith('.txt'))).toBe(true);
      });

      it('should handle mixed ignored and non-ignored files', async () => {
        await createTestFile(tmpDir, 'src/main.js', 'code');
        await createTestFile(tmpDir, 'src/main.js.swp', 'swap');
        await createTestFile(tmpDir, 'lib/utils.js', 'utils');
        await createTestFile(tmpDir, 'node_modules/dep/index.js', 'dep');
        await createTestFile(tmpDir, 'test/test.js', 'test');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files).toContain(path.join('src', 'main.js'));
        expect(files).toContain(path.join('lib', 'utils.js'));
        expect(files).toContain(path.join('test', 'test.js'));
      });
    });

    describe('file types', () => {
      it('should include files with no extension', async () => {
        await createTestFile(tmpDir, 'README', 'readme content');
        await createTestFile(tmpDir, 'LICENSE', 'license text');
        await createTestFile(tmpDir, 'Makefile', 'make commands');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files).toContain('README');
        expect(files).toContain('LICENSE');
        expect(files).toContain('Makefile');
      });

      it('should include dotfiles (except ignored ones)', async () => {
        await createTestFile(tmpDir, '.gitignore', 'ignore patterns');
        await createTestFile(tmpDir, '.env', 'environment');
        await createTestFile(tmpDir, '.eslintrc', 'eslint config');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
        expect(files).toContain('.gitignore');
        expect(files).toContain('.env');
        expect(files).toContain('.eslintrc');
      });

      it('should include files with multiple extensions', async () => {
        await createTestFile(tmpDir, 'archive.tar.gz', 'archive');
        await createTestFile(tmpDir, 'backup.sql.bak', 'backup');
        await createTestFile(tmpDir, 'config.yaml.sample', 'sample config');

        const files = await fileOps.getFileList(tmpDir);

        expect(files).toHaveLength(3);
      });
    });
  });
});
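
Taken together, these tests describe a recursive walk that returns paths relative to the base directory, skips ignored entries, and yields an empty array for a missing directory. A minimal sketch under those assumptions (the helper name and `shouldIgnore` callback are illustrative, not the actual `FileOps.getFileList()`):

```js
import fs from 'fs-extra';
import path from 'node:path';

// Hypothetical sketch: depth-first walk returning base-relative file paths.
async function getFileListSketch(baseDir, shouldIgnore, current = baseDir, out = []) {
  if (!(await fs.pathExists(current))) return out; // missing base -> []
  for (const entry of await fs.readdir(current, { withFileTypes: true })) {
    const full = path.join(current, entry.name);
    if (shouldIgnore(full)) continue; // .git, node_modules, *.swp, ...
    if (entry.isDirectory()) {
      await getFileListSketch(baseDir, shouldIgnore, full, out);
    } else {
      out.push(path.relative(baseDir, full));
    }
  }
  return out;
}
```
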
@ -0,0 +1,177 @@
import { describe, it, expect } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';

describe('FileOps', () => {
  describe('shouldIgnore()', () => {
    const fileOps = new FileOps();

    describe('exact matches', () => {
      it('should ignore .git directory', () => {
        expect(fileOps.shouldIgnore('.git')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.git')).toBe(true);
        // Note: basename of '/project/.git/hooks' is 'hooks', not '.git'
        expect(fileOps.shouldIgnore('/project/.git/hooks')).toBe(false);
      });

      it('should ignore .DS_Store files', () => {
        expect(fileOps.shouldIgnore('.DS_Store')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.DS_Store')).toBe(true);
      });

      it('should ignore node_modules directory', () => {
        expect(fileOps.shouldIgnore('node_modules')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/node_modules')).toBe(true);
        // Note: basename of '/project/node_modules/package' is 'package', not 'node_modules'
        expect(fileOps.shouldIgnore('/project/node_modules/package')).toBe(false);
      });

      it('should ignore .idea directory', () => {
        expect(fileOps.shouldIgnore('.idea')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.idea')).toBe(true);
      });

      it('should ignore .vscode directory', () => {
        expect(fileOps.shouldIgnore('.vscode')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/.vscode')).toBe(true);
      });

      it('should ignore __pycache__ directory', () => {
        expect(fileOps.shouldIgnore('__pycache__')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/__pycache__')).toBe(true);
      });
    });

    describe('glob pattern matches', () => {
      it('should ignore *.swp files (Vim swap files)', () => {
        expect(fileOps.shouldIgnore('file.swp')).toBe(true);
        expect(fileOps.shouldIgnore('.config.yaml.swp')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/document.txt.swp')).toBe(true);
      });

      it('should ignore *.tmp files (temporary files)', () => {
        expect(fileOps.shouldIgnore('file.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('temp_data.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/cache.tmp')).toBe(true);
      });

      it('should ignore *.pyc files (Python compiled)', () => {
        expect(fileOps.shouldIgnore('module.pyc')).toBe(true);
        expect(fileOps.shouldIgnore('__init__.pyc')).toBe(true);
        expect(fileOps.shouldIgnore('/path/to/script.pyc')).toBe(true);
      });
    });

    describe('files that should NOT be ignored', () => {
      it('should not ignore normal files', () => {
        expect(fileOps.shouldIgnore('README.md')).toBe(false);
        expect(fileOps.shouldIgnore('package.json')).toBe(false);
        expect(fileOps.shouldIgnore('index.js')).toBe(false);
      });

      it('should not ignore .gitignore itself', () => {
        expect(fileOps.shouldIgnore('.gitignore')).toBe(false);
        expect(fileOps.shouldIgnore('/path/to/.gitignore')).toBe(false);
      });

      it('should not ignore files with similar but different names', () => {
        expect(fileOps.shouldIgnore('git-file.txt')).toBe(false);
        expect(fileOps.shouldIgnore('node_modules.backup')).toBe(false);
        expect(fileOps.shouldIgnore('swap-file.txt')).toBe(false);
      });

      it('should not ignore files with ignored patterns in parent directory', () => {
        // The pattern matches basename, not full path
        expect(fileOps.shouldIgnore('/project/src/utils.js')).toBe(false);
        expect(fileOps.shouldIgnore('/code/main.py')).toBe(false);
      });

      it('should not ignore directories with dot prefix (except specific ones)', () => {
        expect(fileOps.shouldIgnore('.github')).toBe(false);
        expect(fileOps.shouldIgnore('.husky')).toBe(false);
        expect(fileOps.shouldIgnore('.npmrc')).toBe(false);
      });
    });

    describe('edge cases', () => {
      it('should handle empty string', () => {
        expect(fileOps.shouldIgnore('')).toBe(false);
      });

      it('should handle paths with multiple segments', () => {
        // basename of '/very/deep/path/to/node_modules/package' is 'package'
        expect(fileOps.shouldIgnore('/very/deep/path/to/node_modules/package')).toBe(false);
        expect(fileOps.shouldIgnore('/very/deep/path/to/file.swp')).toBe(true);
        expect(fileOps.shouldIgnore('/very/deep/path/to/normal.js')).toBe(false);
        // But the directory itself would be ignored
        expect(fileOps.shouldIgnore('/very/deep/path/to/node_modules')).toBe(true);
      });

      it('should handle Windows-style paths', () => {
        // Note: path.basename() on Unix doesn't recognize backslashes
        // On Unix: basename('C:\\project\\file.tmp') = 'C:\\project\\file.tmp'
        // So we test cross-platform path handling
        expect(fileOps.shouldIgnore(String.raw`C:\project\file.tmp`)).toBe(true); // .tmp matches
        expect(fileOps.shouldIgnore(String.raw`test\file.swp`)).toBe(true); // .swp matches
        // These won't be ignored because they don't match the patterns on Unix
        expect(fileOps.shouldIgnore(String.raw`C:\project\node_modules\pkg`)).toBe(false);
        expect(fileOps.shouldIgnore(String.raw`C:\project\src\main.js`)).toBe(false);
      });

      it('should handle relative paths', () => {
        // basename of './node_modules/package' is 'package'
        expect(fileOps.shouldIgnore('./node_modules/package')).toBe(false);
        // basename of '../.git/hooks' is 'hooks'
        expect(fileOps.shouldIgnore('../.git/hooks')).toBe(false);
        expect(fileOps.shouldIgnore('./src/index.js')).toBe(false);
        // But the directories themselves would be ignored
        expect(fileOps.shouldIgnore('./node_modules')).toBe(true);
        expect(fileOps.shouldIgnore('../.git')).toBe(true);
      });

      it('should handle files with multiple extensions', () => {
        expect(fileOps.shouldIgnore('file.tar.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('backup.sql.swp')).toBe(true);
        expect(fileOps.shouldIgnore('data.json.gz')).toBe(false);
      });

      it('should be case-sensitive for exact matches', () => {
        expect(fileOps.shouldIgnore('Node_Modules')).toBe(false);
        expect(fileOps.shouldIgnore('NODE_MODULES')).toBe(false);
        expect(fileOps.shouldIgnore('node_modules')).toBe(true);
      });

      it('should handle files starting with ignored patterns', () => {
        expect(fileOps.shouldIgnore('.git-credentials')).toBe(false);
        expect(fileOps.shouldIgnore('.gitattributes')).toBe(false);
        expect(fileOps.shouldIgnore('.git')).toBe(true);
      });

      it('should handle Unicode filenames', () => {
        expect(fileOps.shouldIgnore('文档.swp')).toBe(true);
        expect(fileOps.shouldIgnore('файл.tmp')).toBe(true);
        expect(fileOps.shouldIgnore('ドキュメント.txt')).toBe(false);
      });
    });

    describe('pattern matching behavior', () => {
      it('should match patterns based on basename only', () => {
        // shouldIgnore uses path.basename(), so only the last segment matters
        expect(fileOps.shouldIgnore('/home/user/.git/config')).toBe(false); // basename is 'config'
        expect(fileOps.shouldIgnore('/home/user/project/node_modules')).toBe(true); // basename is 'node_modules'
      });

      it('should handle trailing slashes', () => {
        // path.basename() returns the directory name, not empty string for trailing slash
        expect(fileOps.shouldIgnore('node_modules/')).toBe(true);
        expect(fileOps.shouldIgnore('.git/')).toBe(true);
      });

      it('should treat patterns as partial regex matches', () => {
        // The *.swp pattern becomes /.*\.swp/ regex
        expect(fileOps.shouldIgnore('test.swp')).toBe(true);
        expect(fileOps.shouldIgnore('swp')).toBe(false); // doesn't match .*\.swp
        expect(fileOps.shouldIgnore('.swp')).toBe(true); // matches .*\.swp (. before swp)
      });
    });
  });
});
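
These tests characterize `shouldIgnore()` as a basename check against a list of exact names plus glob-like patterns compiled to unanchored regexes (so `*.swp` becomes `/.*\.swp/`). A minimal sketch of that matching logic, assuming the actual pattern list and compilation in `FileOps` may differ:

```js
import path from 'node:path';

// Hypothetical sketch of basename-only ignore matching.
const EXACT = new Set(['.git', '.DS_Store', 'node_modules', '.idea', '.vscode', '__pycache__']);
const PATTERNS = ['*.swp', '*.tmp', '*.pyc'].map(
  (p) => new RegExp(p.replace('.', String.raw`\.`).replace('*', '.*')), // '*.swp' -> /.*\.swp/
);

function shouldIgnoreSketch(filePath) {
  const base = path.basename(filePath); // only the last path segment is checked
  return EXACT.has(base) || PATTERNS.some((re) => re.test(base));
}
```
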
@ -0,0 +1,316 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
import fs from 'fs-extra';
import path from 'node:path';

describe('FileOps', () => {
  describe('syncDirectory()', () => {
    const fileOps = new FileOps();
    let tmpDir;
    let sourceDir;
    let destDir;

    beforeEach(async () => {
      tmpDir = await createTempDir();
      sourceDir = path.join(tmpDir, 'source');
      destDir = path.join(tmpDir, 'dest');
      await fs.ensureDir(sourceDir);
      await fs.ensureDir(destDir);
    });

    afterEach(async () => {
      await cleanupTempDir(tmpDir);
    });

    describe('hash-based selective update', () => {
      it('should update file when hashes are identical (safe update)', async () => {
        const content = 'identical content';
        await createTestFile(sourceDir, 'file.txt', content);
        await createTestFile(destDir, 'file.txt', content);

        await fileOps.syncDirectory(sourceDir, destDir);

        // File should be updated (copied over) since hashes match
        const destContent = await fs.readFile(path.join(destDir, 'file.txt'), 'utf8');
        expect(destContent).toBe(content);
      });

      it('should preserve modified file when dest is newer', async () => {
        await createTestFile(sourceDir, 'file.txt', 'source content');
        await createTestFile(destDir, 'file.txt', 'modified by user');

        // Make dest file newer
        const destFile = path.join(destDir, 'file.txt');
        const futureTime = new Date(Date.now() + 10_000);
        await fs.utimes(destFile, futureTime, futureTime);

        await fileOps.syncDirectory(sourceDir, destDir);

        // User modification should be preserved
        const destContent = await fs.readFile(destFile, 'utf8');
        expect(destContent).toBe('modified by user');
      });

      it('should update file when source is newer than modified dest', async () => {
        // Create both files first
        await createTestFile(sourceDir, 'file.txt', 'new source content');
        await createTestFile(destDir, 'file.txt', 'old modified content');

        // Make dest older and source newer with explicit times
        const destFile = path.join(destDir, 'file.txt');
        const sourceFile = path.join(sourceDir, 'file.txt');

        const pastTime = new Date(Date.now() - 10_000);
        const futureTime = new Date(Date.now() + 10_000);

        await fs.utimes(destFile, pastTime, pastTime);
        await fs.utimes(sourceFile, futureTime, futureTime);

        await fileOps.syncDirectory(sourceDir, destDir);

        // Should update to source content since source is newer
        const destContent = await fs.readFile(destFile, 'utf8');
        expect(destContent).toBe('new source content');
      });
    });
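
The three cases above pin down the per-file rule that `syncDirectory()` is expected to follow: identical hashes mean the copy is safe, a newer destination means a user edit to preserve, and a newer source wins otherwise. A minimal sketch of that decision step, assuming a `FileOps` instance and the hypothetical helper name `decideUpdate` (not part of this PR); new-file copying and orphan removal, exercised further down, would sit around it:

```js
import fs from 'fs-extra';

// Hypothetical sketch of the per-file sync decision.
async function decideUpdate(fileOps, srcFile, destFile) {
  if (!(await fs.pathExists(destFile))) return 'copy'; // new file
  const [srcHash, destHash] = await Promise.all([
    fileOps.getFileHash(srcFile),
    fileOps.getFileHash(destFile),
  ]);
  if (srcHash === destHash) return 'copy'; // identical content: refreshing is safe
  const [srcStat, destStat] = await Promise.all([fs.stat(srcFile), fs.stat(destFile)]);
  return destStat.mtimeMs > srcStat.mtimeMs ? 'preserve' : 'copy'; // keep newer user edits
}
```
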

    describe('new file handling', () => {
      it('should copy new files from source', async () => {
        await createTestFile(sourceDir, 'new-file.txt', 'new content');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'new-file.txt'))).toBe(true);
        expect(await fs.readFile(path.join(destDir, 'new-file.txt'), 'utf8')).toBe('new content');
      });

      it('should copy multiple new files', async () => {
        await createTestFile(sourceDir, 'file1.txt', 'content1');
        await createTestFile(sourceDir, 'file2.md', 'content2');
        await createTestFile(sourceDir, 'file3.json', 'content3');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'file2.md'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'file3.json'))).toBe(true);
      });

      it('should create nested directories for new files', async () => {
        await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep content');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
      });
    });

    describe('orphaned file removal', () => {
      it('should remove files that no longer exist in source', async () => {
        await createTestFile(sourceDir, 'keep.txt', 'keep this');
        await createTestFile(destDir, 'keep.txt', 'keep this');
        await createTestFile(destDir, 'remove.txt', 'delete this');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'keep.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'remove.txt'))).toBe(false);
      });

      it('should remove multiple orphaned files', async () => {
        await createTestFile(sourceDir, 'current.txt', 'current');
        await createTestFile(destDir, 'current.txt', 'current');
        await createTestFile(destDir, 'old1.txt', 'orphan 1');
        await createTestFile(destDir, 'old2.txt', 'orphan 2');
        await createTestFile(destDir, 'old3.txt', 'orphan 3');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'current.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'old1.txt'))).toBe(false);
        expect(await fs.pathExists(path.join(destDir, 'old2.txt'))).toBe(false);
        expect(await fs.pathExists(path.join(destDir, 'old3.txt'))).toBe(false);
      });

      it('should remove orphaned directories', async () => {
        await createTestFile(sourceDir, 'keep/file.txt', 'keep');
        await createTestFile(destDir, 'keep/file.txt', 'keep');
        await createTestFile(destDir, 'remove/orphan.txt', 'orphan');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'keep'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'remove', 'orphan.txt'))).toBe(false);
      });
    });

    describe('complex scenarios', () => {
      it('should handle mixed operations in single sync', async () => {
        const now = Date.now();
        const pastTime = now - 100_000; // 100 seconds ago
        const futureTime = now + 100_000; // 100 seconds from now

        // Identical file (update)
        await createTestFile(sourceDir, 'identical.txt', 'same');
        await createTestFile(destDir, 'identical.txt', 'same');

        // Modified file with newer dest (preserve)
        await createTestFile(sourceDir, 'modified.txt', 'original');
        await createTestFile(destDir, 'modified.txt', 'user modified');
        const modifiedFile = path.join(destDir, 'modified.txt');
        await fs.utimes(modifiedFile, futureTime, futureTime);

        // New file (copy)
        await createTestFile(sourceDir, 'new.txt', 'new content');

        // Orphaned file (remove)
        await createTestFile(destDir, 'orphan.txt', 'delete me');

        await fileOps.syncDirectory(sourceDir, destDir);

        // Verify operations
        expect(await fs.pathExists(path.join(destDir, 'identical.txt'))).toBe(true);

        expect(await fs.readFile(modifiedFile, 'utf8')).toBe('user modified');

        expect(await fs.pathExists(path.join(destDir, 'new.txt'))).toBe(true);

        expect(await fs.pathExists(path.join(destDir, 'orphan.txt'))).toBe(false);
      });

      it('should handle nested directory changes', async () => {
        // Create nested structure in source
        await createTestFile(sourceDir, 'level1/keep.txt', 'keep');
        await createTestFile(sourceDir, 'level1/level2/deep.txt', 'deep');

        // Create different nested structure in dest
        await createTestFile(destDir, 'level1/keep.txt', 'keep');
        await createTestFile(destDir, 'level1/remove.txt', 'orphan');
        await createTestFile(destDir, 'old-level/file.txt', 'old');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'level1', 'keep.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'level1', 'level2', 'deep.txt'))).toBe(true);
        expect(await fs.pathExists(path.join(destDir, 'level1', 'remove.txt'))).toBe(false);
        expect(await fs.pathExists(path.join(destDir, 'old-level', 'file.txt'))).toBe(false);
      });
    });

    describe('edge cases', () => {
      it('should handle empty source directory', async () => {
        await createTestFile(destDir, 'file.txt', 'content');

        await fileOps.syncDirectory(sourceDir, destDir);

        // All files should be removed
        expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(false);
      });

      it('should handle empty destination directory', async () => {
        await createTestFile(sourceDir, 'file.txt', 'content');

        await fileOps.syncDirectory(sourceDir, destDir);

        expect(await fs.pathExists(path.join(destDir, 'file.txt'))).toBe(true);
      });

      it('should handle Unicode filenames', async () => {
        await createTestFile(sourceDir, '测试.txt', 'chinese');
        await createTestFile(destDir, '测试.txt', 'modified chinese');

        // Make dest newer
        await fs.utimes(path.join(destDir, '测试.txt'), Date.now() + 10_000, Date.now() + 10_000);

        await fileOps.syncDirectory(sourceDir, destDir);

        // Should preserve user modification
        expect(await fs.readFile(path.join(destDir, '测试.txt'), 'utf8')).toBe('modified chinese');
      });

      it('should handle large number of files', async () => {
        // Create 50 files in source
        for (let i = 0; i < 50; i++) {
          await createTestFile(sourceDir, `file${i}.txt`, `content ${i}`);
        }

        // Create 25 matching files and 25 orphaned files in dest
        for (let i = 0; i < 25; i++) {
          await createTestFile(destDir, `file${i}.txt`, `content ${i}`);
          await createTestFile(destDir, `orphan${i}.txt`, `orphan ${i}`);
        }

        await fileOps.syncDirectory(sourceDir, destDir);

        // All 50 source files should exist
        for (let i = 0; i < 50; i++) {
          expect(await fs.pathExists(path.join(destDir, `file${i}.txt`))).toBe(true);
        }

        // All 25 orphaned files should be removed
        for (let i = 0; i < 25; i++) {
          expect(await fs.pathExists(path.join(destDir, `orphan${i}.txt`))).toBe(false);
        }
      });

      it('should handle binary files correctly', async () => {
        const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47]);
        await fs.writeFile(path.join(sourceDir, 'binary.dat'), buffer);
        await fs.writeFile(path.join(destDir, 'binary.dat'), buffer);

        await fileOps.syncDirectory(sourceDir, destDir);

        const destBuffer = await fs.readFile(path.join(destDir, 'binary.dat'));
        expect(destBuffer).toEqual(buffer);
      });
    });

    describe('timestamp precision', () => {
      it('should handle files with very close modification times', async () => {
        await createTestFile(sourceDir, 'file.txt', 'source');
        await createTestFile(destDir, 'file.txt', 'dest modified');

        // Make dest just slightly newer (100ms)
        const destFile = path.join(destDir, 'file.txt');
        await fs.utimes(destFile, Date.now() + 100, Date.now() + 100);

        await fileOps.syncDirectory(sourceDir, destDir);

        // Should preserve user modification even with small time difference
        expect(await fs.readFile(destFile, 'utf8')).toBe('dest modified');
      });
    });

    describe('data integrity', () => {
      it('should not corrupt files during sync', async () => {
        const content = 'Important data\nLine 2\nLine 3\n';
        await createTestFile(sourceDir, 'data.txt', content);
|
||||||
|
|
||||||
|
await fileOps.syncDirectory(sourceDir, destDir);
|
||||||
|
|
||||||
|
expect(await fs.readFile(path.join(destDir, 'data.txt'), 'utf8')).toBe(content);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle sync interruption gracefully', async () => {
|
||||||
|
// This test verifies that partial syncs don't leave inconsistent state
|
||||||
|
await createTestFile(sourceDir, 'file1.txt', 'content1');
|
||||||
|
await createTestFile(sourceDir, 'file2.txt', 'content2');
|
||||||
|
|
||||||
|
// First sync
|
||||||
|
await fileOps.syncDirectory(sourceDir, destDir);
|
||||||
|
|
||||||
|
// Modify source
|
||||||
|
await createTestFile(sourceDir, 'file3.txt', 'content3');
|
||||||
|
|
||||||
|
// Second sync
|
||||||
|
await fileOps.syncDirectory(sourceDir, destDir);
|
||||||
|
|
||||||
|
// All files should be present and correct
|
||||||
|
expect(await fs.pathExists(path.join(destDir, 'file1.txt'))).toBe(true);
|
||||||
|
expect(await fs.pathExists(path.join(destDir, 'file2.txt'))).toBe(true);
|
||||||
|
expect(await fs.pathExists(path.join(destDir, 'file3.txt'))).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
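// A minimal sketch of the sync contract these cases pin down: copy new files,
// overwrite only when the source is newer (so user-modified destination files
// survive), and remove anything in dest that no longer exists in source. This is
// an inferred illustration, not the real FileOps.syncDirectory; it reuses the
// fs-extra and node:path helpers this suite already imports, and the function
// name is hypothetical.
async function syncDirectorySketch(sourceDir, destDir) {
  await fs.ensureDir(destDir);

  for (const entry of await fs.readdir(sourceDir, { withFileTypes: true })) {
    const src = path.join(sourceDir, entry.name);
    const dest = path.join(destDir, entry.name);
    if (entry.isDirectory()) {
      await syncDirectorySketch(src, dest); // recurse into nested directories
    } else if (await fs.pathExists(dest)) {
      const [srcStat, destStat] = await Promise.all([fs.stat(src), fs.stat(dest)]);
      if (srcStat.mtimeMs > destStat.mtimeMs) await fs.copy(src, dest); // otherwise the newer dest wins
    } else {
      await fs.copy(src, dest); // brand-new file
    }
  }

  // Orphan removal: anything present in dest but absent from source is deleted.
  for (const name of await fs.readdir(destDir)) {
    if (!(await fs.pathExists(path.join(sourceDir, name)))) {
      await fs.remove(path.join(destDir, name));
    }
  }
}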
@@ -0,0 +1,214 @@
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { FileOps } from '../../../tools/cli/lib/file-ops.js';
|
||||||
|
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'node:path';
|
||||||
|
|
||||||
|
describe('FileOps', () => {
|
||||||
|
const fileOps = new FileOps();
|
||||||
|
let tmpDir;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tmpDir = await createTempDir();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await cleanupTempDir(tmpDir);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('ensureDir()', () => {
|
||||||
|
it('should create directory if it does not exist', async () => {
|
||||||
|
const newDir = path.join(tmpDir, 'new-directory');
|
||||||
|
|
||||||
|
await fileOps.ensureDir(newDir);
|
||||||
|
|
||||||
|
expect(await fs.pathExists(newDir)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not fail if directory already exists', async () => {
|
||||||
|
const existingDir = path.join(tmpDir, 'existing');
|
||||||
|
await fs.ensureDir(existingDir);
|
||||||
|
|
||||||
|
await expect(fileOps.ensureDir(existingDir)).resolves.not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create nested directories', async () => {
|
||||||
|
const nestedDir = path.join(tmpDir, 'level1', 'level2', 'level3');
|
||||||
|
|
||||||
|
await fileOps.ensureDir(nestedDir);
|
||||||
|
|
||||||
|
expect(await fs.pathExists(nestedDir)).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('remove()', () => {
|
||||||
|
it('should remove a file', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||||
|
|
||||||
|
await fileOps.remove(filePath);
|
||||||
|
|
||||||
|
expect(await fs.pathExists(filePath)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should remove a directory', async () => {
|
||||||
|
const dirPath = path.join(tmpDir, 'test-dir');
|
||||||
|
await fs.ensureDir(dirPath);
|
||||||
|
await createTestFile(dirPath, 'file.txt', 'content');
|
||||||
|
|
||||||
|
await fileOps.remove(dirPath);
|
||||||
|
|
||||||
|
expect(await fs.pathExists(dirPath)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not fail if path does not exist', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||||
|
|
||||||
|
await expect(fileOps.remove(nonExistent)).resolves.not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should remove nested directories', async () => {
|
||||||
|
const nested = path.join(tmpDir, 'a', 'b', 'c');
|
||||||
|
await fs.ensureDir(nested);
|
||||||
|
await createTestFile(nested, 'file.txt', 'content');
|
||||||
|
|
||||||
|
await fileOps.remove(path.join(tmpDir, 'a'));
|
||||||
|
|
||||||
|
expect(await fs.pathExists(path.join(tmpDir, 'a'))).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('readFile()', () => {
|
||||||
|
it('should read file content', async () => {
|
||||||
|
const content = 'test content';
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', content);
|
||||||
|
|
||||||
|
const result = await fileOps.readFile(filePath);
|
||||||
|
|
||||||
|
expect(result).toBe(content);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should read UTF-8 content', async () => {
|
||||||
|
const content = 'Hello 世界 🌍';
|
||||||
|
const filePath = await createTestFile(tmpDir, 'utf8.txt', content);
|
||||||
|
|
||||||
|
const result = await fileOps.readFile(filePath);
|
||||||
|
|
||||||
|
expect(result).toBe(content);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should read empty file', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'empty.txt', '');
|
||||||
|
|
||||||
|
const result = await fileOps.readFile(filePath);
|
||||||
|
|
||||||
|
expect(result).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject for non-existent file', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'does-not-exist.txt');
|
||||||
|
|
||||||
|
await expect(fileOps.readFile(nonExistent)).rejects.toThrow();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('writeFile()', () => {
|
||||||
|
it('should write file content', async () => {
|
||||||
|
const filePath = path.join(tmpDir, 'new-file.txt');
|
||||||
|
const content = 'test content';
|
||||||
|
|
||||||
|
await fileOps.writeFile(filePath, content);
|
||||||
|
|
||||||
|
expect(await fs.readFile(filePath, 'utf8')).toBe(content);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create parent directories if they do not exist', async () => {
|
||||||
|
const filePath = path.join(tmpDir, 'level1', 'level2', 'file.txt');
|
||||||
|
|
||||||
|
await fileOps.writeFile(filePath, 'content');
|
||||||
|
|
||||||
|
expect(await fs.pathExists(filePath)).toBe(true);
|
||||||
|
expect(await fs.readFile(filePath, 'utf8')).toBe('content');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should overwrite existing file', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', 'old content');
|
||||||
|
|
||||||
|
await fileOps.writeFile(filePath, 'new content');
|
||||||
|
|
||||||
|
expect(await fs.readFile(filePath, 'utf8')).toBe('new content');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle UTF-8 content', async () => {
|
||||||
|
const content = '测试 Тест 🎉';
|
||||||
|
const filePath = path.join(tmpDir, 'unicode.txt');
|
||||||
|
|
||||||
|
await fileOps.writeFile(filePath, content);
|
||||||
|
|
||||||
|
expect(await fs.readFile(filePath, 'utf8')).toBe(content);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('exists()', () => {
|
||||||
|
it('should return true for existing file', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||||
|
|
||||||
|
const result = await fileOps.exists(filePath);
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return true for existing directory', async () => {
|
||||||
|
const dirPath = path.join(tmpDir, 'test-dir');
|
||||||
|
await fs.ensureDir(dirPath);
|
||||||
|
|
||||||
|
const result = await fileOps.exists(dirPath);
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return false for non-existent path', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||||
|
|
||||||
|
const result = await fileOps.exists(nonExistent);
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('stat()', () => {
|
||||||
|
it('should return stats for file', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||||
|
|
||||||
|
const stats = await fileOps.stat(filePath);
|
||||||
|
|
||||||
|
expect(stats.isFile()).toBe(true);
|
||||||
|
expect(stats.isDirectory()).toBe(false);
|
||||||
|
expect(stats.size).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return stats for directory', async () => {
|
||||||
|
const dirPath = path.join(tmpDir, 'test-dir');
|
||||||
|
await fs.ensureDir(dirPath);
|
||||||
|
|
||||||
|
const stats = await fileOps.stat(dirPath);
|
||||||
|
|
||||||
|
expect(stats.isDirectory()).toBe(true);
|
||||||
|
expect(stats.isFile()).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should reject for non-existent path', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'does-not-exist');
|
||||||
|
|
||||||
|
await expect(fileOps.stat(nonExistent)).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return modification time', async () => {
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', 'content');
|
||||||
|
|
||||||
|
const stats = await fileOps.stat(filePath);
|
||||||
|
|
||||||
|
expect(stats.mtime).toBeInstanceOf(Date);
|
||||||
|
expect(stats.mtime.getTime()).toBeLessThanOrEqual(Date.now());
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
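// Taken together, the cases above describe FileOps as a thin promise-based wrapper
// over fs-extra: idempotent directory creation, tolerant removal, UTF-8 reads and
// writes with parent directories created on demand, and pass-through exists/stat.
// A hedged sketch of such a wrapper (the real tools/cli/lib/file-ops.js may differ);
// it reuses the fs-extra and node:path imports at the top of this file.
class FileOpsSketch {
  async ensureDir(dir) {
    await fs.ensureDir(dir); // no error if the directory already exists
  }
  async remove(target) {
    await fs.remove(target); // no-op for missing paths, recursive for directories
  }
  async readFile(file) {
    return fs.readFile(file, 'utf8'); // rejects for non-existent files
  }
  async writeFile(file, content) {
    await fs.ensureDir(path.dirname(file)); // create parent directories on demand
    await fs.writeFile(file, content, 'utf8');
  }
  async exists(target) {
    return fs.pathExists(target);
  }
  async stat(target) {
    return fs.stat(target); // rejects for missing paths; exposes mtime, size, etc.
  }
}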
@@ -0,0 +1,335 @@
|
||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||||
|
|
||||||
|
describe('YamlXmlBuilder - buildCommandsXml()', () => {
|
||||||
|
let builder;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
builder = new YamlXmlBuilder();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('menu injection', () => {
|
||||||
|
it('should always inject *menu item first', () => {
|
||||||
|
const xml = builder.buildCommandsXml([]);
|
||||||
|
|
||||||
|
expect(xml).toContain('<item cmd="*menu">[M] Redisplay Menu Options</item>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should always inject *dismiss item last', () => {
|
||||||
|
const xml = builder.buildCommandsXml([]);
|
||||||
|
|
||||||
|
expect(xml).toContain('<item cmd="*dismiss">[D] Dismiss Agent</item>');
|
||||||
|
// Should be at the end before </menu>
|
||||||
|
expect(xml).toMatch(/\*dismiss.*<\/menu>/s);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should place user items between *menu and *dismiss', () => {
|
||||||
|
const menuItems = [{ trigger: 'help', description: 'Show help', action: 'show_help' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
const menuIndex = xml.indexOf('*menu');
|
||||||
|
const helpIndex = xml.indexOf('*help');
|
||||||
|
const dismissIndex = xml.indexOf('*dismiss');
|
||||||
|
|
||||||
|
expect(menuIndex).toBeLessThan(helpIndex);
|
||||||
|
expect(helpIndex).toBeLessThan(dismissIndex);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('legacy format items', () => {
|
||||||
|
it('should add * prefix to triggers', () => {
|
||||||
|
const menuItems = [{ trigger: 'help', description: 'Help', action: 'show_help' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('cmd="*help"');
|
||||||
|
expect(xml).not.toContain('cmd="help"'); // Should not have unprefixed version
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve * prefix if already present', () => {
|
||||||
|
const menuItems = [{ trigger: '*custom', description: 'Custom', action: 'custom_action' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('cmd="*custom"');
|
||||||
|
expect(xml).not.toContain('cmd="**custom"'); // Should not double-prefix
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include description as item content', () => {
|
||||||
|
const menuItems = [{ trigger: 'analyze', description: '[A] Analyze code', action: 'analyze' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('>[A] Analyze code</item>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape XML special characters in description', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
trigger: 'test',
|
||||||
|
description: 'Test <brackets> & "quotes"',
|
||||||
|
action: 'test',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('&lt;brackets&gt; &amp; &quot;quotes&quot;');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('handler attributes', () => {
|
||||||
|
it('should include workflow attribute', () => {
|
||||||
|
const menuItems = [{ trigger: 'start', description: 'Start workflow', workflow: 'main-workflow' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('workflow="main-workflow"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include exec attribute', () => {
|
||||||
|
const menuItems = [{ trigger: 'run', description: 'Run task', exec: 'path/to/task.md' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('exec="path/to/task.md"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include action attribute', () => {
|
||||||
|
const menuItems = [{ trigger: 'help', description: 'Help', action: 'show_help' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('action="show_help"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include tmpl attribute', () => {
|
||||||
|
const menuItems = [{ trigger: 'form', description: 'Form', tmpl: 'templates/form.yaml' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('tmpl="templates/form.yaml"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include data attribute', () => {
|
||||||
|
const menuItems = [{ trigger: 'load', description: 'Load', data: 'data/config.json' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('data="data/config.json"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include validate-workflow attribute', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
trigger: 'validate',
|
||||||
|
description: 'Validate',
|
||||||
|
'validate-workflow': 'validation-flow',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('validate-workflow="validation-flow"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should prioritize workflow-install over workflow', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
trigger: 'start',
|
||||||
|
description: 'Start',
|
||||||
|
workflow: 'original',
|
||||||
|
'workflow-install': 'installed-location',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('workflow="installed-location"');
|
||||||
|
expect(xml).not.toContain('workflow="original"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle multiple attributes on same item', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
trigger: 'complex',
|
||||||
|
description: 'Complex command',
|
||||||
|
workflow: 'flow',
|
||||||
|
data: 'data.json',
|
||||||
|
action: 'custom',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('workflow="flow"');
|
||||||
|
expect(xml).toContain('data="data.json"');
|
||||||
|
expect(xml).toContain('action="custom"');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('IDE and web filtering', () => {
|
||||||
|
it('should include ide-only items for IDE installation', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{ trigger: 'local', description: 'Local only', action: 'local', 'ide-only': true },
|
||||||
|
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems, false);
|
||||||
|
|
||||||
|
expect(xml).toContain('*local');
|
||||||
|
expect(xml).toContain('*normal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should skip ide-only items for web bundle', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{ trigger: 'local', description: 'Local only', action: 'local', 'ide-only': true },
|
||||||
|
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems, true);
|
||||||
|
|
||||||
|
expect(xml).not.toContain('*local');
|
||||||
|
expect(xml).toContain('*normal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include web-only items for web bundle', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{ trigger: 'web', description: 'Web only', action: 'web', 'web-only': true },
|
||||||
|
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems, true);
|
||||||
|
|
||||||
|
expect(xml).toContain('*web');
|
||||||
|
expect(xml).toContain('*normal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should skip web-only items for IDE installation', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{ trigger: 'web', description: 'Web only', action: 'web', 'web-only': true },
|
||||||
|
{ trigger: 'normal', description: 'Normal', action: 'normal' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems, false);
|
||||||
|
|
||||||
|
expect(xml).not.toContain('*web');
|
||||||
|
expect(xml).toContain('*normal');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('multi format with nested handlers', () => {
|
||||||
|
it('should build multi format items with nested handlers', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
multi: '[TS] Technical Specification',
|
||||||
|
triggers: [
|
||||||
|
{
|
||||||
|
'tech-spec': [{ input: 'Create technical specification' }, { route: 'workflows/tech-spec.yaml' }],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
TS: [{ input: 'Create technical specification' }, { route: 'workflows/tech-spec.yaml' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('<item type="multi">');
|
||||||
|
expect(xml).toContain('[TS] Technical Specification');
|
||||||
|
expect(xml).toContain('<handler');
|
||||||
|
expect(xml).toContain('match="Create technical specification"');
|
||||||
|
expect(xml).toContain('</item>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape XML in multi description', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{
|
||||||
|
multi: '[A] Analyze <code>',
|
||||||
|
triggers: [
|
||||||
|
{
|
||||||
|
analyze: [{ input: 'Analyze', route: 'task.md' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('&lt;code&gt;');
|
||||||
|
});
|
||||||
|
});
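// For reference, the multi-format output shape these assertions imply looks roughly
// like the example string below; attributes other than `match` (such as `route`)
// are assumptions and are not checked by the tests.
const exampleMultiItem = [
  '<item type="multi">[TS] Technical Specification',
  '  <handler match="Create technical specification" route="workflows/tech-spec.yaml" />',
  '</item>',
].join('\n');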
|
||||||
|
|
||||||
|
describe('edge cases', () => {
|
||||||
|
it('should handle empty menu items array', () => {
|
||||||
|
const xml = builder.buildCommandsXml([]);
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
expect(xml).toContain('</menu>');
|
||||||
|
expect(xml).toContain('*menu');
|
||||||
|
expect(xml).toContain('*dismiss');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle null menu items', () => {
|
||||||
|
const xml = builder.buildCommandsXml(null);
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
expect(xml).toContain('*menu');
|
||||||
|
expect(xml).toContain('*dismiss');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle undefined menu items', () => {
|
||||||
|
const xml = builder.buildCommandsXml();
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle empty description', () => {
|
||||||
|
const menuItems = [{ trigger: 'test', description: '', action: 'test' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('cmd="*test"');
|
||||||
|
expect(xml).toContain('></item>'); // Empty content between tags
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing trigger (edge case)', () => {
|
||||||
|
const menuItems = [{ description: 'No trigger', action: 'test' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
// Should handle gracefully - might skip or add * prefix to empty
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle Unicode in descriptions', () => {
|
||||||
|
const menuItems = [{ trigger: 'test', description: '[测试] Test 日本語', action: 'test' }];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
expect(xml).toContain('测试');
|
||||||
|
expect(xml).toContain('日本語');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('multiple menu items', () => {
|
||||||
|
it('should process all menu items in order', () => {
|
||||||
|
const menuItems = [
|
||||||
|
{ trigger: 'first', description: 'First', action: 'first' },
|
||||||
|
{ trigger: 'second', description: 'Second', action: 'second' },
|
||||||
|
{ trigger: 'third', description: 'Third', action: 'third' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildCommandsXml(menuItems);
|
||||||
|
|
||||||
|
const firstIndex = xml.indexOf('*first');
|
||||||
|
const secondIndex = xml.indexOf('*second');
|
||||||
|
const thirdIndex = xml.indexOf('*third');
|
||||||
|
|
||||||
|
expect(firstIndex).toBeLessThan(secondIndex);
|
||||||
|
expect(secondIndex).toBeLessThan(thirdIndex);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
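// A condensed sketch of the menu-building rules exercised above: *menu is always
// injected first, *dismiss last, triggers are prefixed with * exactly once,
// handler fields become attributes (with workflow-install taking priority over
// workflow), and ide-only/web-only items are filtered by the web-bundle flag.
// Illustrative only — the multi-item format and XML escaping covered elsewhere in
// this suite are omitted, and the second parameter's name is an assumption.
function buildCommandsXmlSketch(menuItems = [], forWebBundle = false) {
  const lines = ['<menu>', '  <item cmd="*menu">[M] Redisplay Menu Options</item>'];

  for (const item of menuItems || []) {
    if (forWebBundle && item['ide-only']) continue; // web bundles drop IDE-only items
    if (!forWebBundle && item['web-only']) continue; // IDE installs drop web-only items

    const trigger = String(item.trigger || '');
    const cmd = trigger.startsWith('*') ? trigger : `*${trigger}`; // prefix once, never twice
    const attrs = [];
    const workflow = item['workflow-install'] || item.workflow; // installed location wins
    if (workflow) attrs.push(` workflow="${workflow}"`);
    for (const key of ['exec', 'action', 'tmpl', 'data', 'validate-workflow']) {
      if (item[key]) attrs.push(` ${key}="${item[key]}"`);
    }
    lines.push(`  <item cmd="${cmd}"${attrs.join('')}>${item.description || ''}</item>`);
  }

  lines.push('  <item cmd="*dismiss">[D] Dismiss Agent</item>', '</menu>');
  return lines.join('\n');
}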
@@ -0,0 +1,605 @@
|
||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||||
|
|
||||||
|
describe('YamlXmlBuilder - convertToXml()', () => {
|
||||||
|
let builder;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
builder = new YamlXmlBuilder();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('basic XML generation', () => {
|
||||||
|
it('should generate XML with agent tag and attributes', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
id: 'test-agent',
|
||||||
|
name: 'Test Agent',
|
||||||
|
title: 'Test Agent Title',
|
||||||
|
icon: '🔧',
|
||||||
|
},
|
||||||
|
persona: {
|
||||||
|
role: 'Test Role',
|
||||||
|
identity: 'Test Identity',
|
||||||
|
communication_style: 'Professional',
|
||||||
|
principles: ['Principle 1'],
|
||||||
|
},
|
||||||
|
menu: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<agent id="test-agent"');
|
||||||
|
expect(xml).toContain('name="Test Agent"');
|
||||||
|
expect(xml).toContain('title="Test Agent Title"');
|
||||||
|
expect(xml).toContain('icon="🔧"');
|
||||||
|
expect(xml).toContain('</agent>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include persona section', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Developer',
|
||||||
|
identity: 'Helpful assistant',
|
||||||
|
communication_style: 'Professional',
|
||||||
|
principles: ['Clear', 'Concise'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<persona>');
|
||||||
|
expect(xml).toContain('<role>Developer</role>');
|
||||||
|
expect(xml).toContain('<identity>Helpful assistant</identity>');
|
||||||
|
expect(xml).toContain('<communication_style>Professional</communication_style>');
|
||||||
|
expect(xml).toContain('<principles>Clear Concise</principles>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include memories section if present', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
memories: ['Memory 1', 'Memory 2'],
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<memories>');
|
||||||
|
expect(xml).toContain('<memory>Memory 1</memory>');
|
||||||
|
expect(xml).toContain('<memory>Memory 2</memory>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include prompts section if present', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
prompts: [{ id: 'p1', content: 'Prompt content' }],
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<prompts>');
|
||||||
|
expect(xml).toContain('<prompt id="p1">');
|
||||||
|
expect(xml).toContain('Prompt content');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include menu section', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [
|
||||||
|
{ trigger: 'help', description: 'Show help', action: 'show_help' },
|
||||||
|
{ trigger: 'start', description: 'Start workflow', workflow: 'main' },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
expect(xml).toContain('</menu>');
|
||||||
|
// Menu always includes injected *menu item
|
||||||
|
expect(xml).toContain('*menu');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('XML escaping', () => {
|
||||||
|
it('should escape special characters in all fields', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
id: 'test',
|
||||||
|
name: 'Test',
|
||||||
|
title: 'Test Agent',
|
||||||
|
icon: '🔧',
|
||||||
|
},
|
||||||
|
persona: {
|
||||||
|
role: 'Role with <brackets>',
|
||||||
|
identity: 'Identity with & ampersand',
|
||||||
|
communication_style: 'Style with "quotes"',
|
||||||
|
principles: ["Principle with ' apostrophe"],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
// Metadata in attributes might not be escaped - focus on content
|
||||||
|
expect(xml).toContain('&lt;brackets&gt;');
|
||||||
|
expect(xml).toContain('&amp; ampersand');
|
||||||
|
expect(xml).toContain('&quot;quotes&quot;');
|
||||||
|
expect(xml).toContain('&#x27; apostrophe');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve Unicode characters', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
id: 'unicode',
|
||||||
|
name: '测试代理',
|
||||||
|
title: 'Тестовый агент',
|
||||||
|
icon: '🔧',
|
||||||
|
},
|
||||||
|
persona: {
|
||||||
|
role: '開発者',
|
||||||
|
identity: 'مساعد مفيد',
|
||||||
|
communication_style: 'Profesional',
|
||||||
|
principles: ['原则'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('测试代理');
|
||||||
|
expect(xml).toContain('Тестовый агент');
|
||||||
|
expect(xml).toContain('開発者');
|
||||||
|
expect(xml).toContain('مساعد مفيد');
|
||||||
|
expect(xml).toContain('原则');
|
||||||
|
});
|
||||||
|
});
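// The escaping assertions above are consistent with a conventional XML text escape.
// A sketch of such a helper (the builder's actual escaping function is not shown
// here, and its exact entity choices may differ):
function escapeXmlText(value) {
  return String(value)
    .replaceAll('&', '&amp;') // ampersand first so other entities are not double-escaped
    .replaceAll('<', '&lt;')
    .replaceAll('>', '&gt;')
    .replaceAll('"', '&quot;')
    .replaceAll("'", '&#x27;');
}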
|
||||||
|
|
||||||
|
describe('module detection', () => {
|
||||||
|
it('should handle module in buildMetadata', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
module: 'bmm',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Module is stored in metadata but may not be rendered as attribute
|
||||||
|
expect(xml).toContain('<agent');
|
||||||
|
expect(xml).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not include module attribute for core agents', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
// No module attribute for core
|
||||||
|
expect(xml).not.toContain('module=');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('output format variations', () => {
|
||||||
|
it('should generate installation format with YAML frontmatter', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
sourceFile: 'test-agent.yaml',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Installation format has YAML frontmatter
|
||||||
|
expect(xml).toMatch(/^---\n/);
|
||||||
|
expect(xml).toContain('name: "test agent"'); // Derived from filename
|
||||||
|
expect(xml).toContain('description: "Test Agent"');
|
||||||
|
expect(xml).toContain('---');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should generate web bundle format without frontmatter', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
forWebBundle: true,
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Web bundle format has comment header
|
||||||
|
expect(xml).toContain('<!-- Powered by BMAD-CORE™ -->');
|
||||||
|
expect(xml).toContain('# Test Agent');
|
||||||
|
expect(xml).not.toMatch(/^---\n/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should derive name from filename (remove .agent suffix)', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'pm', name: 'PM', title: 'Product Manager', icon: '📋' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
sourceFile: 'pm.agent.yaml',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should convert pm.agent.yaml → "pm"
|
||||||
|
expect(xml).toContain('name: "pm"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should convert hyphens to spaces in filename', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'cli', name: 'CLI', title: 'CLI Chief', icon: '⚙️' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
sourceFile: 'cli-chief.yaml',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should convert cli-chief.yaml → "cli chief"
|
||||||
|
expect(xml).toContain('name: "cli chief"');
|
||||||
|
});
|
||||||
|
});
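// The filename-to-name rule pinned by the tests above (test-agent.yaml → "test agent",
// pm.agent.yaml → "pm", cli-chief.yaml → "cli chief") amounts to stripping the
// extension and any .agent suffix, then turning hyphens into spaces. Sketch only;
// the helper name is hypothetical.
function deriveAgentName(sourceFile) {
  return sourceFile
    .replace(/\.ya?ml$/i, '') // drop .yaml/.yml
    .replace(/\.agent$/i, '') // drop a trailing .agent suffix
    .replaceAll('-', ' '); // hyphens become spaces
}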
|
||||||
|
|
||||||
|
describe('localskip attribute', () => {
|
||||||
|
it('should add localskip="true" when metadata has localskip', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
id: 'web-only',
|
||||||
|
name: 'Web Only',
|
||||||
|
title: 'Web Only Agent',
|
||||||
|
icon: '🌐',
|
||||||
|
localskip: true,
|
||||||
|
},
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('localskip="true"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not add localskip when false or missing', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).not.toContain('localskip=');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('edge cases', () => {
|
||||||
|
it('should handle empty menu array', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
expect(xml).toContain('</menu>');
|
||||||
|
// Should still have injected *menu item
|
||||||
|
expect(xml).toContain('*menu');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing memories', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).not.toContain('<memories>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing prompts', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).not.toContain('<prompts>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should wrap XML in markdown code fence', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('```xml');
|
||||||
|
expect(xml).toContain('```\n');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should include activation instruction for installation format', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
sourceFile: 'test.yaml',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(xml).toContain('You must fully embody this agent');
|
||||||
|
expect(xml).toContain('NEVER break character');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not include activation instruction for web bundle', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
forWebBundle: true,
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(xml).not.toContain('You must fully embody');
|
||||||
|
expect(xml).toContain('<!-- Powered by BMAD-CORE™ -->');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('legacy commands field support', () => {
|
||||||
|
it('should handle legacy "commands" field (renamed to menu)', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
commands: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
// Should process commands as menu items
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should prioritize menu over commands when both exist', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P'],
|
||||||
|
},
|
||||||
|
menu: [{ trigger: 'new', description: 'New', action: 'new_action' }],
|
||||||
|
commands: [{ trigger: 'old', description: 'Old', action: 'old_action' }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, { skipActivation: true });
|
||||||
|
|
||||||
|
// Should use menu, not commands
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('complete agent transformation', () => {
|
||||||
|
it('should transform a complete agent with all fields', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
id: 'full-agent',
|
||||||
|
name: 'Full Agent',
|
||||||
|
title: 'Complete Test Agent',
|
||||||
|
icon: '🤖',
|
||||||
|
},
|
||||||
|
persona: {
|
||||||
|
role: 'Full Stack Developer',
|
||||||
|
identity: 'Experienced software engineer',
|
||||||
|
communication_style: 'Clear and professional',
|
||||||
|
principles: ['Quality', 'Performance', 'Maintainability'],
|
||||||
|
},
|
||||||
|
memories: ['Remember project context', 'Track user preferences'],
|
||||||
|
prompts: [
|
||||||
|
{ id: 'init', content: 'Initialize the agent' },
|
||||||
|
{ id: 'task', content: 'Process the task' },
|
||||||
|
],
|
||||||
|
critical_actions: ['Never delete data', 'Always backup'],
|
||||||
|
menu: [
|
||||||
|
{ trigger: 'help', description: '[H] Show help', action: 'show_help' },
|
||||||
|
{ trigger: 'start', description: '[S] Start workflow', workflow: 'main' },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = await builder.convertToXml(agentYaml, {
|
||||||
|
sourceFile: 'full-agent.yaml',
|
||||||
|
module: 'bmm',
|
||||||
|
skipActivation: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify all sections are present
|
||||||
|
expect(xml).toContain('```xml');
|
||||||
|
expect(xml).toContain('<agent id="full-agent"');
|
||||||
|
expect(xml).toContain('<persona>');
|
||||||
|
expect(xml).toContain('<memories>');
|
||||||
|
expect(xml).toContain('<prompts>');
|
||||||
|
expect(xml).toContain('<menu>');
|
||||||
|
expect(xml).toContain('</agent>');
|
||||||
|
expect(xml).toContain('```');
|
||||||
|
// Verify persona content
|
||||||
|
expect(xml).toContain('Full Stack Developer');
|
||||||
|
// Verify memories
|
||||||
|
expect(xml).toContain('Remember project context');
|
||||||
|
// Verify prompts
|
||||||
|
expect(xml).toContain('Initialize the agent');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
@@ -0,0 +1,636 @@
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { YamlXmlBuilder } from '../../../tools/cli/lib/yaml-xml-builder.js';
|
||||||
|
import { createTempDir, cleanupTempDir, createTestFile } from '../../helpers/temp-dir.js';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'node:path';
|
||||||
|
import yaml from 'yaml';
|
||||||
|
|
||||||
|
describe('YamlXmlBuilder', () => {
|
||||||
|
let tmpDir;
|
||||||
|
let builder;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tmpDir = await createTempDir();
|
||||||
|
builder = new YamlXmlBuilder();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await cleanupTempDir(tmpDir);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('deepMerge()', () => {
|
||||||
|
it('should merge shallow objects', () => {
|
||||||
|
const target = { a: 1, b: 2 };
|
||||||
|
const source = { b: 3, c: 4 };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result).toEqual({ a: 1, b: 3, c: 4 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should merge nested objects', () => {
|
||||||
|
const target = { level1: { a: 1, b: 2 } };
|
||||||
|
const source = { level1: { b: 3, c: 4 } };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result).toEqual({ level1: { a: 1, b: 3, c: 4 } });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should merge deeply nested objects', () => {
|
||||||
|
const target = { l1: { l2: { l3: { value: 'old' } } } };
|
||||||
|
const source = { l1: { l2: { l3: { value: 'new', extra: 'data' } } } };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result).toEqual({ l1: { l2: { l3: { value: 'new', extra: 'data' } } } });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should append arrays instead of replacing', () => {
|
||||||
|
const target = { items: [1, 2, 3] };
|
||||||
|
const source = { items: [4, 5, 6] };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result.items).toEqual([1, 2, 3, 4, 5, 6]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle arrays in nested objects', () => {
|
||||||
|
const target = { config: { values: ['a', 'b'] } };
|
||||||
|
const source = { config: { values: ['c', 'd'] } };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result.config.values).toEqual(['a', 'b', 'c', 'd']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace arrays if target is not an array', () => {
|
||||||
|
const target = { items: 'string' };
|
||||||
|
const source = { items: ['a', 'b'] };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result.items).toEqual(['a', 'b']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle null values', () => {
|
||||||
|
const target = { a: null, b: 2 };
|
||||||
|
const source = { a: 1, c: null };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result).toEqual({ a: 1, b: 2, c: null });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve target values when source has no override', () => {
|
||||||
|
const target = { a: 1, b: 2, c: 3 };
|
||||||
|
const source = { d: 4 };
|
||||||
|
|
||||||
|
const result = builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(result).toEqual({ a: 1, b: 2, c: 3, d: 4 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not mutate original objects', () => {
|
||||||
|
const target = { a: 1 };
|
||||||
|
const source = { b: 2 };
|
||||||
|
|
||||||
|
builder.deepMerge(target, source);
|
||||||
|
|
||||||
|
expect(target).toEqual({ a: 1 }); // Unchanged
|
||||||
|
expect(source).toEqual({ b: 2 }); // Unchanged
|
||||||
|
});
|
||||||
|
});
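// A stand-alone sketch of the merge semantics these cases describe: plain objects
// merge recursively, arrays append rather than replace, scalar values from source
// are copied across (null included), and neither input is mutated. A hypothetical
// re-statement of the method under test, not the builder's actual code.
function deepMergeSketch(target, source) {
  const result = { ...target };
  for (const [key, value] of Object.entries(source)) {
    if (Array.isArray(value)) {
      result[key] = Array.isArray(result[key]) ? [...result[key], ...value] : [...value];
    } else if (value && typeof value === 'object') {
      const base = result[key] && typeof result[key] === 'object' ? result[key] : {};
      result[key] = deepMergeSketch(base, value); // recurse into nested objects
    } else {
      result[key] = value; // scalars (and null) from source win
    }
  }
  return result;
}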
|
||||||
|
|
||||||
|
describe('isObject()', () => {
|
||||||
|
it('should return true for plain objects', () => {
|
||||||
|
expect(builder.isObject({})).toBe(true);
|
||||||
|
expect(builder.isObject({ key: 'value' })).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return false for arrays', () => {
|
||||||
|
expect(builder.isObject([])).toBe(false);
|
||||||
|
expect(builder.isObject([1, 2, 3])).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return falsy for null', () => {
|
||||||
|
expect(builder.isObject(null)).toBeFalsy();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return falsy for primitives', () => {
|
||||||
|
expect(builder.isObject('string')).toBeFalsy();
|
||||||
|
expect(builder.isObject(42)).toBeFalsy();
|
||||||
|
expect(builder.isObject(true)).toBeFalsy();
|
||||||
|
expect(builder.isObject()).toBeFalsy();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadAndMergeAgent()', () => {
|
||||||
|
it('should load agent YAML without customization', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test Agent', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Test Role',
|
||||||
|
identity: 'Test Identity',
|
||||||
|
communication_style: 'Professional',
|
||||||
|
principles: ['Principle 1'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const agentPath = path.join(tmpDir, 'agent.yaml');
|
||||||
|
await fs.writeFile(agentPath, yaml.stringify(agentYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(agentPath);
|
||||||
|
|
||||||
|
expect(result.agent.metadata.id).toBe('test');
|
||||||
|
expect(result.agent.persona.role).toBe('Test Role');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve base persona when customize has empty strings', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Base Role',
|
||||||
|
identity: 'Base Identity',
|
||||||
|
communication_style: 'Base Style',
|
||||||
|
principles: ['Base Principle'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
persona: {
|
||||||
|
role: 'Custom Role',
|
||||||
|
identity: '', // Empty - should NOT override
|
||||||
|
communication_style: 'Custom Style',
|
||||||
|
// principles omitted
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.persona.role).toBe('Custom Role'); // Overridden
|
||||||
|
expect(result.agent.persona.identity).toBe('Base Identity'); // Preserved
|
||||||
|
expect(result.agent.persona.communication_style).toBe('Custom Style'); // Overridden
|
||||||
|
expect(result.agent.persona.principles).toEqual(['Base Principle']); // Preserved
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve base persona when customize has null values', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Base Role',
|
||||||
|
identity: 'Base Identity',
|
||||||
|
communication_style: 'Base Style',
|
||||||
|
principles: ['Base'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
persona: {
|
||||||
|
role: null,
|
||||||
|
identity: 'Custom Identity',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.persona.role).toBe('Base Role'); // Preserved (null skipped)
|
||||||
|
expect(result.agent.persona.identity).toBe('Custom Identity'); // Overridden
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve base persona when customize has empty arrays', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: {
|
||||||
|
role: 'Base Role',
|
||||||
|
identity: 'Base Identity',
|
||||||
|
communication_style: 'Base Style',
|
||||||
|
principles: ['Principle 1', 'Principle 2'],
|
||||||
|
},
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
persona: {
|
||||||
|
principles: [], // Empty array - should NOT override
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.persona.principles).toEqual(['Principle 1', 'Principle 2']);
|
||||||
|
});
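// The three persona-preservation cases above suggest that loadAndMergeAgent strips
// "empty" overrides (empty strings, nulls, empty arrays, and objects left empty by
// that pruning) from the customize document before the deep merge runs, so they
// never clobber base values. A hedged sketch of that pre-filter; the helper name
// and its exact placement in the real builder are assumptions.
function pruneEmptyOverrides(value) {
  if (Array.isArray(value)) return value.length > 0 ? value : undefined;
  if (value && typeof value === 'object') {
    const pruned = {};
    for (const [key, child] of Object.entries(value)) {
      const kept = pruneEmptyOverrides(child);
      if (kept !== undefined) pruned[key] = kept;
    }
    return Object.keys(pruned).length > 0 ? pruned : undefined;
  }
  return value === '' || value === null ? undefined : value; // empty/null never override
}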
|
||||||
|
|
||||||
|
it('should append menu items from customize', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
menu: [{ trigger: 'help', description: 'Help', action: 'show_help' }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
menu: [{ trigger: 'custom', description: 'Custom', action: 'custom_action' }],
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.menu).toHaveLength(2);
|
||||||
|
expect(result.agent.menu[0].trigger).toBe('help');
|
||||||
|
expect(result.agent.menu[1].trigger).toBe('custom');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should append critical_actions from customize', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
critical_actions: ['Action 1'],
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
critical_actions: ['Action 2', 'Action 3'],
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.critical_actions).toHaveLength(3);
|
||||||
|
expect(result.agent.critical_actions).toEqual(['Action 1', 'Action 2', 'Action 3']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should append prompts from customize', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
prompts: [{ id: 'p1', content: 'Prompt 1' }],
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
prompts: [{ id: 'p2', content: 'Prompt 2' }],
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.prompts).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing customization file', async () => {
|
||||||
|
const agentYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'test', name: 'Test', title: 'Test', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const agentPath = path.join(tmpDir, 'agent.yaml');
|
||||||
|
await fs.writeFile(agentPath, yaml.stringify(agentYaml));
|
||||||
|
|
||||||
|
const nonExistent = path.join(tmpDir, 'nonexistent.yaml');
|
||||||
|
const result = await builder.loadAndMergeAgent(agentPath, nonExistent);
|
||||||
|
|
||||||
|
expect(result.agent.metadata.id).toBe('test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle legacy commands field (renamed to menu)', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base', title: 'Base', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
commands: [{ trigger: 'old', description: 'Old', action: 'old_action' }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
commands: [{ trigger: 'new', description: 'New', action: 'new_action' }],
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.commands).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should override metadata with non-empty values', async () => {
|
||||||
|
const baseYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: { id: 'base', name: 'Base Name', title: 'Base Title', icon: '🔧' },
|
||||||
|
persona: { role: 'Role', identity: 'ID', communication_style: 'Style', principles: ['P'] },
|
||||||
|
menu: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const customizeYaml = {
|
||||||
|
agent: {
|
||||||
|
metadata: {
|
||||||
|
name: 'Custom Name',
|
||||||
|
title: '', // Empty - should be skipped
|
||||||
|
icon: '🎯',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const basePath = path.join(tmpDir, 'base.yaml');
|
||||||
|
const customizePath = path.join(tmpDir, 'customize.yaml');
|
||||||
|
await fs.writeFile(basePath, yaml.stringify(baseYaml));
|
||||||
|
await fs.writeFile(customizePath, yaml.stringify(customizeYaml));
|
||||||
|
|
||||||
|
const result = await builder.loadAndMergeAgent(basePath, customizePath);
|
||||||
|
|
||||||
|
expect(result.agent.metadata.name).toBe('Custom Name');
|
||||||
|
expect(result.agent.metadata.title).toBe('Base Title'); // Preserved
|
||||||
|
expect(result.agent.metadata.icon).toBe('🎯');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('buildPersonaXml()', () => {
|
||||||
|
it('should build complete persona XML', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Test Role',
|
||||||
|
identity: 'Test Identity',
|
||||||
|
communication_style: 'Professional',
|
||||||
|
principles: ['Principle 1', 'Principle 2', 'Principle 3'],
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('<persona>');
|
||||||
|
expect(xml).toContain('</persona>');
|
||||||
|
expect(xml).toContain('<role>Test Role</role>');
|
||||||
|
expect(xml).toContain('<identity>Test Identity</identity>');
|
||||||
|
expect(xml).toContain('<communication_style>Professional</communication_style>');
|
||||||
|
expect(xml).toContain('<principles>Principle 1 Principle 2 Principle 3</principles>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape XML special characters in persona', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Role with <tags> & "quotes"',
|
||||||
|
identity: "O'Reilly's Identity",
|
||||||
|
communication_style: 'Use <code> tags',
|
||||||
|
principles: ['Principle with & ampersand'],
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('&lt;tags&gt; &amp; &quot;quotes&quot;');
|
||||||
|
expect(xml).toContain('O&apos;Reilly&apos;s Identity');
|
||||||
|
expect(xml).toContain('&lt;code&gt; tags');
|
||||||
|
expect(xml).toContain('&amp; ampersand');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle principles as array', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: ['P1', 'P2', 'P3'],
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('<principles>P1 P2 P3</principles>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle principles as string', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
principles: 'Single principle string',
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('<principles>Single principle string</principles>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve Unicode in persona fields', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Тестовая роль',
|
||||||
|
identity: '日本語のアイデンティティ',
|
||||||
|
communication_style: 'Estilo profesional',
|
||||||
|
principles: ['原则一', 'Принцип два'],
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('Тестовая роль');
|
||||||
|
expect(xml).toContain('日本語のアイデンティティ');
|
||||||
|
expect(xml).toContain('Estilo profesional');
|
||||||
|
expect(xml).toContain('原则一 Принцип два');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle missing persona gracefully', () => {
|
||||||
|
const xml = builder.buildPersonaXml(null);
|
||||||
|
|
||||||
|
expect(xml).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle partial persona (missing optional fields)', () => {
|
||||||
|
const persona = {
|
||||||
|
role: 'Role',
|
||||||
|
identity: 'ID',
|
||||||
|
communication_style: 'Style',
|
||||||
|
// principles missing
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPersonaXml(persona);
|
||||||
|
|
||||||
|
expect(xml).toContain('<role>Role</role>');
|
||||||
|
expect(xml).toContain('<identity>ID</identity>');
|
||||||
|
expect(xml).toContain('<communication_style>Style</communication_style>');
|
||||||
|
expect(xml).not.toContain('<principles>');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('buildMemoriesXml()', () => {
|
||||||
|
it('should build memories XML from array', () => {
|
||||||
|
const memories = ['Memory 1', 'Memory 2', 'Memory 3'];
|
||||||
|
|
||||||
|
const xml = builder.buildMemoriesXml(memories);
|
||||||
|
|
||||||
|
expect(xml).toContain('<memories>');
|
||||||
|
expect(xml).toContain('</memories>');
|
||||||
|
expect(xml).toContain('<memory>Memory 1</memory>');
|
||||||
|
expect(xml).toContain('<memory>Memory 2</memory>');
|
||||||
|
expect(xml).toContain('<memory>Memory 3</memory>');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape XML special characters in memories', () => {
|
||||||
|
const memories = ['Memory with <tags>', 'Memory with & ampersand', 'Memory with "quotes"'];
|
||||||
|
|
||||||
|
const xml = builder.buildMemoriesXml(memories);
|
||||||
|
|
||||||
|
expect(xml).toContain('&lt;tags&gt;');
|
||||||
|
expect(xml).toContain('&amp; ampersand');
|
||||||
|
expect(xml).toContain('"quotes"');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty string for null memories', () => {
|
||||||
|
expect(builder.buildMemoriesXml(null)).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty string for empty array', () => {
|
||||||
|
expect(builder.buildMemoriesXml([])).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle Unicode in memories', () => {
|
||||||
|
const memories = ['记忆 1', 'Память 2', '記憶 3'];
|
||||||
|
|
||||||
|
const xml = builder.buildMemoriesXml(memories);
|
||||||
|
|
||||||
|
expect(xml).toContain('记忆 1');
|
||||||
|
expect(xml).toContain('Память 2');
|
||||||
|
expect(xml).toContain('記憶 3');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('buildPromptsXml()', () => {
|
||||||
|
it('should build prompts XML from array format', () => {
|
||||||
|
const prompts = [
|
||||||
|
{ id: 'p1', content: 'Prompt 1 content' },
|
||||||
|
{ id: 'p2', content: 'Prompt 2 content' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const xml = builder.buildPromptsXml(prompts);
|
||||||
|
|
||||||
|
expect(xml).toContain('<prompts>');
|
||||||
|
expect(xml).toContain('</prompts>');
|
||||||
|
expect(xml).toContain('<prompt id="p1">');
|
||||||
|
expect(xml).toContain('<content>');
|
||||||
|
expect(xml).toContain('Prompt 1 content');
|
||||||
|
expect(xml).toContain('<prompt id="p2">');
|
||||||
|
expect(xml).toContain('Prompt 2 content');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape XML special characters in prompts', () => {
|
||||||
|
const prompts = [{ id: 'test', content: 'Content with <tags> & "quotes"' }];
|
||||||
|
|
||||||
|
const xml = builder.buildPromptsXml(prompts);
|
||||||
|
|
||||||
|
expect(xml).toContain('<content>');
|
||||||
|
expect(xml).toContain('&lt;tags&gt; &amp; &quot;quotes&quot;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty string for null prompts', () => {
|
||||||
|
expect(builder.buildPromptsXml(null)).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle Unicode in prompts', () => {
|
||||||
|
const prompts = [{ id: 'unicode', content: 'Test 测试 тест テスト' }];
|
||||||
|
|
||||||
|
const xml = builder.buildPromptsXml(prompts);
|
||||||
|
|
||||||
|
expect(xml).toContain('<content>');
|
||||||
|
expect(xml).toContain('测试 тест テスト');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle object/dictionary format prompts', () => {
|
||||||
|
const prompts = {
|
||||||
|
p1: 'Prompt 1 content',
|
||||||
|
p2: 'Prompt 2 content',
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = builder.buildPromptsXml(prompts);
|
||||||
|
|
||||||
|
expect(xml).toContain('<prompts>');
|
||||||
|
expect(xml).toContain('<prompt id="p1">');
|
||||||
|
expect(xml).toContain('Prompt 1 content');
|
||||||
|
expect(xml).toContain('<prompt id="p2">');
|
||||||
|
expect(xml).toContain('Prompt 2 content');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty string for empty array', () => {
|
||||||
|
expect(builder.buildPromptsXml([])).toBe('');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('calculateFileHash()', () => {
|
||||||
|
it('should calculate MD5 hash of file content', async () => {
|
||||||
|
const content = 'test content for hashing';
|
||||||
|
const filePath = await createTestFile(tmpDir, 'test.txt', content);
|
||||||
|
|
||||||
|
const hash = await builder.calculateFileHash(filePath);
|
||||||
|
|
||||||
|
expect(hash).toHaveLength(8); // MD5 truncated to 8 chars
|
||||||
|
expect(hash).toMatch(/^[a-f0-9]{8}$/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return consistent hash for same content', async () => {
|
||||||
|
const file1 = await createTestFile(tmpDir, 'file1.txt', 'content');
|
||||||
|
const file2 = await createTestFile(tmpDir, 'file2.txt', 'content');
|
||||||
|
|
||||||
|
const hash1 = await builder.calculateFileHash(file1);
|
||||||
|
const hash2 = await builder.calculateFileHash(file2);
|
||||||
|
|
||||||
|
expect(hash1).toBe(hash2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return null for non-existent file', async () => {
|
||||||
|
const nonExistent = path.join(tmpDir, 'missing.txt');
|
||||||
|
|
||||||
|
const hash = await builder.calculateFileHash(nonExistent);
|
||||||
|
|
||||||
|
expect(hash).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle empty file', async () => {
|
||||||
|
const file = await createTestFile(tmpDir, 'empty.txt', '');
|
||||||
|
|
||||||
|
const hash = await builder.calculateFileHash(file);
|
||||||
|
|
||||||
|
expect(hash).toHaveLength(8);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -0,0 +1,84 @@
|
||||||
|
import { describe, it, expect } from 'vitest';
|
||||||
|
import { escapeXml } from '../../../tools/lib/xml-utils.js';
|
||||||
|
|
||||||
|
describe('xml-utils', () => {
|
||||||
|
describe('escapeXml()', () => {
|
||||||
|
it('should escape ampersand (&) to &amp;', () => {
|
||||||
|
expect(escapeXml('Tom & Jerry')).toBe('Tom &amp; Jerry');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape less than (<) to &lt;', () => {
|
||||||
|
expect(escapeXml('5 < 10')).toBe('5 &lt; 10');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape greater than (>) to &gt;', () => {
|
||||||
|
expect(escapeXml('10 > 5')).toBe('10 &gt; 5');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape double quote (") to "', () => {
|
||||||
|
expect(escapeXml('He said "hello"')).toBe('He said &quot;hello&quot;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should escape single quote (') to '", () => {
|
||||||
|
expect(escapeXml("It's working")).toBe('It's working');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve Unicode characters', () => {
|
||||||
|
expect(escapeXml('Hello 世界 🌍')).toBe('Hello 世界 🌍');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape multiple special characters in sequence', () => {
|
||||||
|
expect(escapeXml('<tag attr="value">')).toBe('&lt;tag attr=&quot;value&quot;&gt;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape all five special characters together', () => {
|
||||||
|
expect(escapeXml(`&<>"'`)).toBe('&amp;&lt;&gt;&quot;&apos;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle empty string', () => {
|
||||||
|
expect(escapeXml('')).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle null', () => {
|
||||||
|
expect(escapeXml(null)).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle undefined', () => {
|
||||||
|
expect(escapeXml()).toBe('');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text with no special characters', () => {
|
||||||
|
expect(escapeXml('Hello World')).toBe('Hello World');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle text that is only special characters', () => {
|
||||||
|
expect(escapeXml('&&&')).toBe('&amp;&amp;&amp;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('double-escapes already escaped entities (documents current behavior)', () => {
|
||||||
|
// Note: This is expected behavior - the function WILL double-escape
|
||||||
|
// This test documents the actual behavior
|
||||||
|
expect(escapeXml('&amp;')).toBe('&amp;amp;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should escape special characters in XML content', () => {
|
||||||
|
const xmlContent = '<persona role="Developer & Architect">Use <code> tags</persona>';
|
||||||
|
const expected = '&lt;persona role=&quot;Developer &amp; Architect&quot;&gt;Use &lt;code&gt; tags&lt;/persona&gt;';
|
||||||
|
expect(escapeXml(xmlContent)).toBe(expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle mixed Unicode and special characters', () => {
|
||||||
|
expect(escapeXml('测试 <tag> & "quotes"')).toBe('测试 &lt;tag&gt; &amp; &quot;quotes&quot;');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle newlines and special characters', () => {
|
||||||
|
const multiline = 'Line 1 & text\n<Line 2>\n"Line 3"';
|
||||||
|
const expected = 'Line 1 &amp; text\n&lt;Line 2&gt;\n&quot;Line 3&quot;';
|
||||||
|
expect(escapeXml(multiline)).toBe(expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle string with only whitespace', () => {
|
||||||
|
expect(escapeXml(' ')).toBe(' ');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -3,7 +3,7 @@ const path = require('node:path');
|
||||||
const fs = require('node:fs');
|
const fs = require('node:fs');
|
||||||
|
|
||||||
// Fix for stdin issues when running through npm on Windows
|
// Fix for stdin issues when running through npm on Windows
|
||||||
// Ensures keyboard interaction works properly with inquirer prompts
|
// Ensures keyboard interaction works properly with CLI prompts
|
||||||
if (process.stdin.isTTY) {
|
if (process.stdin.isTTY) {
|
||||||
try {
|
try {
|
||||||
process.stdin.resume();
|
process.stdin.resume();
|
||||||
|
|
|
||||||
|
|
@ -71,14 +71,10 @@ module.exports = {
|
||||||
console.log(chalk.dim(' • ElevenLabs AI (150+ premium voices)'));
|
console.log(chalk.dim(' • ElevenLabs AI (150+ premium voices)'));
|
||||||
console.log(chalk.dim(' • Piper TTS (50+ free voices)\n'));
|
console.log(chalk.dim(' • Piper TTS (50+ free voices)\n'));
|
||||||
|
|
||||||
const { default: inquirer } = await import('inquirer');
|
const prompts = require('../lib/prompts');
|
||||||
await inquirer.prompt([
|
await prompts.text({
|
||||||
{
|
|
||||||
type: 'input',
|
|
||||||
name: 'continue',
|
|
||||||
message: chalk.green('Press Enter to start AgentVibes installer...'),
|
message: chalk.green('Press Enter to start AgentVibes installer...'),
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -4,15 +4,7 @@ const yaml = require('yaml');
|
||||||
const chalk = require('chalk');
|
const chalk = require('chalk');
|
||||||
const { getProjectRoot, getModulePath } = require('../../../lib/project-root');
|
const { getProjectRoot, getModulePath } = require('../../../lib/project-root');
|
||||||
const { CLIUtils } = require('../../../lib/cli-utils');
|
const { CLIUtils } = require('../../../lib/cli-utils');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
// Lazy-load inquirer (ESM module) to avoid ERR_REQUIRE_ESM
|
|
||||||
let _inquirer = null;
|
|
||||||
async function getInquirer() {
|
|
||||||
if (!_inquirer) {
|
|
||||||
_inquirer = (await import('inquirer')).default;
|
|
||||||
}
|
|
||||||
return _inquirer;
|
|
||||||
}
|
|
||||||
|
|
||||||
class ConfigCollector {
|
class ConfigCollector {
|
||||||
constructor() {
|
constructor() {
|
||||||
|
|
@ -183,7 +175,6 @@ class ConfigCollector {
|
||||||
* @returns {boolean} True if new fields were prompted, false if all fields existed
|
* @returns {boolean} True if new fields were prompted, false if all fields existed
|
||||||
*/
|
*/
|
||||||
async collectModuleConfigQuick(moduleName, projectDir, silentMode = true) {
|
async collectModuleConfigQuick(moduleName, projectDir, silentMode = true) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
this.currentProjectDir = projectDir;
|
this.currentProjectDir = projectDir;
|
||||||
|
|
||||||
// Load existing config if not already loaded
|
// Load existing config if not already loaded
|
||||||
|
|
@ -359,7 +350,7 @@ class ConfigCollector {
|
||||||
// Only show header if we actually have questions
|
// Only show header if we actually have questions
|
||||||
CLIUtils.displayModuleConfigHeader(moduleName, moduleConfig.header, moduleConfig.subheader);
|
CLIUtils.displayModuleConfigHeader(moduleName, moduleConfig.header, moduleConfig.subheader);
|
||||||
console.log(); // Line break before questions
|
console.log(); // Line break before questions
|
||||||
const promptedAnswers = await inquirer.prompt(questions);
|
const promptedAnswers = await prompts.prompt(questions);
|
||||||
|
|
||||||
// Merge prompted answers with static answers
|
// Merge prompted answers with static answers
|
||||||
Object.assign(allAnswers, promptedAnswers);
|
Object.assign(allAnswers, promptedAnswers);
|
||||||
|
|
@ -502,7 +493,6 @@ class ConfigCollector {
|
||||||
* @param {boolean} skipCompletion - Skip showing completion message (for early core collection)
|
* @param {boolean} skipCompletion - Skip showing completion message (for early core collection)
|
||||||
*/
|
*/
|
||||||
async collectModuleConfig(moduleName, projectDir, skipLoadExisting = false, skipCompletion = false) {
|
async collectModuleConfig(moduleName, projectDir, skipLoadExisting = false, skipCompletion = false) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
this.currentProjectDir = projectDir;
|
this.currentProjectDir = projectDir;
|
||||||
// Load existing config if needed and not already loaded
|
// Load existing config if needed and not already loaded
|
||||||
if (!skipLoadExisting && !this.existingConfig) {
|
if (!skipLoadExisting && !this.existingConfig) {
|
||||||
|
|
@ -597,7 +587,7 @@ class ConfigCollector {
|
||||||
console.log(chalk.cyan('?') + ' ' + chalk.magenta(moduleDisplayName));
|
console.log(chalk.cyan('?') + ' ' + chalk.magenta(moduleDisplayName));
|
||||||
let customize = true;
|
let customize = true;
|
||||||
if (moduleName !== 'core') {
|
if (moduleName !== 'core') {
|
||||||
const customizeAnswer = await inquirer.prompt([
|
const customizeAnswer = await prompts.prompt([
|
||||||
{
|
{
|
||||||
type: 'confirm',
|
type: 'confirm',
|
||||||
name: 'customize',
|
name: 'customize',
|
||||||
|
|
@ -614,7 +604,7 @@ class ConfigCollector {
|
||||||
|
|
||||||
if (questionsWithoutDefaults.length > 0) {
|
if (questionsWithoutDefaults.length > 0) {
|
||||||
console.log(chalk.dim(`\n Asking required questions for ${moduleName.toUpperCase()}...`));
|
console.log(chalk.dim(`\n Asking required questions for ${moduleName.toUpperCase()}...`));
|
||||||
const promptedAnswers = await inquirer.prompt(questionsWithoutDefaults);
|
const promptedAnswers = await prompts.prompt(questionsWithoutDefaults);
|
||||||
Object.assign(allAnswers, promptedAnswers);
|
Object.assign(allAnswers, promptedAnswers);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -628,7 +618,7 @@ class ConfigCollector {
|
||||||
allAnswers[question.name] = question.default;
|
allAnswers[question.name] = question.default;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const promptedAnswers = await inquirer.prompt(questions);
|
const promptedAnswers = await prompts.prompt(questions);
|
||||||
Object.assign(allAnswers, promptedAnswers);
|
Object.assign(allAnswers, promptedAnswers);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -750,7 +740,7 @@ class ConfigCollector {
|
||||||
console.log(chalk.cyan('?') + ' ' + chalk.magenta(moduleDisplayName));
|
console.log(chalk.cyan('?') + ' ' + chalk.magenta(moduleDisplayName));
|
||||||
|
|
||||||
// Ask user if they want to accept defaults or customize on the next line
|
// Ask user if they want to accept defaults or customize on the next line
|
||||||
const { customize } = await inquirer.prompt([
|
const { customize } = await prompts.prompt([
|
||||||
{
|
{
|
||||||
type: 'confirm',
|
type: 'confirm',
|
||||||
name: 'customize',
|
name: 'customize',
|
||||||
|
|
@ -845,7 +835,7 @@ class ConfigCollector {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Build an inquirer question from a config item
|
* Build a prompt question from a config item
|
||||||
* @param {string} moduleName - Module name
|
* @param {string} moduleName - Module name
|
||||||
* @param {string} key - Config key
|
* @param {string} key - Config key
|
||||||
* @param {Object} item - Config item definition
|
* @param {Object} item - Config item definition
|
||||||
|
|
@ -1007,7 +997,7 @@ class ConfigCollector {
|
||||||
message: message,
|
message: message,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Set default - if it's dynamic, use a function that inquirer will evaluate with current answers
|
// Set default - if it's dynamic, use a function that the prompt will evaluate with current answers
|
||||||
// But if we have an existing value, always use that instead
|
// But if we have an existing value, always use that instead
|
||||||
if (existingValue !== null && existingValue !== undefined && questionType !== 'list') {
|
if (existingValue !== null && existingValue !== undefined && questionType !== 'list') {
|
||||||
question.default = existingValue;
|
question.default = existingValue;
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,7 @@ const { CLIUtils } = require('../../../lib/cli-utils');
|
||||||
const { ManifestGenerator } = require('./manifest-generator');
|
const { ManifestGenerator } = require('./manifest-generator');
|
||||||
const { IdeConfigManager } = require('./ide-config-manager');
|
const { IdeConfigManager } = require('./ide-config-manager');
|
||||||
const { CustomHandler } = require('../custom/handler');
|
const { CustomHandler } = require('../custom/handler');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
|
|
||||||
// BMAD installation folder name - this is constant and should never change
|
// BMAD installation folder name - this is constant and should never change
|
||||||
const BMAD_FOLDER_NAME = '_bmad';
|
const BMAD_FOLDER_NAME = '_bmad';
|
||||||
|
|
@ -758,6 +759,9 @@ class Installer {
|
||||||
config.skipIde = toolSelection.skipIde;
|
config.skipIde = toolSelection.skipIde;
|
||||||
const ideConfigurations = toolSelection.configurations;
|
const ideConfigurations = toolSelection.configurations;
|
||||||
|
|
||||||
|
// Add spacing after prompts before installation progress
|
||||||
|
console.log('');
|
||||||
|
|
||||||
if (spinner.isSpinning) {
|
if (spinner.isSpinning) {
|
||||||
spinner.text = 'Continuing installation...';
|
spinner.text = 'Continuing installation...';
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -2139,15 +2143,11 @@ class Installer {
|
||||||
* Private: Prompt for update action
|
* Private: Prompt for update action
|
||||||
*/
|
*/
|
||||||
async promptUpdateAction() {
|
async promptUpdateAction() {
|
||||||
const { default: inquirer } = await import('inquirer');
|
const action = await prompts.select({
|
||||||
return await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'action',
|
|
||||||
message: 'What would you like to do?',
|
message: 'What would you like to do?',
|
||||||
choices: [{ name: 'Update existing installation', value: 'update' }],
|
choices: [{ name: 'Update existing installation', value: 'update' }],
|
||||||
},
|
});
|
||||||
]);
|
return { action };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -2156,8 +2156,6 @@ class Installer {
|
||||||
* @param {Object} _legacyV4 - Legacy V4 detection result (unused in simplified version)
|
* @param {Object} _legacyV4 - Legacy V4 detection result (unused in simplified version)
|
||||||
*/
|
*/
|
||||||
async handleLegacyV4Migration(_projectDir, _legacyV4) {
|
async handleLegacyV4Migration(_projectDir, _legacyV4) {
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
|
|
||||||
console.log('');
|
console.log('');
|
||||||
console.log(chalk.yellow.bold('⚠️ Legacy BMAD v4 detected'));
|
console.log(chalk.yellow.bold('⚠️ Legacy BMAD v4 detected'));
|
||||||
console.log(chalk.yellow('─'.repeat(80)));
|
console.log(chalk.yellow('─'.repeat(80)));
|
||||||
|
|
@ -2172,26 +2170,22 @@ class Installer {
|
||||||
console.log(chalk.dim('If your v4 installation set up rules or commands, you should remove those as well.'));
|
console.log(chalk.dim('If your v4 installation set up rules or commands, you should remove those as well.'));
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
||||||
const { proceed } = await inquirer.prompt([
|
const proceed = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'proceed',
|
|
||||||
message: 'What would you like to do?',
|
message: 'What would you like to do?',
|
||||||
choices: [
|
choices: [
|
||||||
{
|
{
|
||||||
name: 'Exit and clean up manually (recommended)',
|
name: 'Exit and clean up manually (recommended)',
|
||||||
value: 'exit',
|
value: 'exit',
|
||||||
short: 'Exit installation',
|
hint: 'Exit installation',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Continue with installation anyway',
|
name: 'Continue with installation anyway',
|
||||||
value: 'continue',
|
value: 'continue',
|
||||||
short: 'Continue',
|
hint: 'Continue',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
default: 'exit',
|
default: 'exit',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (proceed === 'exit') {
|
if (proceed === 'exit') {
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
@ -2437,7 +2431,6 @@ class Installer {
|
||||||
|
|
||||||
console.log(chalk.yellow(`\n⚠️ Found ${customModulesWithMissingSources.length} custom module(s) with missing sources:`));
|
console.log(chalk.yellow(`\n⚠️ Found ${customModulesWithMissingSources.length} custom module(s) with missing sources:`));
|
||||||
|
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
let keptCount = 0;
|
let keptCount = 0;
|
||||||
let updatedCount = 0;
|
let updatedCount = 0;
|
||||||
let removedCount = 0;
|
let removedCount = 0;
|
||||||
|
|
@ -2451,12 +2444,12 @@ class Installer {
|
||||||
{
|
{
|
||||||
name: 'Keep installed (will not be processed)',
|
name: 'Keep installed (will not be processed)',
|
||||||
value: 'keep',
|
value: 'keep',
|
||||||
short: 'Keep',
|
hint: 'Keep',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Specify new source location',
|
name: 'Specify new source location',
|
||||||
value: 'update',
|
value: 'update',
|
||||||
short: 'Update',
|
hint: 'Update',
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|
@ -2465,33 +2458,27 @@ class Installer {
|
||||||
choices.push({
|
choices.push({
|
||||||
name: '⚠️ REMOVE module completely (destructive!)',
|
name: '⚠️ REMOVE module completely (destructive!)',
|
||||||
value: 'remove',
|
value: 'remove',
|
||||||
short: 'Remove',
|
hint: 'Remove',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const { action } = await inquirer.prompt([
|
const action = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'action',
|
|
||||||
message: `How would you like to handle "${missing.name}"?`,
|
message: `How would you like to handle "${missing.name}"?`,
|
||||||
choices,
|
choices,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
switch (action) {
|
switch (action) {
|
||||||
case 'update': {
|
case 'update': {
|
||||||
const { newSourcePath } = await inquirer.prompt([
|
// Use sync validation because @clack/prompts doesn't support async validate
|
||||||
{
|
const newSourcePath = await prompts.text({
|
||||||
type: 'input',
|
|
||||||
name: 'newSourcePath',
|
|
||||||
message: 'Enter the new path to the custom module:',
|
message: 'Enter the new path to the custom module:',
|
||||||
default: missing.sourcePath,
|
default: missing.sourcePath,
|
||||||
validate: async (input) => {
|
validate: (input) => {
|
||||||
if (!input || input.trim() === '') {
|
if (!input || input.trim() === '') {
|
||||||
return 'Please enter a path';
|
return 'Please enter a path';
|
||||||
}
|
}
|
||||||
const expandedPath = path.resolve(input.trim());
|
const expandedPath = path.resolve(input.trim());
|
||||||
if (!(await fs.pathExists(expandedPath))) {
|
if (!fs.pathExistsSync(expandedPath)) {
|
||||||
return 'Path does not exist';
|
return 'Path does not exist';
|
||||||
}
|
}
|
||||||
// Check if it looks like a valid module
|
// Check if it looks like a valid module
|
||||||
|
|
@ -2499,13 +2486,12 @@ class Installer {
|
||||||
const agentsPath = path.join(expandedPath, 'agents');
|
const agentsPath = path.join(expandedPath, 'agents');
|
||||||
const workflowsPath = path.join(expandedPath, 'workflows');
|
const workflowsPath = path.join(expandedPath, 'workflows');
|
||||||
|
|
||||||
if (!(await fs.pathExists(moduleYamlPath)) && !(await fs.pathExists(agentsPath)) && !(await fs.pathExists(workflowsPath))) {
|
if (!fs.pathExistsSync(moduleYamlPath) && !fs.pathExistsSync(agentsPath) && !fs.pathExistsSync(workflowsPath)) {
|
||||||
return 'Path does not appear to contain a valid custom module';
|
return 'Path does not appear to contain a valid custom module';
|
||||||
}
|
}
|
||||||
return true;
|
return; // clack expects undefined for valid input
|
||||||
},
|
},
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
// Update the source in manifest
|
// Update the source in manifest
|
||||||
const resolvedPath = path.resolve(newSourcePath.trim());
|
const resolvedPath = path.resolve(newSourcePath.trim());
|
||||||
|
|
@ -2531,46 +2517,38 @@ class Installer {
|
||||||
console.log(chalk.red.bold(`\n⚠️ WARNING: This will PERMANENTLY DELETE "${missing.name}" and all its files!`));
|
console.log(chalk.red.bold(`\n⚠️ WARNING: This will PERMANENTLY DELETE "${missing.name}" and all its files!`));
|
||||||
console.log(chalk.red(` Module location: ${path.join(bmadDir, missing.id)}`));
|
console.log(chalk.red(` Module location: ${path.join(bmadDir, missing.id)}`));
|
||||||
|
|
||||||
const { confirm } = await inquirer.prompt([
|
const confirmDelete = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'confirm',
|
|
||||||
message: chalk.red.bold('Are you absolutely sure you want to delete this module?'),
|
message: chalk.red.bold('Are you absolutely sure you want to delete this module?'),
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (confirm) {
|
if (confirmDelete) {
|
||||||
const { typedConfirm } = await inquirer.prompt([
|
const typedConfirm = await prompts.text({
|
||||||
{
|
|
||||||
type: 'input',
|
|
||||||
name: 'typedConfirm',
|
|
||||||
message: chalk.red.bold('Type "DELETE" to confirm permanent deletion:'),
|
message: chalk.red.bold('Type "DELETE" to confirm permanent deletion:'),
|
||||||
validate: (input) => {
|
validate: (input) => {
|
||||||
if (input !== 'DELETE') {
|
if (input !== 'DELETE') {
|
||||||
return chalk.red('You must type "DELETE" exactly to proceed');
|
return chalk.red('You must type "DELETE" exactly to proceed');
|
||||||
}
|
}
|
||||||
return true;
|
return; // clack expects undefined for valid input
|
||||||
},
|
},
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (typedConfirm === 'DELETE') {
|
if (typedConfirm === 'DELETE') {
|
||||||
// Remove the module from filesystem and manifest
|
// Remove the module from filesystem and manifest
|
||||||
const modulePath = path.join(bmadDir, moduleId);
|
const modulePath = path.join(bmadDir, missing.id);
|
||||||
if (await fs.pathExists(modulePath)) {
|
if (await fs.pathExists(modulePath)) {
|
||||||
const fsExtra = require('fs-extra');
|
const fsExtra = require('fs-extra');
|
||||||
await fsExtra.remove(modulePath);
|
await fsExtra.remove(modulePath);
|
||||||
console.log(chalk.yellow(` ✓ Deleted module directory: ${path.relative(projectRoot, modulePath)}`));
|
console.log(chalk.yellow(` ✓ Deleted module directory: ${path.relative(projectRoot, modulePath)}`));
|
||||||
}
|
}
|
||||||
|
|
||||||
await this.manifest.removeModule(bmadDir, moduleId);
|
await this.manifest.removeModule(bmadDir, missing.id);
|
||||||
await this.manifest.removeCustomModule(bmadDir, moduleId);
|
await this.manifest.removeCustomModule(bmadDir, missing.id);
|
||||||
console.log(chalk.yellow(` ✓ Removed from manifest`));
|
console.log(chalk.yellow(` ✓ Removed from manifest`));
|
||||||
|
|
||||||
// Also remove from installedModules list
|
// Also remove from installedModules list
|
||||||
if (installedModules && installedModules.includes(moduleId)) {
|
if (installedModules && installedModules.includes(missing.id)) {
|
||||||
const index = installedModules.indexOf(moduleId);
|
const index = installedModules.indexOf(missing.id);
|
||||||
if (index !== -1) {
|
if (index !== -1) {
|
||||||
installedModules.splice(index, 1);
|
installedModules.splice(index, 1);
|
||||||
}
|
}
|
||||||
|
|
@ -2591,7 +2569,7 @@ class Installer {
|
||||||
}
|
}
|
||||||
case 'keep': {
|
case 'keep': {
|
||||||
keptCount++;
|
keptCount++;
|
||||||
keptModulesWithoutSources.push(moduleId);
|
keptModulesWithoutSources.push(missing.id);
|
||||||
console.log(chalk.dim(` Module will be kept as-is`));
|
console.log(chalk.dim(` Module will be kept as-is`));
|
||||||
|
|
||||||
break;
|
break;
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,7 @@ const {
|
||||||
resolveSubagentFiles,
|
resolveSubagentFiles,
|
||||||
} = require('./shared/module-injections');
|
} = require('./shared/module-injections');
|
||||||
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Google Antigravity IDE setup handler
|
* Google Antigravity IDE setup handler
|
||||||
|
|
@ -26,6 +27,21 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
this.workflowsDir = 'workflows';
|
this.workflowsDir = 'workflows';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prompt for subagent installation location
|
||||||
|
* @returns {Promise<string>} Selected location ('project' or 'user')
|
||||||
|
*/
|
||||||
|
async _promptInstallLocation() {
|
||||||
|
return prompts.select({
|
||||||
|
message: 'Where would you like to install Antigravity subagents?',
|
||||||
|
choices: [
|
||||||
|
{ name: 'Project level (.agent/agents/)', value: 'project' },
|
||||||
|
{ name: 'User level (~/.agent/agents/)', value: 'user' },
|
||||||
|
],
|
||||||
|
default: 'project',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Collect configuration choices before installation
|
* Collect configuration choices before installation
|
||||||
* @param {Object} options - Configuration options
|
* @param {Object} options - Configuration options
|
||||||
|
|
@ -57,21 +73,7 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
||||||
|
|
||||||
if (config.subagentChoices.install !== 'none') {
|
if (config.subagentChoices.install !== 'none') {
|
||||||
// Ask for installation location
|
config.installLocation = await this._promptInstallLocation();
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
const locationAnswer = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'location',
|
|
||||||
message: 'Where would you like to install Antigravity subagents?',
|
|
||||||
choices: [
|
|
||||||
{ name: 'Project level (.agent/agents/)', value: 'project' },
|
|
||||||
{ name: 'User level (~/.agent/agents/)', value: 'user' },
|
|
||||||
],
|
|
||||||
default: 'project',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
config.installLocation = locationAnswer.location;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|
@ -297,20 +299,7 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
choices = await this.promptSubagentInstallation(config.subagents);
|
choices = await this.promptSubagentInstallation(config.subagents);
|
||||||
|
|
||||||
if (choices.install !== 'none') {
|
if (choices.install !== 'none') {
|
||||||
const { default: inquirer } = await import('inquirer');
|
location = await this._promptInstallLocation();
|
||||||
const locationAnswer = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'location',
|
|
||||||
message: 'Where would you like to install Antigravity subagents?',
|
|
||||||
choices: [
|
|
||||||
{ name: 'Project level (.agent/agents/)', value: 'project' },
|
|
||||||
{ name: 'User level (~/.agent/agents/)', value: 'user' },
|
|
||||||
],
|
|
||||||
default: 'project',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
location = locationAnswer.location;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -334,13 +323,8 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
* Prompt user for subagent installation preferences
|
* Prompt user for subagent installation preferences
|
||||||
*/
|
*/
|
||||||
async promptSubagentInstallation(subagentConfig) {
|
async promptSubagentInstallation(subagentConfig) {
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
|
|
||||||
// First ask if they want to install subagents
|
// First ask if they want to install subagents
|
||||||
const { install } = await inquirer.prompt([
|
const install = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'install',
|
|
||||||
message: 'Would you like to install Antigravity subagents for enhanced functionality?',
|
message: 'Would you like to install Antigravity subagents for enhanced functionality?',
|
||||||
choices: [
|
choices: [
|
||||||
{ name: 'Yes, install all subagents', value: 'all' },
|
{ name: 'Yes, install all subagents', value: 'all' },
|
||||||
|
|
@ -348,8 +332,7 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
{ name: 'No, skip subagent installation', value: 'none' },
|
{ name: 'No, skip subagent installation', value: 'none' },
|
||||||
],
|
],
|
||||||
default: 'all',
|
default: 'all',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (install === 'selective') {
|
if (install === 'selective') {
|
||||||
// Show list of available subagents with descriptions
|
// Show list of available subagents with descriptions
|
||||||
|
|
@ -361,18 +344,14 @@ class AntigravitySetup extends BaseIdeSetup {
|
||||||
'document-reviewer.md': 'Document quality review',
|
'document-reviewer.md': 'Document quality review',
|
||||||
};
|
};
|
||||||
|
|
||||||
const { selected } = await inquirer.prompt([
|
const selected = await prompts.multiselect({
|
||||||
{
|
message: `Select subagents to install ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
|
||||||
type: 'checkbox',
|
|
||||||
name: 'selected',
|
|
||||||
message: 'Select subagents to install:',
|
|
||||||
choices: subagentConfig.files.map((file) => ({
|
choices: subagentConfig.files.map((file) => ({
|
||||||
name: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
|
name: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
|
||||||
value: file,
|
value: file,
|
||||||
checked: true,
|
checked: true,
|
||||||
})),
|
})),
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
return { install: 'selective', selected };
|
return { install: 'selective', selected };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,7 @@ const {
|
||||||
resolveSubagentFiles,
|
resolveSubagentFiles,
|
||||||
} = require('./shared/module-injections');
|
} = require('./shared/module-injections');
|
||||||
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
const { getAgentsFromBmad, getAgentsFromDir } = require('./shared/bmad-artifacts');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Claude Code IDE setup handler
|
* Claude Code IDE setup handler
|
||||||
|
|
@ -25,6 +26,21 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
this.agentsDir = 'agents';
|
this.agentsDir = 'agents';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prompt for subagent installation location
|
||||||
|
* @returns {Promise<string>} Selected location ('project' or 'user')
|
||||||
|
*/
|
||||||
|
async promptInstallLocation() {
|
||||||
|
return prompts.select({
|
||||||
|
message: 'Where would you like to install Claude Code subagents?',
|
||||||
|
choices: [
|
||||||
|
{ name: 'Project level (.claude/agents/)', value: 'project' },
|
||||||
|
{ name: 'User level (~/.claude/agents/)', value: 'user' },
|
||||||
|
],
|
||||||
|
default: 'project',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Collect configuration choices before installation
|
* Collect configuration choices before installation
|
||||||
* @param {Object} options - Configuration options
|
* @param {Object} options - Configuration options
|
||||||
|
|
@ -56,21 +72,7 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
config.subagentChoices = await this.promptSubagentInstallation(injectionConfig.subagents);
|
||||||
|
|
||||||
if (config.subagentChoices.install !== 'none') {
|
if (config.subagentChoices.install !== 'none') {
|
||||||
// Ask for installation location
|
config.installLocation = await this.promptInstallLocation();
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
const locationAnswer = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'location',
|
|
||||||
message: 'Where would you like to install Claude Code subagents?',
|
|
||||||
choices: [
|
|
||||||
{ name: 'Project level (.claude/agents/)', value: 'project' },
|
|
||||||
{ name: 'User level (~/.claude/agents/)', value: 'user' },
|
|
||||||
],
|
|
||||||
default: 'project',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
config.installLocation = locationAnswer.location;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|
@ -305,20 +307,7 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
choices = await this.promptSubagentInstallation(config.subagents);
|
choices = await this.promptSubagentInstallation(config.subagents);
|
||||||
|
|
||||||
if (choices.install !== 'none') {
|
if (choices.install !== 'none') {
|
||||||
const { default: inquirer } = await import('inquirer');
|
location = await this.promptInstallLocation();
|
||||||
const locationAnswer = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'location',
|
|
||||||
message: 'Where would you like to install Claude Code subagents?',
|
|
||||||
choices: [
|
|
||||||
{ name: 'Project level (.claude/agents/)', value: 'project' },
|
|
||||||
{ name: 'User level (~/.claude/agents/)', value: 'user' },
|
|
||||||
],
|
|
||||||
default: 'project',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
location = locationAnswer.location;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -342,13 +331,8 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
* Prompt user for subagent installation preferences
|
* Prompt user for subagent installation preferences
|
||||||
*/
|
*/
|
||||||
async promptSubagentInstallation(subagentConfig) {
|
async promptSubagentInstallation(subagentConfig) {
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
|
|
||||||
// First ask if they want to install subagents
|
// First ask if they want to install subagents
|
||||||
const { install } = await inquirer.prompt([
|
const install = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'install',
|
|
||||||
message: 'Would you like to install Claude Code subagents for enhanced functionality?',
|
message: 'Would you like to install Claude Code subagents for enhanced functionality?',
|
||||||
choices: [
|
choices: [
|
||||||
{ name: 'Yes, install all subagents', value: 'all' },
|
{ name: 'Yes, install all subagents', value: 'all' },
|
||||||
|
|
@ -356,8 +340,7 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
{ name: 'No, skip subagent installation', value: 'none' },
|
{ name: 'No, skip subagent installation', value: 'none' },
|
||||||
],
|
],
|
||||||
default: 'all',
|
default: 'all',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (install === 'selective') {
|
if (install === 'selective') {
|
||||||
// Show list of available subagents with descriptions
|
// Show list of available subagents with descriptions
|
||||||
|
|
@ -369,18 +352,14 @@ class ClaudeCodeSetup extends BaseIdeSetup {
|
||||||
'document-reviewer.md': 'Document quality review',
|
'document-reviewer.md': 'Document quality review',
|
||||||
};
|
};
|
||||||
|
|
||||||
const { selected } = await inquirer.prompt([
|
const selected = await prompts.multiselect({
|
||||||
{
|
message: `Select subagents to install ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
|
||||||
type: 'checkbox',
|
options: subagentConfig.files.map((file) => ({
|
||||||
name: 'selected',
|
label: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
|
||||||
message: 'Select subagents to install:',
|
|
||||||
choices: subagentConfig.files.map((file) => ({
|
|
||||||
name: `${file.replace('.md', '')} - ${subagentInfo[file] || 'Specialized assistant'}`,
|
|
||||||
value: file,
|
value: file,
|
||||||
checked: true,
|
|
||||||
})),
|
})),
|
||||||
},
|
initialValues: subagentConfig.files,
|
||||||
]);
|
});
|
||||||
|
|
||||||
return { install: 'selective', selected };
|
return { install: 'selective', selected };
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,7 @@ const { BaseIdeSetup } = require('./_base-ide');
|
||||||
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
const { WorkflowCommandGenerator } = require('./shared/workflow-command-generator');
|
||||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||||
const { getTasksFromBmad } = require('./shared/bmad-artifacts');
|
const { getTasksFromBmad } = require('./shared/bmad-artifacts');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Codex setup handler (CLI mode)
|
* Codex setup handler (CLI mode)
|
||||||
|
|
@ -21,16 +22,11 @@ class CodexSetup extends BaseIdeSetup {
|
||||||
* @returns {Object} Collected configuration
|
* @returns {Object} Collected configuration
|
||||||
*/
|
*/
|
||||||
async collectConfiguration(options = {}) {
|
async collectConfiguration(options = {}) {
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
|
|
||||||
let confirmed = false;
|
let confirmed = false;
|
||||||
let installLocation = 'global';
|
let installLocation = 'global';
|
||||||
|
|
||||||
while (!confirmed) {
|
while (!confirmed) {
|
||||||
const { location } = await inquirer.prompt([
|
installLocation = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'location',
|
|
||||||
message: 'Where would you like to install Codex CLI prompts?',
|
message: 'Where would you like to install Codex CLI prompts?',
|
||||||
choices: [
|
choices: [
|
||||||
{
|
{
|
||||||
|
|
@ -43,10 +39,7 @@ class CodexSetup extends BaseIdeSetup {
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
default: 'global',
|
default: 'global',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
installLocation = location;
|
|
||||||
|
|
||||||
// Display detailed instructions for the chosen option
|
// Display detailed instructions for the chosen option
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
@ -57,16 +50,10 @@ class CodexSetup extends BaseIdeSetup {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Confirm the choice
|
// Confirm the choice
|
||||||
const { proceed } = await inquirer.prompt([
|
confirmed = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'proceed',
|
|
||||||
message: 'Proceed with this installation option?',
|
message: 'Proceed with this installation option?',
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
confirmed = proceed;
|
|
||||||
|
|
||||||
if (!confirmed) {
|
if (!confirmed) {
|
||||||
console.log(chalk.yellow("\n Let's choose a different installation option.\n"));
|
console.log(chalk.yellow("\n Let's choose a different installation option.\n"));
|
||||||
|
|
|
||||||
|
|
@ -2,6 +2,7 @@ const path = require('node:path');
|
||||||
const { BaseIdeSetup } = require('./_base-ide');
|
const { BaseIdeSetup } = require('./_base-ide');
|
||||||
const chalk = require('chalk');
|
const chalk = require('chalk');
|
||||||
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
const { AgentCommandGenerator } = require('./shared/agent-command-generator');
|
||||||
|
const prompts = require('../../../lib/prompts');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* GitHub Copilot setup handler
|
* GitHub Copilot setup handler
|
||||||
|
|
@ -21,16 +22,12 @@ class GitHubCopilotSetup extends BaseIdeSetup {
|
||||||
* @returns {Object} Collected configuration
|
* @returns {Object} Collected configuration
|
||||||
*/
|
*/
|
||||||
async collectConfiguration(options = {}) {
|
async collectConfiguration(options = {}) {
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
const config = {};
|
const config = {};
|
||||||
|
|
||||||
console.log('\n' + chalk.blue(' 🔧 VS Code Settings Configuration'));
|
console.log('\n' + chalk.blue(' 🔧 VS Code Settings Configuration'));
|
||||||
console.log(chalk.dim(' GitHub Copilot works best with specific settings\n'));
|
console.log(chalk.dim(' GitHub Copilot works best with specific settings\n'));
|
||||||
|
|
||||||
const response = await inquirer.prompt([
|
config.vsCodeConfig = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'configChoice',
|
|
||||||
message: 'How would you like to configure VS Code settings?',
|
message: 'How would you like to configure VS Code settings?',
|
||||||
choices: [
|
choices: [
|
||||||
{ name: 'Use recommended defaults (fastest)', value: 'defaults' },
|
{ name: 'Use recommended defaults (fastest)', value: 'defaults' },
|
||||||
|
|
@ -38,12 +35,10 @@ class GitHubCopilotSetup extends BaseIdeSetup {
|
||||||
{ name: 'Skip settings configuration', value: 'skip' },
|
{ name: 'Skip settings configuration', value: 'skip' },
|
||||||
],
|
],
|
||||||
default: 'defaults',
|
default: 'defaults',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
config.vsCodeConfig = response.configChoice;
|
|
||||||
|
|
||||||
if (response.configChoice === 'manual') {
|
if (config.vsCodeConfig === 'manual') {
|
||||||
config.manualSettings = await inquirer.prompt([
|
config.manualSettings = await prompts.prompt([
|
||||||
{
|
{
|
||||||
type: 'input',
|
type: 'input',
|
||||||
name: 'maxRequests',
|
name: 'maxRequests',
|
||||||
|
|
@ -52,7 +47,8 @@ class GitHubCopilotSetup extends BaseIdeSetup {
|
||||||
validate: (input) => {
|
validate: (input) => {
|
||||||
const num = parseInt(input, 10);
|
const num = parseInt(input, 10);
|
||||||
if (isNaN(num)) return 'Enter a valid number 1-50';
|
if (isNaN(num)) return 'Enter a valid number 1-50';
|
||||||
return (num >= 1 && num <= 50) || 'Enter 1-50';
|
if (num < 1 || num > 50) return 'Enter a number between 1-50';
|
||||||
|
return true;
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|
|
||||||
|
|
@ -119,7 +119,8 @@ class KiloSetup extends BaseIdeSetup {
|
||||||
modeEntry += ` name: '${icon} ${title}'\n`;
|
modeEntry += ` name: '${icon} ${title}'\n`;
|
||||||
modeEntry += ` roleDefinition: ${roleDefinition}\n`;
|
modeEntry += ` roleDefinition: ${roleDefinition}\n`;
|
||||||
modeEntry += ` whenToUse: ${whenToUse}\n`;
|
modeEntry += ` whenToUse: ${whenToUse}\n`;
|
||||||
modeEntry += ` customInstructions: ${activationHeader} Read the full YAML from ${relativePath} start activation to alter your state of being follow startup section instructions stay in this being until told to exit this mode\n`;
|
modeEntry += ` customInstructions: |\n`;
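// The '|' emitted above starts a YAML literal block scalar, so the long activation instruction added next is written as its own indented line instead of inline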
|
||||||
|
modeEntry += ` ${activationHeader} Read the full YAML from ${relativePath} start activation to alter your state of being follow startup section instructions stay in this being until told to exit this mode\n`;
|
||||||
modeEntry += ` groups:\n`;
|
modeEntry += ` groups:\n`;
|
||||||
modeEntry += ` - read\n`;
|
modeEntry += ` - read\n`;
|
||||||
modeEntry += ` - edit\n`;
|
modeEntry += ` - edit\n`;
|
||||||
|
|
|
||||||
|
|
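The change above emits `customInstructions` as a YAML block scalar (`|`) with the activation sentence on its own indented line instead of an inline value, which keeps long free-text instructions from colliding with YAML syntax. A standalone sketch of the emitted string, with placeholder values for every interpolated variable and the long activation sentence abbreviated:

```js
// Standalone sketch of the customInstructions change above; all values are placeholders.
const icon = '🤖';
const title = 'Example Agent';
const roleDefinition = 'Example role';
const whenToUse = 'Example usage';
const activationHeader = 'ACTIVATION:'; // assumed placeholder
const relativePath = 'bmad/agents/example.agent.yaml'; // assumed placeholder

let modeEntry = '';
modeEntry += ` name: '${icon} ${title}'\n`;
modeEntry += ` roleDefinition: ${roleDefinition}\n`;
modeEntry += ` whenToUse: ${whenToUse}\n`;
// Block scalar: the long activation sentence becomes the scalar's indented body,
// so colons and other YAML-significant characters inside it stay literal text.
modeEntry += ` customInstructions: |\n`;
modeEntry += `  ${activationHeader} Read the full YAML from ${relativePath} ...\n`;
modeEntry += ` groups:\n`;
modeEntry += `  - read\n`;
modeEntry += `  - edit\n`;

console.log(modeEntry);
```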
@@ -108,7 +108,10 @@ async function resolveSubagentFiles(handlerBaseDir, subagentConfig, subagentChoi
   const resolved = [];
 
   for (const file of filesToCopy) {
-    const pattern = path.join(sourceDir, '**', file);
+    // Use forward slashes for glob pattern (works on both Windows and Unix)
+    // Convert backslashes to forward slashes for glob compatibility
+    const normalizedSourceDir = sourceDir.replaceAll('\\', '/');
+    const pattern = `${normalizedSourceDir}/**/${file}`;
     const matches = await glob(pattern);
 
     if (matches.length > 0) {
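The hunk above drops `path.join()` in favor of a forward-slash pattern because glob treats backslashes as escapes, not separators. A short sketch of the same construction, assuming the promise-based `glob()` call the surrounding code already uses:

```js
// Sketch of the Windows-safe glob pattern construction introduced above.
const { glob } = require('glob');

async function findFiles(sourceDir, file) {
  // path.join(sourceDir, '**', file) yields backslash separators on Windows
  // (e.g. C:\repo\**\agent.md), which glob reads as escapes rather than separators.
  // Normalizing to forward slashes produces a pattern that works on both platforms.
  const normalizedSourceDir = sourceDir.replaceAll('\\', '/');
  const pattern = `${normalizedSourceDir}/**/${file}`;
  return await glob(pattern);
}
```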
@ -0,0 +1,432 @@
|
||||||
|
/**
|
||||||
|
* @clack/prompts wrapper for BMAD CLI
|
||||||
|
*
|
||||||
|
* This module provides a unified interface for CLI prompts using @clack/prompts.
|
||||||
|
* It replaces Inquirer.js to fix Windows arrow key navigation issues (libuv #852).
|
||||||
|
*
|
||||||
|
* @module prompts
|
||||||
|
*/
|
||||||
|
|
||||||
|
let _clack = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lazy-load @clack/prompts (ESM module)
|
||||||
|
* @returns {Promise<Object>} The clack prompts module
|
||||||
|
*/
|
||||||
|
async function getClack() {
|
||||||
|
if (!_clack) {
|
||||||
|
_clack = await import('@clack/prompts');
|
||||||
|
}
|
||||||
|
return _clack;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle user cancellation gracefully
|
||||||
|
* @param {any} value - The value to check
|
||||||
|
* @param {string} [message='Operation cancelled'] - Message to display
|
||||||
|
* @returns {boolean} True if cancelled
|
||||||
|
*/
|
||||||
|
async function handleCancel(value, message = 'Operation cancelled') {
|
||||||
|
const clack = await getClack();
|
||||||
|
if (clack.isCancel(value)) {
|
||||||
|
clack.cancel(message);
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display intro message
|
||||||
|
* @param {string} message - The intro message
|
||||||
|
*/
|
||||||
|
async function intro(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.intro(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display outro message
|
||||||
|
* @param {string} message - The outro message
|
||||||
|
*/
|
||||||
|
async function outro(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.outro(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display a note/info box
|
||||||
|
* @param {string} message - The note content
|
||||||
|
* @param {string} [title] - Optional title
|
||||||
|
*/
|
||||||
|
async function note(message, title) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.note(message, title);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Display a spinner for async operations
|
||||||
|
* @returns {Object} Spinner controller with start, stop, message methods
|
||||||
|
*/
|
||||||
|
async function spinner() {
|
||||||
|
const clack = await getClack();
|
||||||
|
return clack.spinner();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Single-select prompt (replaces Inquirer 'list' type)
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {Array} options.choices - Array of choices [{name, value, hint?}]
|
||||||
|
* @param {any} [options.default] - Default selected value
|
||||||
|
* @returns {Promise<any>} Selected value
|
||||||
|
*/
|
||||||
|
async function select(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
// Convert Inquirer-style choices to clack format
|
||||||
|
// Handle both object choices {name, value, hint} and primitive choices (string/number)
|
||||||
|
const clackOptions = options.choices
|
||||||
|
.filter((c) => c.type !== 'separator') // Skip separators for now
|
||||||
|
.map((choice) => {
|
||||||
|
if (typeof choice === 'string' || typeof choice === 'number') {
|
||||||
|
return { value: choice, label: String(choice) };
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
value: choice.value === undefined ? choice.name : choice.value,
|
||||||
|
label: choice.name || choice.label || String(choice.value),
|
||||||
|
hint: choice.hint || choice.description,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find initial value
|
||||||
|
let initialValue;
|
||||||
|
if (options.default !== undefined) {
|
||||||
|
initialValue = options.default;
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await clack.select({
|
||||||
|
message: options.message,
|
||||||
|
options: clackOptions,
|
||||||
|
initialValue,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Multi-select prompt (replaces Inquirer 'checkbox' type)
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {Array} options.choices - Array of choices [{name, value, checked?, hint?}]
|
||||||
|
* @param {boolean} [options.required=false] - Whether at least one must be selected
|
||||||
|
* @returns {Promise<Array>} Array of selected values
|
||||||
|
*/
|
||||||
|
async function multiselect(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
// Support both clack-native (options) and Inquirer-style (choices) APIs
|
||||||
|
let clackOptions;
|
||||||
|
let initialValues;
|
||||||
|
|
||||||
|
if (options.options) {
|
||||||
|
// Native clack format: options with label/value
|
||||||
|
clackOptions = options.options;
|
||||||
|
initialValues = options.initialValues || [];
|
||||||
|
} else {
|
||||||
|
// Convert Inquirer-style choices to clack format
|
||||||
|
// Handle both object choices {name, value, hint} and primitive choices (string/number)
|
||||||
|
clackOptions = options.choices
|
||||||
|
.filter((c) => c.type !== 'separator') // Skip separators
|
||||||
|
.map((choice) => {
|
||||||
|
if (typeof choice === 'string' || typeof choice === 'number') {
|
||||||
|
return { value: choice, label: String(choice) };
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
value: choice.value === undefined ? choice.name : choice.value,
|
||||||
|
label: choice.name || choice.label || String(choice.value),
|
||||||
|
hint: choice.hint || choice.description,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find initial values (pre-checked items)
|
||||||
|
initialValues = options.choices
|
||||||
|
.filter((c) => c.checked && c.type !== 'separator')
|
||||||
|
.map((c) => (c.value === undefined ? c.name : c.value));
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await clack.multiselect({
|
||||||
|
message: options.message,
|
||||||
|
options: clackOptions,
|
||||||
|
initialValues: initialValues.length > 0 ? initialValues : undefined,
|
||||||
|
required: options.required || false,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Grouped multi-select prompt for categorized options
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {Object} options.options - Object mapping group names to arrays of choices
|
||||||
|
* @param {Array} [options.initialValues] - Array of initially selected values
|
||||||
|
* @param {boolean} [options.required=false] - Whether at least one must be selected
|
||||||
|
* @param {boolean} [options.selectableGroups=false] - Whether groups can be selected as a whole
|
||||||
|
* @returns {Promise<Array>} Array of selected values
|
||||||
|
*/
|
||||||
|
async function groupMultiselect(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
const result = await clack.groupMultiselect({
|
||||||
|
message: options.message,
|
||||||
|
options: options.options,
|
||||||
|
initialValues: options.initialValues,
|
||||||
|
required: options.required || false,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Confirm prompt (replaces Inquirer 'confirm' type)
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {boolean} [options.default=true] - Default value
|
||||||
|
* @returns {Promise<boolean>} User's answer
|
||||||
|
*/
|
||||||
|
async function confirm(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
const result = await clack.confirm({
|
||||||
|
message: options.message,
|
||||||
|
initialValue: options.default === undefined ? true : options.default,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Text input prompt (replaces Inquirer 'input' type)
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {string} [options.default] - Default value
|
||||||
|
* @param {string} [options.placeholder] - Placeholder text (defaults to options.default if not provided)
|
||||||
|
* @param {Function} [options.validate] - Validation function
|
||||||
|
* @returns {Promise<string>} User's input
|
||||||
|
*/
|
||||||
|
async function text(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
// Use default as placeholder if placeholder not explicitly provided
|
||||||
|
// This shows the default value as grayed-out hint text
|
||||||
|
const placeholder = options.placeholder === undefined ? options.default : options.placeholder;
|
||||||
|
|
||||||
|
const result = await clack.text({
|
||||||
|
message: options.message,
|
||||||
|
defaultValue: options.default,
|
||||||
|
placeholder: typeof placeholder === 'string' ? placeholder : undefined,
|
||||||
|
validate: options.validate,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Password input prompt (replaces Inquirer 'password' type)
|
||||||
|
* @param {Object} options - Prompt options
|
||||||
|
* @param {string} options.message - The question to ask
|
||||||
|
* @param {Function} [options.validate] - Validation function
|
||||||
|
* @returns {Promise<string>} User's input
|
||||||
|
*/
|
||||||
|
async function password(options) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
const result = await clack.password({
|
||||||
|
message: options.message,
|
||||||
|
validate: options.validate,
|
||||||
|
});
|
||||||
|
|
||||||
|
await handleCancel(result);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Group multiple prompts together
|
||||||
|
* @param {Object} prompts - Object of prompt functions
|
||||||
|
* @param {Object} [options] - Group options
|
||||||
|
* @returns {Promise<Object>} Object with all answers
|
||||||
|
*/
|
||||||
|
async function group(prompts, options = {}) {
|
||||||
|
const clack = await getClack();
|
||||||
|
|
||||||
|
const result = await clack.group(prompts, {
|
||||||
|
onCancel: () => {
|
||||||
|
clack.cancel('Operation cancelled');
|
||||||
|
process.exit(0);
|
||||||
|
},
|
||||||
|
...options,
|
||||||
|
});
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run tasks with spinner feedback
|
||||||
|
* @param {Array} tasks - Array of task objects [{title, task, enabled?}]
|
||||||
|
* @returns {Promise<void>}
|
||||||
|
*/
|
||||||
|
async function tasks(taskList) {
|
||||||
|
const clack = await getClack();
|
||||||
|
await clack.tasks(taskList);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log messages with styling
|
||||||
|
*/
|
||||||
|
const log = {
|
||||||
|
async info(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.info(message);
|
||||||
|
},
|
||||||
|
async success(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.success(message);
|
||||||
|
},
|
||||||
|
async warn(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.warn(message);
|
||||||
|
},
|
||||||
|
async error(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.error(message);
|
||||||
|
},
|
||||||
|
async message(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.message(message);
|
||||||
|
},
|
||||||
|
async step(message) {
|
||||||
|
const clack = await getClack();
|
||||||
|
clack.log.step(message);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute an array of Inquirer-style questions using @clack/prompts
|
||||||
|
* This provides compatibility with dynamic question arrays
|
||||||
|
* @param {Array} questions - Array of Inquirer-style question objects
|
||||||
|
* @returns {Promise<Object>} Object with answers keyed by question name
|
||||||
|
*/
|
||||||
|
async function prompt(questions) {
|
||||||
|
const answers = {};
|
||||||
|
|
||||||
|
for (const question of questions) {
|
||||||
|
const { type, name, message, choices, default: defaultValue, validate, when } = question;
|
||||||
|
|
||||||
|
// Handle conditional questions via 'when' property
|
||||||
|
if (when !== undefined) {
|
||||||
|
const shouldAsk = typeof when === 'function' ? await when(answers) : when;
|
||||||
|
if (!shouldAsk) continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let answer;
|
||||||
|
|
||||||
|
switch (type) {
|
||||||
|
case 'input': {
|
||||||
|
// Note: @clack/prompts doesn't support async validation, so validate must be sync
|
||||||
|
answer = await text({
|
||||||
|
message,
|
||||||
|
default: typeof defaultValue === 'function' ? defaultValue(answers) : defaultValue,
|
||||||
|
validate: validate
|
||||||
|
? (val) => {
|
||||||
|
const result = validate(val, answers);
|
||||||
|
if (result instanceof Promise) {
|
||||||
|
throw new TypeError('Async validation is not supported by @clack/prompts. Please use synchronous validation.');
|
||||||
|
}
|
||||||
|
return result === true ? undefined : result;
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'confirm': {
|
||||||
|
answer = await confirm({
|
||||||
|
message,
|
||||||
|
default: typeof defaultValue === 'function' ? defaultValue(answers) : defaultValue,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'list': {
|
||||||
|
answer = await select({
|
||||||
|
message,
|
||||||
|
choices: choices || [],
|
||||||
|
default: typeof defaultValue === 'function' ? defaultValue(answers) : defaultValue,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'checkbox': {
|
||||||
|
answer = await multiselect({
|
||||||
|
message,
|
||||||
|
choices: choices || [],
|
||||||
|
required: false,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'password': {
|
||||||
|
// Note: @clack/prompts doesn't support async validation, so validate must be sync
|
||||||
|
answer = await password({
|
||||||
|
message,
|
||||||
|
validate: validate
|
||||||
|
? (val) => {
|
||||||
|
const result = validate(val, answers);
|
||||||
|
if (result instanceof Promise) {
|
||||||
|
throw new TypeError('Async validation is not supported by @clack/prompts. Please use synchronous validation.');
|
||||||
|
}
|
||||||
|
return result === true ? undefined : result;
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
default: {
|
||||||
|
// Default to text input for unknown types
|
||||||
|
answer = await text({
|
||||||
|
message,
|
||||||
|
default: typeof defaultValue === 'function' ? defaultValue(answers) : defaultValue,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
answers[name] = answer;
|
||||||
|
}
|
||||||
|
|
||||||
|
return answers;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getClack,
|
||||||
|
handleCancel,
|
||||||
|
intro,
|
||||||
|
outro,
|
||||||
|
note,
|
||||||
|
spinner,
|
||||||
|
select,
|
||||||
|
multiselect,
|
||||||
|
groupMultiselect,
|
||||||
|
confirm,
|
||||||
|
text,
|
||||||
|
password,
|
||||||
|
group,
|
||||||
|
tasks,
|
||||||
|
log,
|
||||||
|
prompt,
|
||||||
|
};
|
||||||
|
|
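The new module above is consumed in two styles: direct clack-style calls that resolve to the chosen value, and legacy Inquirer-style question arrays passed to `prompt()`, which still resolve to an object keyed by question name. A minimal usage sketch; the require path is relative to the calling module and the questions are illustrative:

```js
// Minimal consumer sketch for the wrapper above; './prompts' is the path used from lib/.
const prompts = require('./prompts');

async function demo() {
  await prompts.intro('BMAD installer');

  // Direct calls resolve to the value itself (no { name: value } wrapper object).
  const proceed = await prompts.confirm({ message: 'Continue?', default: true });

  const flavor = await prompts.select({
    message: 'Pick one:',
    choices: [
      { name: 'Defaults', value: 'defaults' },
      { name: 'Manual', value: 'manual' },
    ],
    default: 'defaults',
  });

  // Inquirer-style arrays still work through prompt(); answers come back keyed by name.
  const answers = await prompts.prompt([
    { type: 'input', name: 'dir', message: 'Install directory:', default: process.cwd() },
  ]);

  await prompts.outro(`proceed=${proceed}, flavor=${flavor}, dir=${answers.dir}`);
}

demo();
```

Cancellation (Ctrl+C or Escape) is handled inside the wrapper: `handleCancel()` prints a cancel message and exits, so callers do not need their own checks.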
@@ -4,16 +4,21 @@ const os = require('node:os');
 const fs = require('fs-extra');
 const { CLIUtils } = require('./cli-utils');
 const { CustomHandler } = require('../installers/lib/custom/handler');
+const prompts = require('./prompts');
 
-// Lazy-load inquirer (ESM module) to avoid ERR_REQUIRE_ESM
-let _inquirer = null;
-async function getInquirer() {
-  if (!_inquirer) {
-    _inquirer = (await import('inquirer')).default;
+// Separator class for visual grouping in select/multiselect prompts
+// Note: @clack/prompts doesn't support separators natively, they are filtered out
+class Separator {
+  constructor(text = '────────') {
+    this.line = text;
+    this.name = text;
   }
-  return _inquirer;
+  type = 'separator';
 }
+
+// Separator for choice lists (compatible interface)
+const choiceUtils = { Separator };
 
 /**
  * UI utilities for the installer
  */
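The `Separator` shim above keeps existing Inquirer-style choice arrays compiling; the wrapper's `select()` and `multiselect()` simply filter out entries whose `type` is `'separator'`. A short fragment showing how it slots into a choices array inside this file, where `choiceUtils` and `prompts` are already in scope; the module names and values are placeholders:

```js
// Fragment intended for a UI method in this file; choiceUtils and prompts are defined above.
async function pickModulesExample() {
  const choices = [
    new choiceUtils.Separator('── Custom Content ──'),
    { name: 'My custom module', value: 'example-custom' }, // placeholder entry
    new choiceUtils.Separator('── Official Content ──'),
    { name: 'Example official module', value: 'example-official' }, // placeholder entry
  ];

  // multiselect() drops any entry whose type === 'separator', so code that still
  // builds Inquirer-style grouped lists keeps working; the headings simply do not
  // render in the clack prompt.
  return await prompts.multiselect({
    message: 'Select modules to install:',
    choices,
    required: false,
  });
}
```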
@@ -23,7 +28,6 @@ class UI {
  * @returns {Object} Installation configuration
  */
 async promptInstall() {
-  const inquirer = await getInquirer();
   CLIUtils.displayLogo();
 
   // Display version-specific start message from install-messages.yaml
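The removed `getInquirer()` helper and the wrapper's `getClack()` rely on the same trick: @clack/prompts (like inquirer 9+) ships as ESM only, so a plain `require()` from this CommonJS codebase would throw ERR_REQUIRE_ESM. The module is instead loaded lazily with a dynamic `import()` and cached. A condensed sketch of that pattern as it appears in lib/prompts.js:

```js
// Lazy ESM-from-CommonJS loading, as used by getClack() in the new prompts module.
let _clack = null;

async function getClack() {
  if (!_clack) {
    // Dynamic import() is allowed from CJS even for ESM-only packages.
    _clack = await import('@clack/prompts');
  }
  return _clack;
}
```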
@ -113,26 +117,20 @@ class UI {
|
||||||
console.log(chalk.yellow('─'.repeat(80)));
|
console.log(chalk.yellow('─'.repeat(80)));
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
||||||
const { proceed } = await inquirer.prompt([
|
const proceed = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'proceed',
|
|
||||||
message: 'What would you like to do?',
|
message: 'What would you like to do?',
|
||||||
choices: [
|
choices: [
|
||||||
{
|
{
|
||||||
name: 'Cancel and do a fresh install (recommended)',
|
name: 'Cancel and do a fresh install (recommended)',
|
||||||
value: 'cancel',
|
value: 'cancel',
|
||||||
short: 'Cancel installation',
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'Proceed anyway (will attempt update, potentially may fail or have unstable behavior)',
|
name: 'Proceed anyway (will attempt update, potentially may fail or have unstable behavior)',
|
||||||
value: 'proceed',
|
value: 'proceed',
|
||||||
short: 'Proceed with update',
|
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
default: 'cancel',
|
default: 'cancel',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (proceed === 'cancel') {
|
if (proceed === 'cancel') {
|
||||||
console.log('');
|
console.log('');
|
||||||
|
|
@ -188,14 +186,10 @@ class UI {
|
||||||
|
|
||||||
// If Claude Code was selected, ask about TTS
|
// If Claude Code was selected, ask about TTS
|
||||||
if (claudeCodeSelected) {
|
if (claudeCodeSelected) {
|
||||||
const { enableTts } = await inquirer.prompt([
|
const enableTts = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'enableTts',
|
|
||||||
message: 'Claude Code supports TTS (Text-to-Speech). Would you like to enable it?',
|
message: 'Claude Code supports TTS (Text-to-Speech). Would you like to enable it?',
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (enableTts) {
|
if (enableTts) {
|
||||||
agentVibesConfig = { enabled: true, alreadyInstalled: false };
|
agentVibesConfig = { enabled: true, alreadyInstalled: false };
|
||||||
|
|
@ -250,18 +244,11 @@ class UI {
|
||||||
// Common actions
|
// Common actions
|
||||||
choices.push({ name: 'Modify BMAD Installation', value: 'update' });
|
choices.push({ name: 'Modify BMAD Installation', value: 'update' });
|
||||||
|
|
||||||
const promptResult = await inquirer.prompt([
|
actionType = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'actionType',
|
|
||||||
message: 'What would you like to do?',
|
message: 'What would you like to do?',
|
||||||
choices: choices,
|
choices: choices,
|
||||||
default: choices[0].value, // Use the first option as default
|
default: choices[0].value,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
// Extract actionType from prompt result
|
|
||||||
actionType = promptResult.actionType;
|
|
||||||
|
|
||||||
// Handle quick update separately
|
// Handle quick update separately
|
||||||
if (actionType === 'quick-update') {
|
if (actionType === 'quick-update') {
|
||||||
|
|
@ -290,14 +277,10 @@ class UI {
|
||||||
const { installedModuleIds } = await this.getExistingInstallation(confirmedDirectory);
|
const { installedModuleIds } = await this.getExistingInstallation(confirmedDirectory);
|
||||||
|
|
||||||
console.log(chalk.dim(` Found existing modules: ${[...installedModuleIds].join(', ')}`));
|
console.log(chalk.dim(` Found existing modules: ${[...installedModuleIds].join(', ')}`));
|
||||||
const { changeModuleSelection } = await inquirer.prompt([
|
const changeModuleSelection = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'changeModuleSelection',
|
|
||||||
message: 'Modify official module selection (BMad Method, BMad Builder, Creative Innovation Suite)?',
|
message: 'Modify official module selection (BMad Method, BMad Builder, Creative Innovation Suite)?',
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
let selectedModules = [];
|
let selectedModules = [];
|
||||||
if (changeModuleSelection) {
|
if (changeModuleSelection) {
|
||||||
|
|
@ -310,14 +293,10 @@ class UI {
|
||||||
|
|
||||||
// After module selection, ask about custom modules
|
// After module selection, ask about custom modules
|
||||||
console.log('');
|
console.log('');
|
||||||
const { changeCustomModules } = await inquirer.prompt([
|
const changeCustomModules = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'changeCustomModules',
|
|
||||||
message: 'Modify custom module selection (add, update, or remove custom modules/agents/workflows)?',
|
message: 'Modify custom module selection (add, update, or remove custom modules/agents/workflows)?',
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
let customModuleResult = { selectedCustomModules: [], customContentConfig: { hasCustomContent: false } };
|
let customModuleResult = { selectedCustomModules: [], customContentConfig: { hasCustomContent: false } };
|
||||||
if (changeCustomModules) {
|
if (changeCustomModules) {
|
||||||
|
|
@ -352,15 +331,10 @@ class UI {
|
||||||
let enableTts = false;
|
let enableTts = false;
|
||||||
|
|
||||||
if (hasClaudeCode) {
|
if (hasClaudeCode) {
|
||||||
const { enableTts: enable } = await inquirer.prompt([
|
enableTts = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'enableTts',
|
|
||||||
message: 'Claude Code supports TTS (Text-to-Speech). Would you like to enable it?',
|
message: 'Claude Code supports TTS (Text-to-Speech). Would you like to enable it?',
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
enableTts = enable;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Core config with existing defaults (ask after TTS)
|
// Core config with existing defaults (ask after TTS)
|
||||||
|
|
@ -385,14 +359,10 @@ class UI {
|
||||||
const { installedModuleIds } = await this.getExistingInstallation(confirmedDirectory);
|
const { installedModuleIds } = await this.getExistingInstallation(confirmedDirectory);
|
||||||
|
|
||||||
// Ask about official modules for new installations
|
// Ask about official modules for new installations
|
||||||
const { wantsOfficialModules } = await inquirer.prompt([
|
const wantsOfficialModules = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'wantsOfficialModules',
|
|
||||||
message: 'Will you be installing any official BMad modules (BMad Method, BMad Builder, Creative Innovation Suite)?',
|
message: 'Will you be installing any official BMad modules (BMad Method, BMad Builder, Creative Innovation Suite)?',
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
let selectedOfficialModules = [];
|
let selectedOfficialModules = [];
|
||||||
if (wantsOfficialModules) {
|
if (wantsOfficialModules) {
|
||||||
|
|
@ -401,14 +371,10 @@ class UI {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ask about custom content
|
// Ask about custom content
|
||||||
const { wantsCustomContent } = await inquirer.prompt([
|
const wantsCustomContent = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'wantsCustomContent',
|
|
||||||
message: 'Would you like to install a local custom module (this includes custom agents and workflows also)?',
|
message: 'Would you like to install a local custom module (this includes custom agents and workflows also)?',
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (wantsCustomContent) {
|
if (wantsCustomContent) {
|
||||||
customContentConfig = await this.promptCustomContentSource();
|
customContentConfig = await this.promptCustomContentSource();
|
||||||
|
|
@ -459,7 +425,6 @@ class UI {
|
||||||
* @returns {Object} Tool configuration
|
* @returns {Object} Tool configuration
|
||||||
*/
|
*/
|
||||||
async promptToolSelection(projectDir, selectedModules) {
|
async promptToolSelection(projectDir, selectedModules) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
// Check for existing configured IDEs - use findBmadDir to detect custom folder names
|
// Check for existing configured IDEs - use findBmadDir to detect custom folder names
|
||||||
const { Detector } = require('../installers/lib/core/detector');
|
const { Detector } = require('../installers/lib/core/detector');
|
||||||
const { Installer } = require('../installers/lib/core/installer');
|
const { Installer } = require('../installers/lib/core/installer');
|
||||||
|
|
@ -477,13 +442,14 @@ class UI {
|
||||||
const preferredIdes = ideManager.getPreferredIdes();
|
const preferredIdes = ideManager.getPreferredIdes();
|
||||||
const otherIdes = ideManager.getOtherIdes();
|
const otherIdes = ideManager.getOtherIdes();
|
||||||
|
|
||||||
// Build IDE choices array with separators
|
// Build grouped options object for groupMultiselect
|
||||||
const ideChoices = [];
|
const groupedOptions = {};
|
||||||
const processedIdes = new Set();
|
const processedIdes = new Set();
|
||||||
|
const initialValues = [];
|
||||||
|
|
||||||
// First, add previously configured IDEs at the top, marked with ✅
|
// First, add previously configured IDEs at the top, marked with ✅
|
||||||
if (configuredIdes.length > 0) {
|
if (configuredIdes.length > 0) {
|
||||||
ideChoices.push(new inquirer.Separator('── Previously Configured ──'));
|
const configuredGroup = [];
|
||||||
for (const ideValue of configuredIdes) {
|
for (const ideValue of configuredIdes) {
|
||||||
// Skip empty or invalid IDE values
|
// Skip empty or invalid IDE values
|
||||||
if (!ideValue || typeof ideValue !== 'string') {
|
if (!ideValue || typeof ideValue !== 'string') {
|
||||||
|
|
@ -496,81 +462,71 @@ class UI {
|
||||||
const ide = preferredIde || otherIde;
|
const ide = preferredIde || otherIde;
|
||||||
|
|
||||||
if (ide) {
|
if (ide) {
|
||||||
ideChoices.push({
|
configuredGroup.push({
|
||||||
name: `${ide.name} ✅`,
|
label: `${ide.name} ✅`,
|
||||||
value: ide.value,
|
value: ide.value,
|
||||||
checked: true, // Previously configured IDEs are checked by default
|
|
||||||
});
|
});
|
||||||
processedIdes.add(ide.value);
|
processedIdes.add(ide.value);
|
||||||
|
initialValues.push(ide.value); // Pre-select configured IDEs
|
||||||
} else {
|
} else {
|
||||||
// Warn about unrecognized IDE (but don't fail)
|
// Warn about unrecognized IDE (but don't fail)
|
||||||
console.log(chalk.yellow(`⚠️ Previously configured IDE '${ideValue}' is no longer available`));
|
console.log(chalk.yellow(`⚠️ Previously configured IDE '${ideValue}' is no longer available`));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (configuredGroup.length > 0) {
|
||||||
|
groupedOptions['Previously Configured'] = configuredGroup;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add preferred tools (excluding already processed)
|
// Add preferred tools (excluding already processed)
|
||||||
const remainingPreferred = preferredIdes.filter((ide) => !processedIdes.has(ide.value));
|
const remainingPreferred = preferredIdes.filter((ide) => !processedIdes.has(ide.value));
|
||||||
if (remainingPreferred.length > 0) {
|
if (remainingPreferred.length > 0) {
|
||||||
ideChoices.push(new inquirer.Separator('── Recommended Tools ──'));
|
groupedOptions['Recommended Tools'] = remainingPreferred.map((ide) => {
|
||||||
for (const ide of remainingPreferred) {
|
|
||||||
ideChoices.push({
|
|
||||||
name: `${ide.name} ⭐`,
|
|
||||||
value: ide.value,
|
|
||||||
checked: false,
|
|
||||||
});
|
|
||||||
processedIdes.add(ide.value);
|
processedIdes.add(ide.value);
|
||||||
}
|
return {
|
||||||
|
label: `${ide.name} ⭐`,
|
||||||
|
value: ide.value,
|
||||||
|
};
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add other tools (excluding already processed)
|
// Add other tools (excluding already processed)
|
||||||
const remainingOther = otherIdes.filter((ide) => !processedIdes.has(ide.value));
|
const remainingOther = otherIdes.filter((ide) => !processedIdes.has(ide.value));
|
||||||
if (remainingOther.length > 0) {
|
if (remainingOther.length > 0) {
|
||||||
ideChoices.push(new inquirer.Separator('── Additional Tools ──'));
|
groupedOptions['Additional Tools'] = remainingOther.map((ide) => ({
|
||||||
for (const ide of remainingOther) {
|
label: ide.name,
|
||||||
ideChoices.push({
|
|
||||||
name: ide.name,
|
|
||||||
value: ide.value,
|
value: ide.value,
|
||||||
checked: false,
|
}));
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let answers;
|
let selectedIdes = [];
|
||||||
let userConfirmedNoTools = false;
|
let userConfirmedNoTools = false;
|
||||||
|
|
||||||
// Loop until user selects at least one tool OR explicitly confirms no tools
|
// Loop until user selects at least one tool OR explicitly confirms no tools
|
||||||
while (!userConfirmedNoTools) {
|
while (!userConfirmedNoTools) {
|
||||||
answers = await inquirer.prompt([
|
selectedIdes = await prompts.groupMultiselect({
|
||||||
{
|
message: `Select tools to configure ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
|
||||||
type: 'checkbox',
|
options: groupedOptions,
|
||||||
name: 'ides',
|
initialValues: initialValues.length > 0 ? initialValues : undefined,
|
||||||
message: 'Select tools to configure:',
|
required: false,
|
||||||
choices: ideChoices,
|
});
|
||||||
pageSize: 30,
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
|
|
||||||
// If tools were selected, we're done
|
// If tools were selected, we're done
|
||||||
if (answers.ides && answers.ides.length > 0) {
|
if (selectedIdes && selectedIdes.length > 0) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Warn that no tools were selected - users often miss the spacebar requirement
|
// Warn that no tools were selected - users often miss the spacebar requirement
|
||||||
console.log();
|
console.log();
|
||||||
console.log(chalk.red.bold('⚠️ WARNING: No tools were selected!'));
|
console.log(chalk.red.bold('⚠️ WARNING: No tools were selected!'));
|
||||||
console.log(chalk.red(' You must press SPACEBAR to select items, then ENTER to confirm.'));
|
console.log(chalk.red(' You must press SPACE to select items, then ENTER to confirm.'));
|
||||||
console.log(chalk.red(' Simply highlighting an item does NOT select it.'));
|
console.log(chalk.red(' Simply highlighting an item does NOT select it.'));
|
||||||
console.log();
|
console.log();
|
||||||
|
|
||||||
const { goBack } = await inquirer.prompt([
|
const goBack = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'goBack',
|
|
||||||
message: chalk.yellow('Would you like to go back and select at least one tool?'),
|
message: chalk.yellow('Would you like to go back and select at least one tool?'),
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (goBack) {
|
if (goBack) {
|
||||||
// Re-display a message before looping back
|
// Re-display a message before looping back
|
||||||
|
|
@ -582,8 +538,8 @@ class UI {
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
ides: answers.ides || [],
|
ides: selectedIdes || [],
|
||||||
skipIde: !answers.ides || answers.ides.length === 0,
|
skipIde: !selectedIdes || selectedIdes.length === 0,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
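The tool-selection hunk above replaces the flat checkbox list with separators by `groupMultiselect()`, whose options object maps group headings to clack-native `{ label, value }` entries and pre-selects previously configured tools through `initialValues`. A hedged sketch with placeholder IDE names and values:

```js
// Hedged sketch of the grouped options structure built in the hunk above.
// Group headings match the code; the IDE entries and values are placeholders.
const prompts = require('./prompts');

async function selectToolsExample() {
  const groupedOptions = {
    'Previously Configured': [{ label: 'Example IDE A ✅', value: 'example-ide-a' }],
    'Recommended Tools': [{ label: 'Example IDE B ⭐', value: 'example-ide-b' }],
    'Additional Tools': [{ label: 'Example IDE C', value: 'example-ide-c' }],
  };

  // groupMultiselect() passes the grouped { label, value } options straight to
  // clack; previously configured tools are pre-selected through initialValues.
  return await prompts.groupMultiselect({
    message: 'Select tools to configure (SPACE to select, ENTER to confirm):',
    options: groupedOptions,
    initialValues: ['example-ide-a'],
    required: false,
  });
}
```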
@ -592,23 +548,17 @@ class UI {
|
||||||
* @returns {Object} Update configuration
|
* @returns {Object} Update configuration
|
||||||
*/
|
*/
|
||||||
async promptUpdate() {
|
async promptUpdate() {
|
||||||
const inquirer = await getInquirer();
|
const backupFirst = await prompts.confirm({
|
||||||
const answers = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'backupFirst',
|
|
||||||
message: 'Create backup before updating?',
|
message: 'Create backup before updating?',
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
{
|
|
||||||
type: 'confirm',
|
const preserveCustomizations = await prompts.confirm({
|
||||||
name: 'preserveCustomizations',
|
|
||||||
message: 'Preserve local customizations?',
|
message: 'Preserve local customizations?',
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
return answers;
|
return { backupFirst, preserveCustomizations };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -617,27 +567,17 @@ class UI {
|
||||||
* @returns {Array} Selected modules
|
* @returns {Array} Selected modules
|
||||||
*/
|
*/
|
||||||
async promptModules(modules) {
|
async promptModules(modules) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
const choices = modules.map((mod) => ({
|
const choices = modules.map((mod) => ({
|
||||||
name: `${mod.name} - ${mod.description}`,
|
name: `${mod.name} - ${mod.description}`,
|
||||||
value: mod.id,
|
value: mod.id,
|
||||||
checked: false,
|
checked: false,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const { selectedModules } = await inquirer.prompt([
|
const selectedModules = await prompts.multiselect({
|
||||||
{
|
message: `Select modules to add ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
|
||||||
type: 'checkbox',
|
|
||||||
name: 'selectedModules',
|
|
||||||
message: 'Select modules to add:',
|
|
||||||
choices,
|
choices,
|
||||||
validate: (answer) => {
|
required: true,
|
||||||
if (answer.length === 0) {
|
});
|
||||||
return 'You must choose at least one module.';
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
|
|
||||||
return selectedModules;
|
return selectedModules;
|
||||||
}
|
}
|
||||||
|
|
@ -649,17 +589,10 @@ class UI {
|
||||||
* @returns {boolean} User confirmation
|
* @returns {boolean} User confirmation
|
||||||
*/
|
*/
|
||||||
async confirm(message, defaultValue = false) {
|
async confirm(message, defaultValue = false) {
|
||||||
const inquirer = await getInquirer();
|
return await prompts.confirm({
|
||||||
const { confirmed } = await inquirer.prompt([
|
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'confirmed',
|
|
||||||
message,
|
message,
|
||||||
default: defaultValue,
|
default: defaultValue,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
return confirmed;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -753,10 +686,9 @@ class UI {
|
||||||
* Get module choices for selection
|
* Get module choices for selection
|
||||||
* @param {Set} installedModuleIds - Currently installed module IDs
|
* @param {Set} installedModuleIds - Currently installed module IDs
|
||||||
* @param {Object} customContentConfig - Custom content configuration
|
* @param {Object} customContentConfig - Custom content configuration
|
||||||
* @returns {Array} Module choices for inquirer
|
* @returns {Array} Module choices for prompt
|
||||||
*/
|
*/
|
||||||
async getModuleChoices(installedModuleIds, customContentConfig = null) {
|
async getModuleChoices(installedModuleIds, customContentConfig = null) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
const moduleChoices = [];
|
const moduleChoices = [];
|
||||||
const isNewInstallation = installedModuleIds.size === 0;
|
const isNewInstallation = installedModuleIds.size === 0;
|
||||||
|
|
||||||
|
|
@ -811,9 +743,9 @@ class UI {
|
||||||
if (allCustomModules.length > 0) {
|
if (allCustomModules.length > 0) {
|
||||||
// Add separator for custom content, all custom modules, and official content separator
|
// Add separator for custom content, all custom modules, and official content separator
|
||||||
moduleChoices.push(
|
moduleChoices.push(
|
||||||
new inquirer.Separator('── Custom Content ──'),
|
new choiceUtils.Separator('── Custom Content ──'),
|
||||||
...allCustomModules,
|
...allCustomModules,
|
||||||
new inquirer.Separator('── Official Content ──'),
|
new choiceUtils.Separator('── Official Content ──'),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -837,44 +769,43 @@ class UI {
|
||||||
* @returns {Array} Selected module IDs
|
* @returns {Array} Selected module IDs
|
||||||
*/
|
*/
|
||||||
async selectModules(moduleChoices, defaultSelections = []) {
|
async selectModules(moduleChoices, defaultSelections = []) {
|
||||||
const inquirer = await getInquirer();
|
// Mark choices as checked based on defaultSelections
|
||||||
const moduleAnswer = await inquirer.prompt([
|
const choicesWithDefaults = moduleChoices.map((choice) => ({
|
||||||
{
|
...choice,
|
||||||
type: 'checkbox',
|
checked: defaultSelections.includes(choice.value),
|
||||||
name: 'modules',
|
}));
|
||||||
message: 'Select modules to install:',
|
|
||||||
choices: moduleChoices,
|
|
||||||
default: defaultSelections,
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
|
|
||||||
const selected = moduleAnswer.modules || [];
|
const selected = await prompts.multiselect({
|
||||||
|
message: `Select modules to install ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
|
||||||
|
choices: choicesWithDefaults,
|
||||||
|
required: false,
|
||||||
|
});
|
||||||
|
|
||||||
return selected;
|
return selected || [];
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prompt for directory selection
|
* Prompt for directory selection
|
||||||
* @returns {Object} Directory answer from inquirer
|
* @returns {Object} Directory answer from prompt
|
||||||
*/
|
*/
|
||||||
async promptForDirectory() {
|
async promptForDirectory() {
|
||||||
const inquirer = await getInquirer();
|
// Use sync validation because @clack/prompts doesn't support async validate
|
||||||
return await inquirer.prompt([
|
const directory = await prompts.text({
|
||||||
{
|
message: 'Installation directory:',
|
||||||
type: 'input',
|
|
||||||
name: 'directory',
|
|
||||||
message: `Installation directory:`,
|
|
||||||
default: process.cwd(),
|
default: process.cwd(),
|
||||||
validate: async (input) => this.validateDirectory(input),
|
placeholder: process.cwd(),
|
||||||
filter: (input) => {
|
validate: (input) => this.validateDirectorySync(input),
|
||||||
// If empty, use the default
|
});
|
||||||
if (!input || input.trim() === '') {
|
|
||||||
return process.cwd();
|
// Apply filter logic
|
||||||
|
let filteredDir = directory;
|
||||||
|
if (!filteredDir || filteredDir.trim() === '') {
|
||||||
|
filteredDir = process.cwd();
|
||||||
|
} else {
|
||||||
|
filteredDir = this.expandUserPath(filteredDir);
|
||||||
}
|
}
|
||||||
return this.expandUserPath(input);
|
|
||||||
},
|
return { directory: filteredDir };
|
||||||
},
|
|
||||||
]);
|
|
||||||
}
|
}
|
||||||
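clack's `text()` prompt accepts only synchronous validators and has no Inquirer-style `filter`, so the directory prompt above validates with `validateDirectorySync()` and applies the old filter logic after the prompt resolves. A self-contained sketch of that pattern; the helper names stand in for the UI class methods and are injected as parameters here:

```js
// Sketch of the "filter after the prompt" pattern used in promptForDirectory above.
async function askForDirectory(promptsLib, validateDirectorySync, expandUserPath) {
  const directory = await promptsLib.text({
    message: 'Installation directory:',
    default: process.cwd(),
    placeholder: process.cwd(),
    validate: (input) => validateDirectorySync(input), // clack validators must be synchronous
  });

  // clack's text() has no `filter` option, so the old Inquirer filter runs afterwards:
  // empty input falls back to the default, anything else gets ~ expansion.
  let filteredDir = directory;
  if (!filteredDir || filteredDir.trim() === '') {
    filteredDir = process.cwd();
  } else {
    filteredDir = expandUserPath(filteredDir);
  }
  return { directory: filteredDir };
}
```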
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -915,45 +846,92 @@ class UI {
|
||||||
* @returns {boolean} Whether user confirmed
|
* @returns {boolean} Whether user confirmed
|
||||||
*/
|
*/
|
||||||
async confirmDirectory(directory) {
|
async confirmDirectory(directory) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
const dirExists = await fs.pathExists(directory);
|
const dirExists = await fs.pathExists(directory);
|
||||||
|
|
||||||
if (dirExists) {
|
if (dirExists) {
|
||||||
const confirmAnswer = await inquirer.prompt([
|
const proceed = await prompts.confirm({
|
||||||
{
|
message: 'Install to this directory?',
|
||||||
type: 'confirm',
|
|
||||||
name: 'proceed',
|
|
||||||
message: `Install to this directory?`,
|
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (!confirmAnswer.proceed) {
|
if (!proceed) {
|
||||||
console.log(chalk.yellow("\nLet's try again with a different path.\n"));
|
console.log(chalk.yellow("\nLet's try again with a different path.\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
return confirmAnswer.proceed;
|
return proceed;
|
||||||
} else {
|
} else {
|
||||||
// Ask for confirmation to create the directory
|
// Ask for confirmation to create the directory
|
||||||
const createConfirm = await inquirer.prompt([
|
const create = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'create',
|
|
||||||
message: `The directory '${directory}' doesn't exist. Would you like to create it?`,
|
message: `The directory '${directory}' doesn't exist. Would you like to create it?`,
|
||||||
default: false,
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (!createConfirm.create) {
|
if (!create) {
|
||||||
console.log(chalk.yellow("\nLet's try again with a different path.\n"));
|
console.log(chalk.yellow("\nLet's try again with a different path.\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
return createConfirm.create;
|
return create;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validate directory path for installation
|
* Validate directory path for installation (sync version for clack prompts)
|
||||||
|
* @param {string} input - User input path
|
||||||
|
* @returns {string|undefined} Error message or undefined if valid
|
||||||
|
*/
|
||||||
|
validateDirectorySync(input) {
|
||||||
|
// Allow empty input to use the default
|
||||||
|
if (!input || input.trim() === '') {
|
||||||
|
return; // Empty means use default, undefined = valid for clack
|
||||||
|
}
|
||||||
|
|
||||||
|
let expandedPath;
|
||||||
|
try {
|
||||||
|
expandedPath = this.expandUserPath(input.trim());
|
||||||
|
} catch (error) {
|
||||||
|
return error.message;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the path exists
|
||||||
|
const pathExists = fs.pathExistsSync(expandedPath);
|
||||||
|
|
||||||
|
if (!pathExists) {
|
||||||
|
// Find the first existing parent directory
|
||||||
|
const existingParent = this.findExistingParentSync(expandedPath);
|
||||||
|
|
||||||
|
if (!existingParent) {
|
||||||
|
return 'Cannot create directory: no existing parent directory found';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the existing parent is writable
|
||||||
|
try {
|
||||||
|
fs.accessSync(existingParent, fs.constants.W_OK);
|
||||||
|
// Path doesn't exist but can be created - will prompt for confirmation later
|
||||||
|
return;
|
||||||
|
} catch {
|
||||||
|
// Provide a detailed error message explaining both issues
|
||||||
|
return `Directory '${expandedPath}' does not exist and cannot be created: parent directory '${existingParent}' is not writable`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If it exists, validate it's a directory and writable
|
||||||
|
const stat = fs.statSync(expandedPath);
|
||||||
|
if (!stat.isDirectory()) {
|
||||||
|
return `Path exists but is not a directory: ${expandedPath}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check write permissions
|
||||||
|
try {
|
||||||
|
fs.accessSync(expandedPath, fs.constants.W_OK);
|
||||||
|
} catch {
|
||||||
|
return `Directory is not writable: ${expandedPath}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate directory path for installation (async version)
|
||||||
* @param {string} input - User input path
|
* @param {string} input - User input path
|
||||||
* @returns {string|true} Error message or true if valid
|
* @returns {string|true} Error message or true if valid
|
||||||
*/
|
*/
|
||||||
|
|
@ -1009,7 +987,28 @@ class UI {
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find the first existing parent directory
|
* Find the first existing parent directory (sync version)
|
||||||
|
* @param {string} targetPath - The path to check
|
||||||
|
* @returns {string|null} The first existing parent directory, or null if none found
|
||||||
|
*/
|
||||||
|
findExistingParentSync(targetPath) {
|
||||||
|
let currentPath = path.resolve(targetPath);
|
||||||
|
|
||||||
|
// Walk up the directory tree until we find an existing directory
|
||||||
|
while (currentPath !== path.dirname(currentPath)) {
|
||||||
|
// Stop at root
|
||||||
|
const parent = path.dirname(currentPath);
|
||||||
|
if (fs.pathExistsSync(parent)) {
|
||||||
|
return parent;
|
||||||
|
}
|
||||||
|
currentPath = parent;
|
||||||
|
}
|
||||||
|
|
||||||
|
return null; // No existing parent found (shouldn't happen in practice)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the first existing parent directory (async version)
|
||||||
* @param {string} targetPath - The path to check
|
* @param {string} targetPath - The path to check
|
||||||
* @returns {string|null} The first existing parent directory, or null if none found
|
* @returns {string|null} The first existing parent directory, or null if none found
|
||||||
*/
|
*/
|
||||||
|
|
@ -1071,7 +1070,7 @@ class UI {
|
||||||
* @sideeffects None - pure user input collection, no files written
|
* @sideeffects None - pure user input collection, no files written
|
||||||
* @edgecases Shows warning if user enables TTS but AgentVibes not detected
|
* @edgecases Shows warning if user enables TTS but AgentVibes not detected
|
||||||
* @calledby promptInstall() during installation flow, after core config, before IDE selection
|
* @calledby promptInstall() during installation flow, after core config, before IDE selection
|
||||||
* @calls checkAgentVibesInstalled(), inquirer.prompt(), chalk.green/yellow/dim()
|
* @calls checkAgentVibesInstalled(), prompts.select(), chalk.green/yellow/dim()
|
||||||
*
|
*
|
||||||
* AI NOTE: This prompt is strategically positioned in installation flow:
|
* AI NOTE: This prompt is strategically positioned in installation flow:
|
||||||
* - AFTER core config (user_name, etc)
|
* - AFTER core config (user_name, etc)
|
||||||
|
|
@ -1102,7 +1101,6 @@ class UI {
|
||||||
* - GitHub Issue: paulpreibisch/AgentVibes#36
|
* - GitHub Issue: paulpreibisch/AgentVibes#36
|
||||||
*/
|
*/
|
||||||
async promptAgentVibes(projectDir) {
|
async promptAgentVibes(projectDir) {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
CLIUtils.displaySection('🎤 Voice Features', 'Enable TTS for multi-agent conversations');
|
CLIUtils.displaySection('🎤 Voice Features', 'Enable TTS for multi-agent conversations');
|
||||||
|
|
||||||
// Check if AgentVibes is already installed
|
// Check if AgentVibes is already installed
|
||||||
|
|
@ -1114,23 +1112,19 @@ class UI {
|
||||||
console.log(chalk.dim(' AgentVibes not detected'));
|
console.log(chalk.dim(' AgentVibes not detected'));
|
||||||
}
|
}
|
||||||
|
|
||||||
const answers = await inquirer.prompt([
|
const enableTts = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'enableTts',
|
|
||||||
message: 'Enable Agents to Speak Out loud (powered by Agent Vibes? Claude Code only currently)',
|
message: 'Enable Agents to Speak Out loud (powered by Agent Vibes? Claude Code only currently)',
|
||||||
default: false, // Default to yes - recommended for best experience
|
default: false,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (answers.enableTts && !agentVibesInstalled) {
|
if (enableTts && !agentVibesInstalled) {
|
||||||
console.log(chalk.yellow('\n ⚠️ AgentVibes not installed'));
|
console.log(chalk.yellow('\n ⚠️ AgentVibes not installed'));
|
||||||
console.log(chalk.dim(' Install AgentVibes separately to enable TTS:'));
|
console.log(chalk.dim(' Install AgentVibes separately to enable TTS:'));
|
||||||
console.log(chalk.dim(' https://github.com/paulpreibisch/AgentVibes\n'));
|
console.log(chalk.dim(' https://github.com/paulpreibisch/AgentVibes\n'));
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
enabled: answers.enableTts,
|
enabled: enableTts,
|
||||||
alreadyInstalled: agentVibesInstalled,
|
alreadyInstalled: agentVibesInstalled,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
@ -1248,30 +1242,75 @@ class UI {
|
||||||
return existingInstall.ides || [];
|
return existingInstall.ides || [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate custom content path synchronously
|
||||||
|
* @param {string} input - User input path
|
||||||
|
* @returns {string|undefined} Error message or undefined if valid
|
||||||
|
*/
|
||||||
|
validateCustomContentPathSync(input) {
|
||||||
|
// Allow empty input to cancel
|
||||||
|
if (!input || input.trim() === '') {
|
||||||
|
return; // Allow empty to exit
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Expand the path
|
||||||
|
const expandedPath = this.expandUserPath(input.trim());
|
||||||
|
|
||||||
|
// Check if path exists
|
||||||
|
if (!fs.pathExistsSync(expandedPath)) {
|
||||||
|
return 'Path does not exist';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a directory
|
||||||
|
const stat = fs.statSync(expandedPath);
|
||||||
|
if (!stat.isDirectory()) {
|
||||||
|
return 'Path must be a directory';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for module.yaml in the root
|
||||||
|
const moduleYamlPath = path.join(expandedPath, 'module.yaml');
|
||||||
|
if (!fs.pathExistsSync(moduleYamlPath)) {
|
||||||
|
return 'Directory must contain a module.yaml file in the root';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to parse the module.yaml to get the module ID
|
||||||
|
try {
|
||||||
|
const yaml = require('yaml');
|
||||||
|
const content = fs.readFileSync(moduleYamlPath, 'utf8');
|
||||||
|
const moduleData = yaml.parse(content);
|
||||||
|
if (!moduleData.code) {
|
||||||
|
return 'module.yaml must contain a "code" field for the module ID';
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return 'Invalid module.yaml file: ' + error.message;
|
||||||
|
}
|
||||||
|
|
||||||
|
return; // Valid
|
||||||
|
} catch (error) {
|
||||||
|
return 'Error validating path: ' + error.message;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prompt user for custom content source location
|
* Prompt user for custom content source location
|
||||||
* @returns {Object} Custom content configuration
|
* @returns {Object} Custom content configuration
|
||||||
*/
|
*/
|
||||||
async promptCustomContentSource() {
|
async promptCustomContentSource() {
|
||||||
const inquirer = await getInquirer();
|
|
||||||
const customContentConfig = { hasCustomContent: true, sources: [] };
|
const customContentConfig = { hasCustomContent: true, sources: [] };
|
||||||
|
|
||||||
// Keep asking for more sources until user is done
|
// Keep asking for more sources until user is done
|
||||||
while (true) {
|
while (true) {
|
||||||
// First ask if user wants to add another module or continue
|
// First ask if user wants to add another module or continue
|
||||||
if (customContentConfig.sources.length > 0) {
|
if (customContentConfig.sources.length > 0) {
|
||||||
const { action } = await inquirer.prompt([
|
const action = await prompts.select({
|
||||||
{
|
|
||||||
type: 'list',
|
|
||||||
name: 'action',
|
|
||||||
message: 'Would you like to:',
|
message: 'Would you like to:',
|
||||||
choices: [
|
choices: [
|
||||||
{ name: 'Add another custom module', value: 'add' },
|
{ name: 'Add another custom module', value: 'add' },
|
||||||
{ name: 'Continue with installation', value: 'continue' },
|
{ name: 'Continue with installation', value: 'continue' },
|
||||||
],
|
],
|
||||||
default: 'continue',
|
default: 'continue',
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (action === 'continue') {
|
if (action === 'continue') {
|
||||||
break;
|
break;
|
||||||
|
|
@ -1282,57 +1321,11 @@ class UI {
|
||||||
let isValid = false;
|
let isValid = false;
|
||||||
|
|
||||||
while (!isValid) {
|
while (!isValid) {
|
||||||
const { path: inputPath } = await inquirer.prompt([
|
// Use sync validation because @clack/prompts doesn't support async validate
|
||||||
{
|
const inputPath = await prompts.text({
|
||||||
type: 'input',
|
|
||||||
name: 'path',
|
|
||||||
message: 'Enter the path to your custom content folder (or press Enter to cancel):',
|
message: 'Enter the path to your custom content folder (or press Enter to cancel):',
|
||||||
validate: async (input) => {
|
validate: (input) => this.validateCustomContentPathSync(input),
|
||||||
// Allow empty input to cancel
|
});
|
||||||
if (!input || input.trim() === '') {
|
|
||||||
return true; // Allow empty to exit
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Expand the path
|
|
||||||
const expandedPath = this.expandUserPath(input.trim());
|
|
||||||
|
|
||||||
// Check if path exists
|
|
||||||
if (!(await fs.pathExists(expandedPath))) {
|
|
||||||
return 'Path does not exist';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if it's a directory
|
|
||||||
const stat = await fs.stat(expandedPath);
|
|
||||||
if (!stat.isDirectory()) {
|
|
||||||
return 'Path must be a directory';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check for module.yaml in the root
|
|
||||||
const moduleYamlPath = path.join(expandedPath, 'module.yaml');
|
|
||||||
if (!(await fs.pathExists(moduleYamlPath))) {
|
|
||||||
return 'Directory must contain a module.yaml file in the root';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try to parse the module.yaml to get the module ID
|
|
||||||
try {
|
|
||||||
const yaml = require('yaml');
|
|
||||||
const content = await fs.readFile(moduleYamlPath, 'utf8');
|
|
||||||
const moduleData = yaml.parse(content);
|
|
||||||
if (!moduleData.code) {
|
|
||||||
return 'module.yaml must contain a "code" field for the module ID';
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return 'Invalid module.yaml file: ' + error.message;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
} catch (error) {
|
|
||||||
return 'Error validating path: ' + error.message;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
|
|
||||||
// If user pressed Enter without typing anything, exit the loop
|
// If user pressed Enter without typing anything, exit the loop
|
||||||
if (!inputPath || inputPath.trim() === '') {
|
if (!inputPath || inputPath.trim() === '') {
|
||||||
|
|
@ -1364,14 +1357,10 @@ class UI {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ask if user wants to add these to the installation
|
// Ask if user wants to add these to the installation
|
||||||
const { shouldInstall } = await inquirer.prompt([
|
const shouldInstall = await prompts.confirm({
|
||||||
{
|
|
||||||
type: 'confirm',
|
|
||||||
name: 'shouldInstall',
|
|
||||||
message: `Install ${customContentConfig.sources.length} custom module(s) now?`,
|
message: `Install ${customContentConfig.sources.length} custom module(s) now?`,
|
||||||
default: true,
|
default: true,
|
||||||
},
|
});
|
||||||
]);
|
|
||||||
|
|
||||||
if (shouldInstall) {
|
if (shouldInstall) {
|
||||||
customContentConfig.selected = true;
|
customContentConfig.selected = true;
|
||||||
|
|
@@ -1391,7 +1380,6 @@ class UI {
   * @returns {Object} Result with selected custom modules and custom content config
   */
  async handleCustomModulesInModifyFlow(directory, selectedModules) {
-    const inquirer = await getInquirer();
    // Get existing installation to find custom modules
    const { existingInstall } = await this.getExistingInstallation(directory);

@@ -1451,16 +1439,11 @@ class UI {
      choices.push({ name: 'Add new custom modules', value: 'add' }, { name: 'Cancel (no custom modules)', value: 'cancel' });
    }

-    const { customAction } = await inquirer.prompt([
-      {
-        type: 'list',
-        name: 'customAction',
-        message:
-          cachedCustomModules.length > 0 ? 'What would you like to do with custom modules?' : 'Would you like to add custom modules?',
-        choices: choices,
-        default: cachedCustomModules.length > 0 ? 'keep' : 'add',
-      },
-    ]);
+    const customAction = await prompts.select({
+      message: cachedCustomModules.length > 0 ? 'What would you like to do with custom modules?' : 'Would you like to add custom modules?',
+      choices: choices,
+      default: cachedCustomModules.length > 0 ? 'keep' : 'add',
+    });

    switch (customAction) {
      case 'keep': {
@@ -1472,21 +1455,18 @@ class UI {

      case 'select': {
        // Let user choose which to keep
-        const choices = cachedCustomModules.map((m) => ({
+        const selectChoices = cachedCustomModules.map((m) => ({
          name: `${m.name} ${chalk.gray(`(${m.id})`)}`,
          value: m.id,
+          checked: m.checked,
        }));

-        const { keepModules } = await inquirer.prompt([
-          {
-            type: 'checkbox',
-            name: 'keepModules',
-            message: 'Select custom modules to keep:',
-            choices: choices,
-            default: cachedCustomModules.filter((m) => m.checked).map((m) => m.id),
-          },
-        ]);
-        result.selectedCustomModules = keepModules;
+        const keepModules = await prompts.multiselect({
+          message: `Select custom modules to keep ${chalk.dim('(↑/↓ navigate, SPACE select, ENTER confirm)')}:`,
+          choices: selectChoices,
+          required: false,
+        });
+        result.selectedCustomModules = keepModules || [];
        break;
      }

@@ -1586,7 +1566,6 @@ class UI {
   * @returns {Promise<boolean>} True if user wants to proceed, false if they cancel
   */
  async showOldAlphaVersionWarning(installedVersion, currentVersion, bmadFolderName) {
-    const inquirer = await getInquirer();
    const versionInfo = this.checkAlphaVersionAge(installedVersion, currentVersion);

    // Also warn if version is unknown or can't be parsed (legacy/unsupported)
@@ -1627,26 +1606,20 @@ class UI {
    console.log(chalk.yellow('─'.repeat(80)));
    console.log('');

-    const { proceed } = await inquirer.prompt([
-      {
-        type: 'list',
-        name: 'proceed',
-        message: 'What would you like to do?',
-        choices: [
-          {
-            name: 'Proceed with update anyway (may have issues)',
-            value: 'proceed',
-            short: 'Proceed with update',
-          },
-          {
-            name: 'Cancel (recommended - do a fresh install instead)',
-            value: 'cancel',
-            short: 'Cancel installation',
-          },
-        ],
-        default: 'cancel',
-      },
-    ]);
+    const proceed = await prompts.select({
+      message: 'What would you like to do?',
+      choices: [
+        {
+          name: 'Proceed with update anyway (may have issues)',
+          value: 'proceed',
+        },
+        {
+          name: 'Cancel (recommended - do a fresh install instead)',
+          value: 'cancel',
+        },
+      ],
+      default: 'cancel',
+    });

    if (proceed === 'cancel') {
      console.log('');
@@ -0,0 +1,51 @@
+import { defineConfig } from 'vitest/config';
+
+export default defineConfig({
+  test: {
+    // Test file patterns
+    include: ['test/unit/**/*.test.js', 'test/integration/**/*.test.js'],
+    exclude: ['test/test-*.js', 'node_modules/**'],
+
+    // Timeouts
+    testTimeout: 10_000, // 10s for unit tests
+    hookTimeout: 30_000, // 30s for setup/teardown
+
+    // Parallel execution for speed
+    threads: true,
+    maxThreads: 4,
+
+    // Coverage configuration (using V8)
+    coverage: {
+      provider: 'v8',
+      reporter: ['text', 'html', 'lcov', 'json-summary'],
+
+      // Files to include in coverage
+      include: ['tools/**/*.js', 'src/**/*.js'],
+
+      // Files to exclude from coverage
+      exclude: [
+        'test/**',
+        'tools/flattener/**', // Separate concern
+        'tools/bmad-npx-wrapper.js', // Entry point
+        'tools/build-docs.js', // Documentation tools
+        'tools/check-doc-links.js', // Documentation tools
+        '**/*.config.js', // Configuration files
+      ],
+
+      // Include all files for accurate coverage
+      all: true,
+
+      // Coverage thresholds (fail if below these)
+      statements: 85,
+      branches: 80,
+      functions: 85,
+      lines: 85,
+    },
+
+    // Global setup file
+    setupFiles: ['./test/setup.js'],
+
+    // Environment
+    environment: 'node',
+  },
+});
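For context, the `include` pattern above means unit tests live under `test/unit/` and end in `.test.js`. A minimal test that this config would pick up might look like the sketch below (the file path and assertion are illustrative only, and the configured `./test/setup.js` is assumed to exist); coverage runs would then be started with `npx vitest run --coverage`, which reports against the thresholds declared above.

```js
// test/unit/example.test.js — illustrative sketch, not part of this change set.
import { describe, expect, it } from 'vitest';

describe('example suite', () => {
  it('is matched by the include pattern and counted toward coverage thresholds', () => {
    expect(1 + 1).toBe(2);
  });
});
```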