feat: Add comprehensive cross-IDE workspace utility system
- Add workspace-utils/ directory with 6 comprehensive utility scripts (2600+ lines)
- Implement workspace initialization, status, health monitoring, cleanup, handoff, and sync
- Add npm scripts integration for Node.js projects with standalone fallback
- Create IDE-specific documentation for Cursor, Windsurf, GitHub Copilot
- Add context-manager and handoff-manager for workspace coordination
- Support cross-IDE collaboration with environment detection and session management
- Enable automatic workspace deployment during BMAD installation
Parent: c3bc7b0696
Commit: dd487630ee
@@ -15,6 +15,12 @@
     "validate": "node tools/cli.js validate",
     "install:bmad": "node tools/installer/bin/bmad.js install",
     "format": "prettier --write \"**/*.md\"",
+    "workspace-init": "node workspace-utils/init.js",
+    "workspace-status": "node workspace-utils/status.js",
+    "workspace-cleanup": "node workspace-utils/cleanup.js",
+    "workspace-handoff": "node workspace-utils/handoff.js",
+    "workspace-sync": "node workspace-utils/sync.js",
+    "workspace-health": "node workspace-utils/health.js",
     "version:patch": "node tools/version-bump.js patch",
     "version:minor": "node tools/version-bump.js minor",
     "version:major": "node tools/version-bump.js major",
File diff suppressed because it is too large
@@ -0,0 +1,938 @@
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
class HandoffManager {
|
||||||
|
constructor(workspacePath = null) {
|
||||||
|
this.workspacePath = workspacePath || path.join(process.cwd(), '.workspace');
|
||||||
|
this.handoffsPath = path.join(this.workspacePath, 'handoffs');
|
||||||
|
this.contextPath = path.join(this.workspacePath, 'context');
|
||||||
|
|
||||||
|
// Initialize directories
|
||||||
|
this.initialize();
|
||||||
|
|
||||||
|
// Agent-specific filtering rules with multi-role support
|
||||||
|
this.agentFilters = {
|
||||||
|
'dev': {
|
||||||
|
includePatterns: ['technical', 'implementation', 'code', 'architecture', 'bug', 'feature'],
|
||||||
|
excludePatterns: ['business', 'stakeholder', 'marketing'],
|
||||||
|
requiredSections: ['technical details', 'code references', 'implementation requirements']
|
||||||
|
},
|
||||||
|
'qa': {
|
||||||
|
includePatterns: ['testing', 'validation', 'quality', 'acceptance', 'bug', 'criteria'],
|
||||||
|
excludePatterns: ['implementation details', 'code specifics'],
|
||||||
|
requiredSections: ['acceptance criteria', 'testing requirements', 'quality standards']
|
||||||
|
},
|
||||||
|
'architect': {
|
||||||
|
includePatterns: ['design', 'architecture', 'system', 'integration', 'technical', 'pattern'],
|
||||||
|
excludePatterns: ['implementation specifics', 'testing details'],
|
||||||
|
requiredSections: ['design decisions', 'technical constraints', 'system architecture']
|
||||||
|
},
|
||||||
|
'pm': {
|
||||||
|
includePatterns: ['requirements', 'business', 'stakeholder', 'scope', 'timeline', 'priority'],
|
||||||
|
excludePatterns: ['technical implementation', 'code details'],
|
||||||
|
requiredSections: ['business requirements', 'stakeholder decisions', 'scope changes']
|
||||||
|
},
|
||||||
|
'ux-expert': {
|
||||||
|
includePatterns: ['user', 'interface', 'experience', 'design', 'usability', 'interaction'],
|
||||||
|
excludePatterns: ['backend', 'database', 'server'],
|
||||||
|
requiredSections: ['user requirements', 'design specifications', 'usability considerations']
|
||||||
|
},
|
||||||
|
'analyst': {
|
||||||
|
includePatterns: ['data', 'analysis', 'metrics', 'trends', 'insights', 'research', 'patterns', 'statistics'],
|
||||||
|
excludePatterns: ['implementation', 'specific code'],
|
||||||
|
requiredSections: ['data analysis', 'insights and trends', 'research findings']
|
||||||
|
},
|
||||||
|
'brainstorming': {
|
||||||
|
includePatterns: ['creative', 'ideation', 'brainstorm', 'innovation', 'alternative', 'possibility', 'exploration'],
|
||||||
|
excludePatterns: ['constraints', 'limitations', 'final decisions'],
|
||||||
|
requiredSections: ['creative exploration', 'alternative approaches', 'innovative solutions']
|
||||||
|
},
|
||||||
|
'research': {
|
||||||
|
includePatterns: ['research', 'investigation', 'study', 'benchmark', 'industry', 'best-practice', 'standards'],
|
||||||
|
excludePatterns: ['implementation', 'specific solutions'],
|
||||||
|
requiredSections: ['research methodology', 'findings and insights', 'recommendations']
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Multi-role combinations for complex scenarios
|
||||||
|
this.multiRoleFilters = {
|
||||||
|
'dev-analyst': {
|
||||||
|
primary: 'dev',
|
||||||
|
secondary: 'analyst',
|
||||||
|
description: 'Development with data analysis capabilities'
|
||||||
|
},
|
||||||
|
'qa-research': {
|
||||||
|
primary: 'qa',
|
||||||
|
secondary: 'research',
|
||||||
|
description: 'Quality assurance with research methodologies'
|
||||||
|
},
|
||||||
|
'architect-brainstorming': {
|
||||||
|
primary: 'architect',
|
||||||
|
secondary: 'brainstorming',
|
||||||
|
description: 'Architecture design with creative exploration'
|
||||||
|
},
|
||||||
|
'pm-analyst': {
|
||||||
|
primary: 'pm',
|
||||||
|
secondary: 'analyst',
|
||||||
|
description: 'Project management with data analysis'
|
||||||
|
},
|
||||||
|
'ux-research': {
|
||||||
|
primary: 'ux-expert',
|
||||||
|
secondary: 'research',
|
||||||
|
description: 'UX design with user research capabilities'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
initialize() {
|
||||||
|
if (!fs.existsSync(this.handoffsPath)) {
|
||||||
|
fs.mkdirSync(this.handoffsPath, { recursive: true });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async createHandoff(sourceAgent, targetAgent, context = {}) {
|
||||||
|
try {
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
const handoffId = `${sourceAgent}-to-${targetAgent}-${timestamp.replace(/[:.]/g, '-')}`;
|
||||||
|
const handoffFile = path.join(this.handoffsPath, `${handoffId}.md`);
|
||||||
|
|
||||||
|
// Load workspace context using our ContextManager integration
|
||||||
|
const workspaceContext = await this.loadWorkspaceContext();
|
||||||
|
|
||||||
|
// Filter context for target agent
|
||||||
|
const filteredContext = this.filterContextForAgent(workspaceContext, targetAgent);
|
||||||
|
|
||||||
|
// Generate handoff package
|
||||||
|
const handoffContent = await this.generateHandoffPackage({
|
||||||
|
handoffId,
|
||||||
|
sourceAgent,
|
||||||
|
targetAgent,
|
||||||
|
timestamp,
|
||||||
|
context: filteredContext,
|
||||||
|
customContext: context
|
||||||
|
});
|
||||||
|
|
||||||
|
// Validate handoff completeness
|
||||||
|
const validation = this.validateHandoff(handoffContent, targetAgent);
|
||||||
|
|
||||||
|
// Write handoff file
|
||||||
|
fs.writeFileSync(handoffFile, handoffContent);
|
||||||
|
|
||||||
|
// Update handoff registry
|
||||||
|
await this.updateHandoffRegistry(handoffId, sourceAgent, targetAgent, validation);
|
||||||
|
|
||||||
|
// Log handoff in audit trail
|
||||||
|
await this.logHandoffEvent({
|
||||||
|
handoffId,
|
||||||
|
sourceAgent,
|
||||||
|
targetAgent,
|
||||||
|
timestamp,
|
||||||
|
status: 'created',
|
||||||
|
validationScore: validation.score,
|
||||||
|
filePath: handoffFile
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
handoffId,
|
||||||
|
filePath: handoffFile,
|
||||||
|
validation,
|
||||||
|
success: true
|
||||||
|
};
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to create handoff:', error.message);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadWorkspaceContext() {
|
||||||
|
try {
|
||||||
|
const context = {
|
||||||
|
shared: {},
|
||||||
|
decisions: [],
|
||||||
|
progress: {},
|
||||||
|
quality: {}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Load shared context
|
||||||
|
const sharedContextFile = path.join(this.contextPath, 'shared-context.md');
|
||||||
|
if (fs.existsSync(sharedContextFile)) {
|
||||||
|
context.shared = this.parseSharedContext(fs.readFileSync(sharedContextFile, 'utf8'));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load decisions
|
||||||
|
const decisionsFile = path.join(this.workspacePath, 'decisions', 'decisions-log.md');
|
||||||
|
if (fs.existsSync(decisionsFile)) {
|
||||||
|
context.decisions = this.parseDecisions(fs.readFileSync(decisionsFile, 'utf8'));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load progress
|
||||||
|
const progressFile = path.join(this.workspacePath, 'progress', 'progress-summary.md');
|
||||||
|
if (fs.existsSync(progressFile)) {
|
||||||
|
context.progress = this.parseProgress(fs.readFileSync(progressFile, 'utf8'));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load quality metrics
|
||||||
|
const qualityFile = path.join(this.workspacePath, 'quality', 'quality-metrics.md');
|
||||||
|
if (fs.existsSync(qualityFile)) {
|
||||||
|
context.quality = this.parseQualityMetrics(fs.readFileSync(qualityFile, 'utf8'));
|
||||||
|
}
|
||||||
|
|
||||||
|
return context;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to load workspace context:', error.message);
|
||||||
|
return { shared: {}, decisions: [], progress: {}, quality: {} };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
parseSharedContext(content) {
|
||||||
|
const context = {};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const lastUpdatedMatch = content.match(/\*\*Last Updated:\*\* (.+)/);
|
||||||
|
if (lastUpdatedMatch) context.lastUpdated = lastUpdatedMatch[1];
|
||||||
|
|
||||||
|
const primaryAgentMatch = content.match(/\*\*Primary Agent:\*\* (.+)/);
|
||||||
|
if (primaryAgentMatch) context.primaryAgent = primaryAgentMatch[1];
|
||||||
|
|
||||||
|
const currentFocusMatch = content.match(/## Current Focus\n([\s\S]*?)(?=\n## |$)/);
|
||||||
|
if (currentFocusMatch) context.currentFocus = currentFocusMatch[1].trim();
|
||||||
|
|
||||||
|
const nextStepsMatch = content.match(/## Next Steps\n([\s\S]*?)(?=\n## |$)/);
|
||||||
|
if (nextStepsMatch) {
|
||||||
|
context.nextSteps = nextStepsMatch[1]
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.startsWith('- '))
|
||||||
|
.map(line => line.substring(2).trim())
|
||||||
|
.filter(step => step.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionNotesMatch = content.match(/## Session Notes\n([\s\S]*?)$/);
|
||||||
|
if (sessionNotesMatch) context.sessionNotes = sessionNotesMatch[1].trim();
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to parse shared context:', error.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
parseDecisions(content) {
|
||||||
|
const decisions = [];
|
||||||
|
const decisionBlocks = content.split(/## Decision \d+:/);
|
||||||
|
|
||||||
|
for (let i = 1; i < decisionBlocks.length; i++) {
|
||||||
|
try {
|
||||||
|
const block = decisionBlocks[i];
|
||||||
|
const lines = block.split('\n');
|
||||||
|
|
||||||
|
const decision = {
|
||||||
|
id: `${i.toString().padStart(3, '0')}`,
|
||||||
|
title: lines[0].trim(),
|
||||||
|
date: this.extractField(block, 'Date'),
|
||||||
|
agent: this.extractField(block, 'Agent'),
|
||||||
|
context: this.extractField(block, 'Context'),
|
||||||
|
decision: this.extractField(block, 'Decision'),
|
||||||
|
rationale: this.extractField(block, 'Rationale'),
|
||||||
|
impact: this.extractField(block, 'Impact'),
|
||||||
|
status: this.extractField(block, 'Status')
|
||||||
|
};
|
||||||
|
|
||||||
|
decisions.push(decision);
|
||||||
|
} catch (error) {
|
||||||
|
console.warn(`Failed to parse decision block ${i}:`, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return decisions.slice(-10); // Last 10 decisions for handoff
|
||||||
|
}
|
||||||
|
|
||||||
|
parseProgress(content) {
|
||||||
|
const progress = {};
|
||||||
|
|
||||||
|
try {
|
||||||
|
const currentStoryMatch = content.match(/\*\*Current Story:\*\* (.+)/);
|
||||||
|
if (currentStoryMatch) progress.currentStory = currentStoryMatch[1];
|
||||||
|
|
||||||
|
const qualityScoreMatch = content.match(/\*\*Quality Score:\*\* (.+)/);
|
||||||
|
if (qualityScoreMatch) progress.qualityScore = qualityScoreMatch[1];
|
||||||
|
|
||||||
|
const completedMatch = content.match(/## Completed Tasks\n([\s\S]*?)(?=\n## |$)/);
|
||||||
|
if (completedMatch) {
|
||||||
|
progress.completedTasks = completedMatch[1]
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.startsWith('- ✅'))
|
||||||
|
.map(line => line.substring(4).trim())
|
||||||
|
.filter(task => task.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const pendingMatch = content.match(/## Pending Tasks\n([\s\S]*?)(?=\n## |$)/);
|
||||||
|
if (pendingMatch) {
|
||||||
|
progress.pendingTasks = pendingMatch[1]
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.startsWith('- ⏳'))
|
||||||
|
.map(line => line.substring(4).trim())
|
||||||
|
.filter(task => task.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
const blockersMatch = content.match(/## Blockers\n([\s\S]*?)$/);
|
||||||
|
if (blockersMatch) {
|
||||||
|
progress.blockers = blockersMatch[1]
|
||||||
|
.split('\n')
|
||||||
|
.filter(line => line.startsWith('- 🚫'))
|
||||||
|
.map(line => line.substring(4).trim())
|
||||||
|
.filter(blocker => blocker.length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to parse progress:', error.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return progress;
|
||||||
|
}
|
||||||
|
|
||||||
|
parseQualityMetrics(content) {
|
||||||
|
const quality = {};
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Get the most recent quality assessment
|
||||||
|
const assessments = content.split('## Quality Assessment -');
|
||||||
|
if (assessments.length > 1) {
|
||||||
|
const latest = assessments[1];
|
||||||
|
quality.timestamp = latest.split('\n')[0].trim();
|
||||||
|
quality.agent = this.extractField(latest, 'Agent');
|
||||||
|
quality.story = this.extractField(latest, 'Story');
|
||||||
|
quality.realityAuditScore = this.extractField(latest, 'Reality Audit Score');
|
||||||
|
quality.overallQuality = this.extractField(latest, 'Overall Quality');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to parse quality metrics:', error.message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return quality;
|
||||||
|
}
|
||||||
|
|
||||||
|
extractField(content, fieldName) {
|
||||||
|
const regex = new RegExp(`\\*\\*${fieldName}:\\*\\* (.+)`, 'i');
|
||||||
|
const match = content.match(regex);
|
||||||
|
return match ? match[1].trim() : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
filterContextForAgent(context, targetAgent) {
|
||||||
|
const agentType = this.getAgentType(targetAgent);
|
||||||
|
|
||||||
|
// Handle multi-role filtering
|
||||||
|
if (this.multiRoleFilters[agentType]) {
|
||||||
|
return this.filterMultiRoleContext(context, agentType);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle single role filtering
|
||||||
|
const filter = this.agentFilters[agentType] || this.agentFilters['dev']; // Default to dev filter
|
||||||
|
|
||||||
|
const filtered = {
|
||||||
|
shared: context.shared,
|
||||||
|
decisions: this.filterDecisions(context.decisions, filter),
|
||||||
|
progress: context.progress,
|
||||||
|
quality: context.quality,
|
||||||
|
relevantContent: this.extractRelevantContent(context, filter),
|
||||||
|
roleType: 'single',
|
||||||
|
primaryRole: agentType
|
||||||
|
};
|
||||||
|
|
||||||
|
return filtered;
|
||||||
|
}
|
||||||
|
|
||||||
|
filterMultiRoleContext(context, multiRoleType) {
|
||||||
|
const multiRole = this.multiRoleFilters[multiRoleType];
|
||||||
|
const primaryFilter = this.agentFilters[multiRole.primary];
|
||||||
|
const secondaryFilter = this.agentFilters[multiRole.secondary];
|
||||||
|
|
||||||
|
// Combine include patterns from both roles
|
||||||
|
const combinedIncludePatterns = [
|
||||||
|
...primaryFilter.includePatterns,
|
||||||
|
...secondaryFilter.includePatterns
|
||||||
|
];
|
||||||
|
|
||||||
|
// Use primary role's exclude patterns but remove conflicts with secondary role
|
||||||
|
const combinedExcludePatterns = primaryFilter.excludePatterns.filter(
|
||||||
|
pattern => !secondaryFilter.includePatterns.includes(pattern)
|
||||||
|
);
|
||||||
|
|
||||||
|
const combinedFilter = {
|
||||||
|
includePatterns: combinedIncludePatterns,
|
||||||
|
excludePatterns: combinedExcludePatterns,
|
||||||
|
requiredSections: [
|
||||||
|
...primaryFilter.requiredSections,
|
||||||
|
...secondaryFilter.requiredSections
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
const filtered = {
|
||||||
|
shared: context.shared,
|
||||||
|
decisions: this.filterDecisions(context.decisions, combinedFilter),
|
||||||
|
progress: context.progress,
|
||||||
|
quality: context.quality,
|
||||||
|
relevantContent: this.extractRelevantContent(context, combinedFilter),
|
||||||
|
roleType: 'multi',
|
||||||
|
primaryRole: multiRole.primary,
|
||||||
|
secondaryRole: multiRole.secondary,
|
||||||
|
roleDescription: multiRole.description
|
||||||
|
};
|
||||||
|
|
||||||
|
return filtered;
|
||||||
|
}
|
||||||
|
|
||||||
|
getAgentType(agentName) {
|
||||||
|
const lowerName = agentName.toLowerCase();
|
||||||
|
|
||||||
|
// Check for multi-role patterns first (e.g., "dev-analyst", "qa+research")
|
||||||
|
const multiRolePatterns = [
|
||||||
|
{ pattern: ['dev', 'analyst'], type: 'dev-analyst' },
|
||||||
|
{ pattern: ['qa', 'research'], type: 'qa-research' },
|
||||||
|
{ pattern: ['architect', 'brainstorm'], type: 'architect-brainstorming' },
|
||||||
|
{ pattern: ['pm', 'analyst'], type: 'pm-analyst' },
|
||||||
|
{ pattern: ['ux', 'research'], type: 'ux-research' }
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const multiRole of multiRolePatterns) {
|
||||||
|
if (multiRole.pattern.every(part => lowerName.includes(part))) {
|
||||||
|
return multiRole.type;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for specialized roles
|
||||||
|
if (lowerName.includes('analyst') || lowerName.includes('analysis')) return 'analyst';
|
||||||
|
if (lowerName.includes('brainstorm') || lowerName.includes('creative')) return 'brainstorming';
|
||||||
|
if (lowerName.includes('research') || lowerName.includes('investigat')) return 'research';
|
||||||
|
|
||||||
|
// Check for primary roles
|
||||||
|
if (lowerName.includes('dev') || lowerName.includes('developer')) return 'dev';
|
||||||
|
if (lowerName.includes('qa') || lowerName.includes('test')) return 'qa';
|
||||||
|
if (lowerName.includes('arch') || lowerName.includes('architect')) return 'architect';
|
||||||
|
if (lowerName.includes('pm') || lowerName.includes('manager')) return 'pm';
|
||||||
|
if (lowerName.includes('ux') || lowerName.includes('design')) return 'ux-expert';
|
||||||
|
|
||||||
|
return 'dev'; // Default fallback
|
||||||
|
}
|
||||||
|
|
||||||
|
filterDecisions(decisions, filter) {
|
||||||
|
return decisions.filter(decision => {
|
||||||
|
const decisionText = `${decision.title} ${decision.decision} ${decision.rationale} ${decision.impact}`.toLowerCase();
|
||||||
|
|
||||||
|
// Check if decision matches include patterns
|
||||||
|
const matchesInclude = filter.includePatterns.some(pattern =>
|
||||||
|
decisionText.includes(pattern.toLowerCase())
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check if decision matches exclude patterns
|
||||||
|
const matchesExclude = filter.excludePatterns.some(pattern =>
|
||||||
|
decisionText.includes(pattern.toLowerCase())
|
||||||
|
);
|
||||||
|
|
||||||
|
return matchesInclude && !matchesExclude;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
extractRelevantContent(context, filter) {
|
||||||
|
const relevant = [];
|
||||||
|
|
||||||
|
// Extract relevant next steps
|
||||||
|
if (context.shared.nextSteps) {
|
||||||
|
context.shared.nextSteps.forEach(step => {
|
||||||
|
const stepText = step.toLowerCase();
|
||||||
|
const isRelevant = filter.includePatterns.some(pattern =>
|
||||||
|
stepText.includes(pattern.toLowerCase())
|
||||||
|
);
|
||||||
|
|
||||||
|
if (isRelevant) {
|
||||||
|
relevant.push(`Next Step: ${step}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract relevant progress items
|
||||||
|
if (context.progress.pendingTasks) {
|
||||||
|
context.progress.pendingTasks.forEach(task => {
|
||||||
|
const taskText = task.toLowerCase();
|
||||||
|
const isRelevant = filter.includePatterns.some(pattern =>
|
||||||
|
taskText.includes(pattern.toLowerCase())
|
||||||
|
);
|
||||||
|
|
||||||
|
if (isRelevant) {
|
||||||
|
relevant.push(`Pending Task: ${task}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return relevant;
|
||||||
|
}
|
||||||
|
|
||||||
|
async generateHandoffPackage(params) {
|
||||||
|
const {
|
||||||
|
handoffId,
|
||||||
|
sourceAgent,
|
||||||
|
targetAgent,
|
||||||
|
timestamp,
|
||||||
|
context,
|
||||||
|
customContext
|
||||||
|
} = params;
|
||||||
|
|
||||||
|
const agentType = this.getAgentType(targetAgent);
|
||||||
|
const nextActions = this.generateNextActions(context, agentType);
|
||||||
|
const fileReferences = this.generateFileReferences(context);
|
||||||
|
const blockers = this.extractBlockers(context);
|
||||||
|
|
||||||
|
return `# Agent Handoff: ${sourceAgent} → ${targetAgent}
|
||||||
|
|
||||||
|
**Created:** ${timestamp}
|
||||||
|
**Handoff ID:** ${handoffId}
|
||||||
|
**Source Agent:** ${sourceAgent}
|
||||||
|
**Target Agent:** ${targetAgent}
|
||||||
|
**Target Agent Type:** ${agentType}
|
||||||
|
|
||||||
|
## Context Summary
|
||||||
|
${context.shared.currentFocus || 'No current focus available.'}
|
||||||
|
|
||||||
|
${customContext.summary || ''}
|
||||||
|
|
||||||
|
## Key Decisions Made
|
||||||
|
${context.decisions.map(d => `- **${d.title}** (${d.agent}, ${d.date}): ${d.decision}`).join('\n') || '- No relevant decisions for this agent type'}
|
||||||
|
|
||||||
|
## Current Progress
|
||||||
|
**Story:** ${context.progress.currentStory || 'No active story'}
|
||||||
|
**Quality Score:** ${context.progress.qualityScore || 'Not assessed'}
|
||||||
|
|
||||||
|
**Completed Tasks:**
|
||||||
|
${context.progress.completedTasks ? context.progress.completedTasks.map(task => `- ✅ ${task}`).join('\n') : '- No completed tasks'}
|
||||||
|
|
||||||
|
**Pending Tasks:**
|
||||||
|
${context.progress.pendingTasks ? context.progress.pendingTasks.map(task => `- ⏳ ${task}`).join('\n') : '- No pending tasks'}
|
||||||
|
|
||||||
|
## Next Actions for ${targetAgent}
|
||||||
|
${nextActions.map(action => `- [ ] ${action}`).join('\n')}
|
||||||
|
|
||||||
|
## Files and References
|
||||||
|
${fileReferences.join('\n') || '- No specific file references available'}
|
||||||
|
|
||||||
|
## Blockers and Dependencies
|
||||||
|
${blockers.join('\n') || '- No blockers identified'}
|
||||||
|
|
||||||
|
## Quality Metrics
|
||||||
|
${context.quality.overallQuality ? `**Latest Quality Score:** ${context.quality.overallQuality}` : 'No quality metrics available'}
|
||||||
|
${context.quality.story ? `**Last Assessed Story:** ${context.quality.story}` : ''}
|
||||||
|
|
||||||
|
## Relevant Context
|
||||||
|
${context.relevantContent.map(item => `- ${item}`).join('\n') || '- No additional relevant context'}
|
||||||
|
|
||||||
|
## Handoff Validation
|
||||||
|
- [ ] Context completeness verified
|
||||||
|
- [ ] Decisions documented and relevant
|
||||||
|
- [ ] Next actions clearly defined for ${agentType} role
|
||||||
|
- [ ] References included
|
||||||
|
- [ ] Quality metrics current
|
||||||
|
- [ ] Agent-specific filtering applied
|
||||||
|
- [ ] Blockers and dependencies identified
|
||||||
|
|
||||||
|
## Handoff Notes
|
||||||
|
${customContext.notes || 'No additional notes provided.'}
|
||||||
|
|
||||||
|
---
|
||||||
|
*Generated by BMAD Agent Handoff System v1.0*
|
||||||
|
*Handoff Quality Score: ${this.calculateHandoffScore(context, agentType)}/100*
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
generateNextActions(context, agentType) {
|
||||||
|
const actions = [];
|
||||||
|
|
||||||
|
// Handle multi-role actions
|
||||||
|
if (this.multiRoleFilters[agentType]) {
|
||||||
|
return this.generateMultiRoleActions(context, agentType);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle single-role actions
|
||||||
|
switch (agentType) {
|
||||||
|
case 'dev':
|
||||||
|
actions.push('Review technical requirements and architecture decisions');
|
||||||
|
actions.push('Examine current code implementation status');
|
||||||
|
actions.push('Address any pending technical tasks or bugs');
|
||||||
|
if (context.progress.blockers && context.progress.blockers.length > 0) {
|
||||||
|
actions.push('Resolve identified blockers and technical dependencies');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'qa':
|
||||||
|
actions.push('Review acceptance criteria and testing requirements');
|
||||||
|
actions.push('Validate completed functionality against requirements');
|
||||||
|
actions.push('Execute test cases and identify quality issues');
|
||||||
|
actions.push('Update quality metrics and provide feedback');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'architect':
|
||||||
|
actions.push('Review system design and architectural decisions');
|
||||||
|
actions.push('Validate technical approach and integration points');
|
||||||
|
actions.push('Assess scalability and performance implications');
|
||||||
|
actions.push('Document any new architectural requirements');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'pm':
|
||||||
|
actions.push('Review project scope and timeline status');
|
||||||
|
actions.push('Assess stakeholder requirements and priority changes');
|
||||||
|
actions.push('Update project planning and resource allocation');
|
||||||
|
actions.push('Communicate progress to stakeholders');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'ux-expert':
|
||||||
|
actions.push('Review user experience requirements and design specifications');
|
||||||
|
actions.push('Validate interface design and usability considerations');
|
||||||
|
actions.push('Assess user interaction patterns and feedback');
|
||||||
|
actions.push('Update design documentation and prototypes');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'analyst':
|
||||||
|
actions.push('Analyze available data and identify key patterns');
|
||||||
|
actions.push('Generate insights from metrics and performance data');
|
||||||
|
actions.push('Create data visualization and trend analysis');
|
||||||
|
actions.push('Provide data-driven recommendations');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'brainstorming':
|
||||||
|
actions.push('Explore creative alternatives and innovative approaches');
|
||||||
|
actions.push('Generate multiple solution options without constraints');
|
||||||
|
actions.push('Challenge existing assumptions and think outside the box');
|
||||||
|
actions.push('Facilitate ideation sessions and creative problem-solving');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'research':
|
||||||
|
actions.push('Conduct comprehensive research on relevant topics');
|
||||||
|
actions.push('Investigate industry best practices and standards');
|
||||||
|
actions.push('Gather evidence and benchmark against competitors');
|
||||||
|
actions.push('Synthesize research findings into actionable insights');
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
actions.push('Review handoff context and understand current state');
|
||||||
|
actions.push('Identify specific tasks relevant to your role');
|
||||||
|
actions.push('Address any pending items in your domain');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add context-specific actions
|
||||||
|
if (context.shared.nextSteps) {
|
||||||
|
context.shared.nextSteps.forEach(step => {
|
||||||
|
if (!actions.some(action => action.toLowerCase().includes(step.toLowerCase().substring(0, 20)))) {
|
||||||
|
actions.push(step);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return actions.slice(0, 8); // Limit to 8 actions for readability
|
||||||
|
}
|
||||||
|
|
||||||
|
generateMultiRoleActions(context, multiRoleType) {
|
||||||
|
const multiRole = this.multiRoleFilters[multiRoleType];
|
||||||
|
const actions = [];
|
||||||
|
|
||||||
|
switch (multiRoleType) {
|
||||||
|
case 'dev-analyst':
|
||||||
|
actions.push('Analyze current system performance and identify optimization opportunities');
|
||||||
|
actions.push('Review code metrics and technical debt patterns');
|
||||||
|
actions.push('Implement data-driven development improvements');
|
||||||
|
actions.push('Create performance monitoring and analysis dashboards');
|
||||||
|
actions.push('Research and apply evidence-based development practices');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'qa-research':
|
||||||
|
actions.push('Research industry testing standards and compliance frameworks');
|
||||||
|
actions.push('Investigate best practices for quality assurance methodologies');
|
||||||
|
actions.push('Analyze quality trends and benchmark against industry standards');
|
||||||
|
actions.push('Design comprehensive testing strategies based on research findings');
|
||||||
|
actions.push('Validate testing approaches through evidence-based research');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'architect-brainstorming':
|
||||||
|
actions.push('Explore creative architectural patterns and innovative design approaches');
|
||||||
|
actions.push('Brainstorm multiple system design alternatives without constraints');
|
||||||
|
actions.push('Challenge conventional architecture assumptions');
|
||||||
|
actions.push('Generate innovative solutions for complex integration challenges');
|
||||||
|
actions.push('Facilitate collaborative design exploration sessions');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'pm-analyst':
|
||||||
|
actions.push('Analyze project data to identify trends and optimization opportunities');
|
||||||
|
actions.push('Research stakeholder feedback and user behavior patterns');
|
||||||
|
actions.push('Create data-driven project prioritization and resource allocation');
|
||||||
|
actions.push('Generate insights from project metrics and timeline analysis');
|
||||||
|
actions.push('Develop evidence-based project planning and risk assessment');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'ux-research':
|
||||||
|
actions.push('Conduct user research and usability studies');
|
||||||
|
actions.push('Investigate accessibility standards and inclusive design practices');
|
||||||
|
actions.push('Analyze user behavior data and interaction patterns');
|
||||||
|
actions.push('Research industry UX trends and best practices');
|
||||||
|
actions.push('Validate design decisions through evidence-based user research');
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
actions.push('Apply multi-role perspective to current challenges');
|
||||||
|
actions.push('Integrate primary and secondary role capabilities');
|
||||||
|
actions.push('Provide comprehensive analysis from multiple viewpoints');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add context-specific actions
|
||||||
|
if (context.shared.nextSteps) {
|
||||||
|
context.shared.nextSteps.forEach(step => {
|
||||||
|
if (!actions.some(action => action.toLowerCase().includes(step.toLowerCase().substring(0, 20)))) {
|
||||||
|
actions.push(step);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return actions.slice(0, 10); // Allow more actions for multi-role scenarios
|
||||||
|
}
|
||||||
|
|
||||||
|
generateFileReferences(context) {
|
||||||
|
const references = [];
|
||||||
|
|
||||||
|
// Add standard workspace references
|
||||||
|
references.push('📁 `.workspace/context/shared-context.md` - Current workspace context');
|
||||||
|
references.push('📋 `.workspace/decisions/decisions-log.md` - Architectural decisions');
|
||||||
|
references.push('📈 `.workspace/progress/progress-summary.md` - Development progress');
|
||||||
|
references.push('📊 `.workspace/quality/quality-metrics.md` - Quality assessments');
|
||||||
|
|
||||||
|
// Add story-specific references if available
|
||||||
|
if (context.progress.currentStory) {
|
||||||
|
references.push(`📖 Story documentation for: ${context.progress.currentStory}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return references;
|
||||||
|
}
|
||||||
|
|
||||||
|
extractBlockers(context) {
|
||||||
|
const blockers = [];
|
||||||
|
|
||||||
|
if (context.progress.blockers && context.progress.blockers.length > 0) {
|
||||||
|
context.progress.blockers.forEach(blocker => {
|
||||||
|
blockers.push(`🚫 ${blocker}`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for decision-based blockers
|
||||||
|
context.decisions.forEach(decision => {
|
||||||
|
if (decision.status === 'pending' || decision.impact.toLowerCase().includes('blocker')) {
|
||||||
|
blockers.push(`⚠️ Decision pending: ${decision.title}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return blockers;
|
||||||
|
}
|
||||||
|
|
||||||
|
validateHandoff(handoffContent, targetAgent) {
|
||||||
|
const validation = {
|
||||||
|
score: 0,
|
||||||
|
maxScore: 100,
|
||||||
|
issues: [],
|
||||||
|
strengths: []
|
||||||
|
};
|
||||||
|
|
||||||
|
const agentType = this.getAgentType(targetAgent);
|
||||||
|
const requiredSections = this.agentFilters[agentType]?.requiredSections || [];
|
||||||
|
|
||||||
|
// Check required sections (30 points)
|
||||||
|
let sectionsFound = 0;
|
||||||
|
requiredSections.forEach(section => {
|
||||||
|
if (handoffContent.toLowerCase().includes(section.toLowerCase())) {
|
||||||
|
sectionsFound++;
|
||||||
|
validation.strengths.push(`Required section present: ${section}`);
|
||||||
|
} else {
|
||||||
|
validation.issues.push(`Missing required section: ${section}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (requiredSections.length > 0) {
|
||||||
|
validation.score += (sectionsFound / requiredSections.length) * 30;
|
||||||
|
} else {
|
||||||
|
validation.score += 30; // No specific requirements
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check context completeness (25 points)
|
||||||
|
const hasContext = handoffContent.includes('## Context Summary') &&
|
||||||
|
handoffContent.length > 500;
|
||||||
|
if (hasContext) {
|
||||||
|
validation.score += 25;
|
||||||
|
validation.strengths.push('Comprehensive context summary provided');
|
||||||
|
} else {
|
||||||
|
validation.issues.push('Context summary incomplete or missing');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check decisions documentation (20 points)
|
||||||
|
const hasDecisions = handoffContent.includes('## Key Decisions Made');
|
||||||
|
if (hasDecisions) {
|
||||||
|
validation.score += 20;
|
||||||
|
validation.strengths.push('Key decisions documented');
|
||||||
|
} else {
|
||||||
|
validation.issues.push('Key decisions not documented');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check next actions (15 points)
|
||||||
|
const hasNextActions = handoffContent.includes('## Next Actions for') &&
|
||||||
|
handoffContent.includes('- [ ]');
|
||||||
|
if (hasNextActions) {
|
||||||
|
validation.score += 15;
|
||||||
|
validation.strengths.push('Clear next actions defined');
|
||||||
|
} else {
|
||||||
|
validation.issues.push('Next actions unclear or missing');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check references (10 points)
|
||||||
|
const hasReferences = handoffContent.includes('## Files and References');
|
||||||
|
if (hasReferences) {
|
||||||
|
validation.score += 10;
|
||||||
|
validation.strengths.push('File references provided');
|
||||||
|
} else {
|
||||||
|
validation.issues.push('File references missing');
|
||||||
|
}
|
||||||
|
|
||||||
|
validation.grade = this.scoreToGrade(validation.score);
|
||||||
|
|
||||||
|
return validation;
|
||||||
|
}
|
||||||
|
|
||||||
|
scoreToGrade(score) {
|
||||||
|
if (score >= 90) return 'A';
|
||||||
|
if (score >= 80) return 'B';
|
||||||
|
if (score >= 70) return 'C';
|
||||||
|
if (score >= 60) return 'D';
|
||||||
|
return 'F';
|
||||||
|
}
|
||||||
|
|
||||||
|
calculateHandoffScore(context, agentType) {
|
||||||
|
let score = 50; // Base score
|
||||||
|
|
||||||
|
// Add points for context richness
|
||||||
|
if (context.shared.currentFocus) score += 10;
|
||||||
|
if (context.decisions.length > 0) score += 15;
|
||||||
|
if (context.progress.currentStory) score += 10;
|
||||||
|
if (context.quality.overallQuality) score += 10;
|
||||||
|
if (context.relevantContent.length > 0) score += 5;
|
||||||
|
|
||||||
|
return Math.min(score, 100);
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateHandoffRegistry(handoffId, sourceAgent, targetAgent, validation) {
|
||||||
|
try {
|
||||||
|
const registryFile = path.join(this.handoffsPath, 'handoff-registry.json');
|
||||||
|
let registry = [];
|
||||||
|
|
||||||
|
if (fs.existsSync(registryFile)) {
|
||||||
|
const content = fs.readFileSync(registryFile, 'utf8');
|
||||||
|
registry = JSON.parse(content);
|
||||||
|
}
|
||||||
|
|
||||||
|
registry.push({
|
||||||
|
handoffId,
|
||||||
|
sourceAgent,
|
||||||
|
targetAgent,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
validationScore: validation.score,
|
||||||
|
grade: validation.grade,
|
||||||
|
status: 'pending'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Keep only last 100 handoffs
|
||||||
|
if (registry.length > 100) {
|
||||||
|
registry = registry.slice(-100);
|
||||||
|
}
|
||||||
|
|
||||||
|
fs.writeFileSync(registryFile, JSON.stringify(registry, null, 2));
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to update handoff registry:', error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async logHandoffEvent(event) {
|
||||||
|
try {
|
||||||
|
const auditFile = path.join(this.handoffsPath, 'audit-trail.md');
|
||||||
|
let auditContent = '';
|
||||||
|
|
||||||
|
if (fs.existsSync(auditFile)) {
|
||||||
|
auditContent = fs.readFileSync(auditFile, 'utf8');
|
||||||
|
} else {
|
||||||
|
auditContent = '# Handoff Audit Trail\n\n';
|
||||||
|
}
|
||||||
|
|
||||||
|
const logEntry = `## Handoff ${event.handoffId}
|
||||||
|
**Timestamp:** ${event.timestamp}
|
||||||
|
**Source:** ${event.sourceAgent}
|
||||||
|
**Target:** ${event.targetAgent}
|
||||||
|
**Status:** ${event.status}
|
||||||
|
**Validation Score:** ${event.validationScore}/100
|
||||||
|
**File:** ${event.filePath}
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
`;
|
||||||
|
|
||||||
|
auditContent += logEntry;
|
||||||
|
fs.writeFileSync(auditFile, auditContent);
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to log handoff event:', error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getPendingHandoffs(targetAgent = null) {
|
||||||
|
try {
|
||||||
|
const registryFile = path.join(this.handoffsPath, 'handoff-registry.json');
|
||||||
|
|
||||||
|
if (!fs.existsSync(registryFile)) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = fs.readFileSync(registryFile, 'utf8');
|
||||||
|
const registry = JSON.parse(content);
|
||||||
|
|
||||||
|
let pending = registry.filter(handoff => handoff.status === 'pending');
|
||||||
|
|
||||||
|
if (targetAgent) {
|
||||||
|
pending = pending.filter(handoff => handoff.targetAgent === targetAgent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return pending.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to get pending handoffs:', error.message);
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async getHandoffStats() {
|
||||||
|
try {
|
||||||
|
const registryFile = path.join(this.handoffsPath, 'handoff-registry.json');
|
||||||
|
|
||||||
|
if (!fs.existsSync(registryFile)) {
|
||||||
|
return { total: 0, pending: 0, avgScore: 0, gradeDistribution: {} };
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = fs.readFileSync(registryFile, 'utf8');
|
||||||
|
const registry = JSON.parse(content);
|
||||||
|
|
||||||
|
const stats = {
|
||||||
|
total: registry.length,
|
||||||
|
pending: registry.filter(h => h.status === 'pending').length,
|
||||||
|
avgScore: registry.reduce((sum, h) => sum + h.validationScore, 0) / registry.length,
|
||||||
|
gradeDistribution: {}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Calculate grade distribution
|
||||||
|
registry.forEach(handoff => {
|
||||||
|
stats.gradeDistribution[handoff.grade] = (stats.gradeDistribution[handoff.grade] || 0) + 1;
|
||||||
|
});
|
||||||
|
|
||||||
|
return stats;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to get handoff stats:', error.message);
|
||||||
|
return { total: 0, pending: 0, avgScore: 0, gradeDistribution: {} };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = HandoffManager;
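
For orientation, here is a minimal usage sketch of the `HandoffManager` class defined above; the `require` path is an assumption, since this diff does not show which file under `workspace-utils/` the class is saved in.

```js
// Minimal usage sketch of the HandoffManager class above.
// The require path is an assumption; the diff does not show the file name.
const HandoffManager = require('./workspace-utils/handoff-manager');

async function main() {
  // Uses <cwd>/.workspace by default when no workspace path is passed
  const manager = new HandoffManager();

  // Create a dev -> qa handoff; summary and notes flow into the handoff markdown
  const result = await manager.createHandoff('dev', 'qa', {
    summary: 'Feature implementation complete, ready for validation',
    notes: 'Please start with the acceptance criteria for the new init flow'
  });
  console.log(`Created ${result.handoffId} (grade ${result.validation.grade})`);

  // List handoffs still waiting to be picked up by the qa agent
  const pending = await manager.getPendingHandoffs('qa');
  console.log(`Pending handoffs for qa: ${pending.length}`);
}

main().catch(console.error);
```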
|
||||||
|
|
@@ -0,0 +1,325 @@
|
||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* BMAD Workspace Cleanup Utility
|
||||||
|
* Cross-IDE workspace maintenance and optimization
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up stale sessions
|
||||||
|
*/
|
||||||
|
function cleanupStaleSessions(workspacePath) {
|
||||||
|
const sessionsPath = path.join(workspacePath, 'sessions');
|
||||||
|
if (!fs.existsSync(sessionsPath)) {
|
||||||
|
return { removed: 0, errors: [] };
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
|
||||||
|
const now = new Date();
|
||||||
|
let removed = 0;
|
||||||
|
const errors = [];
|
||||||
|
|
||||||
|
for (const file of sessionFiles) {
|
||||||
|
try {
|
||||||
|
const sessionPath = path.join(sessionsPath, file);
|
||||||
|
const sessionContent = fs.readFileSync(sessionPath, 'utf8');
|
||||||
|
const sessionData = JSON.parse(sessionContent);
|
||||||
|
|
||||||
|
// Remove sessions older than 24 hours without heartbeat
|
||||||
|
const lastHeartbeat = new Date(sessionData.lastHeartbeat);
|
||||||
|
const timeDiff = now - lastHeartbeat;
|
||||||
|
const isStale = timeDiff > 86400000; // 24 hours
|
||||||
|
|
||||||
|
if (isStale) {
|
||||||
|
fs.unlinkSync(sessionPath);
|
||||||
|
removed++;
|
||||||
|
console.log(`🗑️ Removed stale session: ${sessionData.id} (${sessionData.ide})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (e) {
|
||||||
|
errors.push(`Failed to process ${file}: ${e.message}`);
|
||||||
|
// Try to remove corrupted files
|
||||||
|
try {
|
||||||
|
fs.unlinkSync(path.join(sessionsPath, file));
|
||||||
|
console.log(`🗑️ Removed corrupted session file: ${file}`);
|
||||||
|
removed++;
|
||||||
|
} catch (removeError) {
|
||||||
|
console.error(`❌ Could not remove corrupted file ${file}: ${removeError.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { removed, errors };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Repair workspace directory structure
|
||||||
|
*/
|
||||||
|
function repairWorkspaceStructure(workspacePath) {
|
||||||
|
const requiredDirs = [
|
||||||
|
'sessions',
|
||||||
|
'context',
|
||||||
|
'handoffs',
|
||||||
|
'decisions',
|
||||||
|
'progress',
|
||||||
|
'quality',
|
||||||
|
'archive',
|
||||||
|
'hooks',
|
||||||
|
'templates',
|
||||||
|
'logs'
|
||||||
|
];
|
||||||
|
|
||||||
|
let created = 0;
|
||||||
|
|
||||||
|
for (const dir of requiredDirs) {
|
||||||
|
const dirPath = path.join(workspacePath, dir);
|
||||||
|
if (!fs.existsSync(dirPath)) {
|
||||||
|
fs.mkdirSync(dirPath, { recursive: true });
|
||||||
|
console.log(`📁 Created directory: ${dir}`);
|
||||||
|
created++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return created;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Archive old logs
|
||||||
|
*/
|
||||||
|
function archiveLogs(workspacePath) {
|
||||||
|
const logsPath = path.join(workspacePath, 'logs');
|
||||||
|
const logFile = path.join(logsPath, 'workspace.log');
|
||||||
|
|
||||||
|
if (!fs.existsSync(logFile)) {
|
||||||
|
return { archived: false, reason: 'No log file found' };
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = fs.statSync(logFile);
|
||||||
|
const logSizeMB = stats.size / (1024 * 1024);
|
||||||
|
|
||||||
|
// Archive logs larger than 5MB
|
||||||
|
if (logSizeMB > 5) {
|
||||||
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||||
|
const archivePath = path.join(workspacePath, 'archive', `workspace-${timestamp}.log`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Ensure archive directory exists
|
||||||
|
const archiveDir = path.join(workspacePath, 'archive');
|
||||||
|
if (!fs.existsSync(archiveDir)) {
|
||||||
|
fs.mkdirSync(archiveDir, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Move log to archive
|
||||||
|
fs.renameSync(logFile, archivePath);
|
||||||
|
|
||||||
|
// Create new empty log file
|
||||||
|
fs.writeFileSync(logFile, '');
|
||||||
|
|
||||||
|
console.log(`📦 Archived log file: ${logSizeMB.toFixed(1)}MB → archive/workspace-${timestamp}.log`);
|
||||||
|
return { archived: true, size: logSizeMB, archivePath };
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`❌ Failed to archive log: ${error.message}`);
|
||||||
|
return { archived: false, reason: error.message };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { archived: false, reason: `Log size OK (${logSizeMB.toFixed(1)}MB)` };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up empty context files
|
||||||
|
*/
|
||||||
|
function cleanupContextFiles(workspacePath) {
|
||||||
|
const contextPath = path.join(workspacePath, 'context');
|
||||||
|
if (!fs.existsSync(contextPath)) {
|
||||||
|
return { removed: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
let removed = 0;
|
||||||
|
const files = fs.readdirSync(contextPath);
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const filePath = path.join(contextPath, file);
|
||||||
|
const stats = fs.statSync(filePath);
|
||||||
|
|
||||||
|
if (stats.isFile() && stats.size === 0) {
|
||||||
|
fs.unlinkSync(filePath);
|
||||||
|
console.log(`🗑️ Removed empty context file: ${file}`);
|
||||||
|
removed++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { removed };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optimize workspace storage
|
||||||
|
*/
|
||||||
|
function optimizeStorage(workspacePath) {
|
||||||
|
const optimization = {
|
||||||
|
sessionsCleaned: 0,
|
||||||
|
directoriesCreated: 0,
|
||||||
|
logsArchived: false,
|
||||||
|
contextFilesCleaned: 0,
|
||||||
|
totalSpaceSaved: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
// Clean stale sessions
|
||||||
|
const sessionCleanup = cleanupStaleSessions(workspacePath);
|
||||||
|
optimization.sessionsCleaned = sessionCleanup.removed;
|
||||||
|
|
||||||
|
// Repair directory structure
|
||||||
|
optimization.directoriesCreated = repairWorkspaceStructure(workspacePath);
|
||||||
|
|
||||||
|
// Archive large logs
|
||||||
|
const logArchive = archiveLogs(workspacePath);
|
||||||
|
optimization.logsArchived = logArchive.archived;
|
||||||
|
if (logArchive.size) {
|
||||||
|
optimization.totalSpaceSaved += logArchive.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean empty context files
|
||||||
|
const contextCleanup = cleanupContextFiles(workspacePath);
|
||||||
|
optimization.contextFilesCleaned = contextCleanup.removed;
|
||||||
|
|
||||||
|
return optimization;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate workspace integrity
|
||||||
|
*/
|
||||||
|
function validateWorkspaceIntegrity(workspacePath) {
|
||||||
|
const issues = [];
|
||||||
|
const warnings = [];
|
||||||
|
|
||||||
|
// Check workspace config
|
||||||
|
const configPath = path.join(workspacePath, 'workspace-config.json');
|
||||||
|
if (!fs.existsSync(configPath)) {
|
||||||
|
issues.push('Missing workspace configuration file');
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
JSON.parse(fs.readFileSync(configPath, 'utf8'));
|
||||||
|
} catch (e) {
|
||||||
|
issues.push('Corrupted workspace configuration');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check directory permissions
|
||||||
|
try {
|
||||||
|
const testFile = path.join(workspacePath, '.write-test');
|
||||||
|
fs.writeFileSync(testFile, 'test');
|
||||||
|
fs.unlinkSync(testFile);
|
||||||
|
} catch (e) {
|
||||||
|
issues.push('Insufficient write permissions');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check session files integrity
|
||||||
|
const sessionsPath = path.join(workspacePath, 'sessions');
|
||||||
|
if (fs.existsSync(sessionsPath)) {
|
||||||
|
const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
|
||||||
|
let corruptedSessions = 0;
|
||||||
|
|
||||||
|
for (const file of sessionFiles) {
|
||||||
|
try {
|
||||||
|
JSON.parse(fs.readFileSync(path.join(sessionsPath, file), 'utf8'));
|
||||||
|
} catch (e) {
|
||||||
|
corruptedSessions++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (corruptedSessions > 0) {
|
||||||
|
warnings.push(`${corruptedSessions} corrupted session files found`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { issues, warnings };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main cleanup function
|
||||||
|
*/
|
||||||
|
async function cleanupWorkspace(options = {}) {
|
||||||
|
try {
|
||||||
|
const workspacePath = path.join(process.cwd(), '.workspace');
|
||||||
|
|
||||||
|
if (!fs.existsSync(workspacePath)) {
|
||||||
|
console.error('❌ Workspace directory not found.');
|
||||||
|
console.error(' Run `npm run workspace-init` to initialize workspace');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('🧹 BMAD Workspace Cleanup');
|
||||||
|
console.log('========================');
|
||||||
|
console.log(`📁 Workspace: ${workspacePath}`);
|
||||||
|
|
||||||
|
// Validate integrity first
|
||||||
|
if (!options.skipValidation) {
|
||||||
|
console.log('\n🔍 Validating workspace integrity...');
|
||||||
|
const validation = validateWorkspaceIntegrity(workspacePath);
|
||||||
|
|
||||||
|
if (validation.issues.length > 0) {
|
||||||
|
console.log('❌ Critical Issues Found:');
|
||||||
|
validation.issues.forEach(issue => console.log(` • ${issue}`));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validation.warnings.length > 0) {
|
||||||
|
console.log('⚠️ Warnings:');
|
||||||
|
validation.warnings.forEach(warning => console.log(` • ${warning}`));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validation.issues.length === 0 && validation.warnings.length === 0) {
|
||||||
|
console.log('✅ Workspace integrity OK');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform optimization
|
||||||
|
console.log('\n🔧 Optimizing workspace...');
|
||||||
|
const optimization = optimizeStorage(workspacePath);
|
||||||
|
|
||||||
|
// Log cleanup activity
|
||||||
|
const logEntry = {
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'workspace-cleanup',
|
||||||
|
optimization: optimization,
|
||||||
|
user: process.env.USER || process.env.USERNAME || 'unknown'
|
||||||
|
};
|
||||||
|
|
||||||
|
const logPath = path.join(workspacePath, 'logs', 'workspace.log');
|
||||||
|
fs.appendFileSync(logPath, JSON.stringify(logEntry) + '\n');
|
||||||
|
|
||||||
|
// Summary
|
||||||
|
console.log('\n✅ Cleanup completed successfully');
|
||||||
|
console.log('================================');
|
||||||
|
console.log(`🗑️ Sessions cleaned: ${optimization.sessionsCleaned}`);
|
||||||
|
console.log(`📁 Directories created: ${optimization.directoriesCreated}`);
|
||||||
|
console.log(`📦 Logs archived: ${optimization.logsArchived ? 'Yes' : 'No'}`);
|
||||||
|
console.log(`🗄️ Context files cleaned: ${optimization.contextFilesCleaned}`);
|
||||||
|
|
||||||
|
if (optimization.totalSpaceSaved > 0) {
|
||||||
|
console.log(`💾 Space saved: ${optimization.totalSpaceSaved.toFixed(1)}MB`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('\n🚀 Workspace is now optimized and ready for use!');
|
||||||
|
console.log(' Run `npm run workspace-status` to verify health');
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Failed to cleanup workspace:', error.message);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Command line execution
|
||||||
|
if (require.main === module) {
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const options = {
|
||||||
|
skipValidation: args.includes('--skip-validation'),
|
||||||
|
force: args.includes('--force')
|
||||||
|
};
|
||||||
|
|
||||||
|
cleanupWorkspace(options);
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = { cleanupWorkspace, validateWorkspaceIntegrity, optimizeStorage };
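
For completeness, a short sketch of using these exports programmatically rather than through the npm script; the require path matches the `workspace-cleanup` entry added to `package.json` above.

```js
// Minimal sketch of programmatic use; the path follows the
// "workspace-cleanup" script added to package.json (workspace-utils/cleanup.js).
const path = require('path');
const { cleanupWorkspace, validateWorkspaceIntegrity } = require('./workspace-utils/cleanup');

const workspacePath = path.join(process.cwd(), '.workspace');

// Read-only integrity check
const { issues, warnings } = validateWorkspaceIntegrity(workspacePath);
console.log(`Issues: ${issues.length}, warnings: ${warnings.length}`);

// Full cleanup without the validation pass.
// Note: cleanupWorkspace() exits the process if .workspace/ does not exist.
cleanupWorkspace({ skipValidation: true });
```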
|
||||||
|
|
@ -0,0 +1,215 @@
|
||||||
|
# BMAD Workspace Integration - Cursor IDE
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
BMAD workspace utilities are fully compatible with Cursor IDE, providing seamless collaborative development experience across your team.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
### 1. Initialize Workspace
|
||||||
|
```bash
|
||||||
|
npm run workspace-init
|
||||||
|
```
|
||||||
|
This will:
|
||||||
|
- Create `.workspace/` directory structure
|
||||||
|
- Set up session tracking for Cursor
|
||||||
|
- Generate Cursor-specific configuration files
|
||||||
|
- Create IDE-specific templates and examples
|
||||||
|
|
||||||
|
### 2. Verify Installation
|
||||||
|
```bash
|
||||||
|
npm run workspace-status
|
||||||
|
```
|
||||||
|
You should see:
|
||||||
|
- ✅ Active session detected (cursor)
|
||||||
|
- 💚 Health Score: 90+/100
|
||||||
|
- 📁 All required directories present
|
||||||
|
|
||||||
|
## Cursor-Specific Features
|
||||||
|
|
||||||
|
### 🎯 Native Integration
|
||||||
|
- **Terminal Commands**: All workspace utilities available through Cursor's integrated terminal
|
||||||
|
- **Git Integration**: Workspace operations respect Cursor's git panel and version control
|
||||||
|
- **File Explorer**: Workspace directories appear in Cursor's file explorer with proper icons
|
||||||
|
- **Custom Rules**: Workspace state can be referenced in `.cursor/rules/` files
|
||||||
|
|
||||||
|
### 🔧 Workspace Commands
|
||||||
|
|
||||||
|
#### Session Management
|
||||||
|
```bash
|
||||||
|
npm run workspace-init # Start new collaboration session
|
||||||
|
npm run workspace-status # Check team activity and workspace health
|
||||||
|
npm run workspace-sync # Synchronize with latest team context
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Agent Handoffs
|
||||||
|
```bash
|
||||||
|
npm run workspace-handoff # Interactive handoff creation
|
||||||
|
npm run workspace-handoff create --from dev --to qa --work "Feature implementation complete"
|
||||||
|
npm run workspace-handoff list # Show recent handoffs
|
||||||
|
npm run workspace-handoff agents # List available BMAD agents
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Maintenance
|
||||||
|
```bash
|
||||||
|
npm run workspace-cleanup # Clean stale sessions and optimize storage
|
||||||
|
npm run workspace-health # Comprehensive health check and diagnostics
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cursor Integration Patterns
|
||||||
|
|
||||||
|
### 1. Custom Rules Integration
|
||||||
|
Create `.cursor/rules/workspace.md` to integrate workspace context:
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
# Workspace-Aware Development Rules
|
||||||
|
|
||||||
|
## Context Integration
|
||||||
|
- Before making changes, check workspace context: `.workspace/context/sync-summary.md`
|
||||||
|
- Review recent handoffs: `.workspace/handoffs/`
|
||||||
|
- Check quality reports: `.workspace/quality/`
|
||||||
|
|
||||||
|
## Agent Collaboration
|
||||||
|
- When ready for QA, use: `npm run workspace-handoff create --to qa`
|
||||||
|
- Before major changes, sync context: `npm run workspace-sync`
|
||||||
|
- Report issues in workspace context files for team visibility
|
||||||
|
|
||||||
|
## Quality Standards
|
||||||
|
- Run workspace health check before commits: `npm run workspace-health`
|
||||||
|
- Maintain workspace cleanliness: `npm run workspace-cleanup` weekly
|
||||||
|
- Update workspace context with significant progress
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Git Integration
|
||||||
|
The workspace system integrates with Cursor's git features:
|
||||||
|
|
||||||
|
- **Pre-commit**: Workspace health automatically checked
|
||||||
|
- **Branch switching**: Session context preserved across branches
|
||||||
|
- **Merge conflicts**: Workspace context helps resolve conflicts
|
||||||
|
- **Commit messages**: Include workspace session ID for traceability

### 3. File Organization
Cursor will show the workspace structure clearly:

```
📁 .workspace/
├── 📂 sessions/     # Current and past development sessions
├── 📂 context/      # Shared development context
├── 📂 handoffs/     # Agent-to-agent work transitions
├── 📂 progress/     # Development progress tracking
├── 📂 quality/      # Quality reports and metrics
├── 📂 decisions/    # Architecture and design decisions
└── 📂 templates/    # IDE-specific setup guides
```
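
If you ever need to recreate this layout outside of the installer (for example in a throwaway sandbox), the folders are plain directories. A minimal sketch using only Node's standard library; note that `workspace-init` also sets up session tracking and configuration files, so this is not a replacement for it:

```javascript
// Recreate the .workspace/ directory layout shown above (logs/ is included
// because the health check expects it as well).
const fs = require('fs');
const path = require('path');

const dirs = ['sessions', 'context', 'handoffs', 'progress', 'quality', 'decisions', 'templates', 'logs'];
for (const dir of dirs) {
  fs.mkdirSync(path.join(process.cwd(), '.workspace', dir), { recursive: true });
}
```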

## Best Practices for Cursor Users

### 🚀 Starting Your Work Session
1. **Open terminal** in Cursor (Ctrl+` or Cmd+`)
2. **Initialize workspace**: `npm run workspace-init`
3. **Check team status**: `npm run workspace-status`
4. **Sync latest context**: `npm run workspace-sync`

### 🔄 During Development
- **Update heartbeat**: Workspace automatically tracks your active session
- **Share context**: Add important findings to `.workspace/context/`
- **Track decisions**: Document choices in `.workspace/decisions/`
- **Monitor health**: Run `npm run workspace-health` if issues arise

### 📤 Handing Off Work
1. **Prepare handoff**: `npm run workspace-handoff create --to [agent]`
2. **Add context**: Include current work status and blockers
3. **Sync final state**: `npm run workspace-sync`
4. **Verify handoff**: Check `.workspace/handoffs/` for the generated summary (see the sketch below)
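
Handoffs are written as paired `<id>.json` and `<id>.md` files, so step 4 can also be scripted. A short sketch that prints the most recent handoff; the field names match the JSON written by the handoff utility added in this commit:

```javascript
// Print a one-line summary of the most recent handoff in .workspace/handoffs/.
const fs = require('fs');
const path = require('path');

const handoffsDir = path.join(process.cwd(), '.workspace', 'handoffs');
const latest = fs.readdirSync(handoffsDir)
  .filter((file) => file.endsWith('.json'))
  .map((file) => JSON.parse(fs.readFileSync(path.join(handoffsDir, file), 'utf8')))
  .sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp))[0];

if (latest) {
  console.log(`${latest.id}: ${latest.fromAgent} -> ${latest.toAgent}`);
  console.log(`Work: ${latest.currentWork}`);
} else {
  console.log('No handoffs found.');
}
```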

### 🧹 Weekly Maintenance
```bash
# Clean up workspace (run weekly)
npm run workspace-cleanup

# Health check (run before important work)
npm run workspace-health

# Full sync (run when switching contexts)
npm run workspace-sync
```

## Troubleshooting

### Common Issues

**"Workspace directory not found"**
- Solution: Run `npm run workspace-init` from your project root
- Verify you're in the correct project directory

**"Permission denied" errors**
- Solution: Check file permissions on `.workspace/` directory
- Run `chmod -R 755 .workspace/` if needed (Unix/Mac)

**"No active session found"**
- Solution: Initialize a new session with `npm run workspace-init`
- Or sync with existing sessions: `npm run workspace-sync`

**Session conflicts**
- Multiple team members: Each gets unique session ID
- Session cleanup: Run `npm run workspace-cleanup` to remove stale sessions
- Health check: `npm run workspace-health` identifies session issues

### IDE-Specific Issues

**Cursor git panel not updating**
- Workspace operations are git-aware
- Refresh Cursor's git panel (Ctrl+Shift+P → "Git: Refresh")
- Check `.workspace/logs/workspace.log` for detailed activity

**Terminal commands not found**
- Ensure you're in project root directory
- Verify `package.json` has workspace scripts
- Reinstall: `npm install` to refresh node_modules

## Advanced Usage

### Custom Workspace Scripts
Add project-specific workspace commands to `package.json`:

```json
{
  "scripts": {
    "workspace-deploy": "npm run workspace-sync && npm run deploy",
    "workspace-test": "npm run workspace-health && npm test",
    "workspace-reset": "npm run workspace-cleanup --force && npm run workspace-init"
  }
}
```

### Environment Variables
Set Cursor-specific environment variables:

```bash
# In your shell profile or .env file
export IDE_TYPE=cursor
export WORKSPACE_AUTO_SYNC=true
export WORKSPACE_LOG_LEVEL=info
```
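
How each variable is consumed is up to the individual scripts, but the general pattern is a plain `process.env` lookup. A sketch; the defaults shown here are illustrative assumptions, not documented behaviour:

```javascript
// Illustrative: how a workspace script might read the variables above.
const ideType = process.env.IDE_TYPE || 'unknown';
const autoSync = process.env.WORKSPACE_AUTO_SYNC === 'true';
const logLevel = process.env.WORKSPACE_LOG_LEVEL || 'info';

console.log(`IDE: ${ideType}, auto-sync: ${autoSync}, log level: ${logLevel}`);
```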

### Integration with Cursor AI
When using Cursor's AI features, reference workspace context:

1. **Ask AI to check**: "Review the workspace context in `.workspace/context/sync-summary.md`"
2. **Include handoff context**: "Consider the recent handoff in `.workspace/handoffs/`"
3. **Reference quality reports**: "Check quality metrics in `.workspace/quality/`"

## Support

### Getting Help
- **Workspace status**: `npm run workspace-status` shows current state
- **Health diagnostics**: `npm run workspace-health --verbose` for detailed analysis
- **Log files**: Check `.workspace/logs/workspace.log` for activity history (see the sketch below)
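
The log is newline-delimited JSON (one entry per line with at least `timestamp`, `action`, and `user` fields), so recent activity can be inspected with a few lines of Node:

```javascript
// Show the last ten entries from the workspace activity log.
const fs = require('fs');
const path = require('path');

const logFile = path.join(process.cwd(), '.workspace', 'logs', 'workspace.log');
const lines = fs.readFileSync(logFile, 'utf8').trim().split('\n');

for (const line of lines.slice(-10)) {
  try {
    const entry = JSON.parse(line);
    console.log(`${entry.timestamp} ${entry.action} (${entry.user || 'unknown'})`);
  } catch (error) {
    // Skip malformed lines rather than aborting.
  }
}
```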

### Team Coordination
- **Shared context**: All workspace data is git-trackable
- **Session visibility**: Team members can see active sessions
- **Handoff notifications**: Clear handoff documentation for smooth transitions

---

*This guide is specific to Cursor IDE. For other IDEs, see the respective documentation in `workspace-utils/docs/`.*

@ -0,0 +1,388 @@
# BMAD Workspace Integration - GitHub Copilot

## Overview
BMAD workspace utilities integrate seamlessly with GitHub Copilot in VS Code, providing enhanced AI-assisted development with intelligent context sharing and collaborative workspace management.

## Setup

### 1. Prerequisites
- VS Code with GitHub Copilot extension installed
- GitHub Copilot subscription active
- BMAD project initialized

### 2. Initialize Workspace
```bash
npm run workspace-init
```
This creates:
- Copilot-aware session tracking
- AI context files optimized for Copilot suggestions
- Integration with VS Code's workspace settings

### 3. Verify Integration
```bash
npm run workspace-status
```
Should show:
- ✅ Active session detected (github-copilot)
- 🤖 AI assistance integration enabled
- 📁 Copilot context files prepared

## GitHub Copilot Integration Features

### 🤖 AI-Enhanced Development
- **Context-Aware Suggestions**: Workspace context informs Copilot suggestions
- **Multi-File Understanding**: Copilot can reference workspace context across files
- **Collaborative AI**: Workspace tracks AI-assisted code for team visibility
- **Quality Integration**: AI suggestions tracked through workspace quality metrics

### 🧠 Intelligent Context Management

#### Copilot-Optimized Commands
```bash
npm run workspace-init     # Creates Copilot-aware session
npm run workspace-status   # Shows AI assistance metrics
npm run workspace-sync     # Updates context for better Copilot suggestions
npm run workspace-health   # Includes AI code quality checks
```

#### AI Context Files
```bash
# Context files optimized for Copilot
.workspace/context/copilot-context.md   # Current development context
.workspace/context/code-patterns.md     # Established code patterns
.workspace/context/ai-decisions.md      # AI-assisted decisions
.workspace/quality/ai-metrics.json      # AI code quality tracking
```
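
If you prefer to generate `copilot-context.md` rather than edit it by hand, a small script can assemble it from other context files. A sketch; the headings and the `current-work.md` source are illustrative choices, not a required schema:

```javascript
// Regenerate .workspace/context/copilot-context.md from simpler building blocks.
const fs = require('fs');
const path = require('path');

const contextDir = path.join(process.cwd(), '.workspace', 'context');
const currentWorkFile = path.join(contextDir, 'current-work.md');

const sections = [
  '# Current Development Context',
  '',
  '## Current Focus',
  fs.existsSync(currentWorkFile)
    ? fs.readFileSync(currentWorkFile, 'utf8').trim()
    : '_No current-work.md found._',
  '',
];

fs.writeFileSync(path.join(contextDir, 'copilot-context.md'), sections.join('\n'));
```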

## VS Code + Copilot Integration Patterns

### 1. Enhanced Code Completion
The workspace provides context that improves Copilot suggestions:

```markdown
<!-- File: .workspace/context/copilot-context.md -->
# Current Development Context

## Project Patterns
- Using TypeScript with strict mode
- React functional components with hooks
- Jest for testing with custom matchers
- Error handling with custom error classes

## Current Feature: User Authentication
- Implementing OAuth 2.0 flow
- Using JWT tokens for session management
- Validating with Zod schemas
- Following existing AuthService patterns
```

When Copilot sees this context, it generates more relevant suggestions that match your project patterns.

### 2. AI-Assisted Agent Handoffs
```bash
# Handoff work with AI context
npm run workspace-handoff create --to dev --work "Copilot helped implement auth flow" --notes "AI suggested OAuth patterns, validated with team standards"

# Track AI-assisted development
npm run workspace-sync   # Updates AI metrics and context
```

### 3. Quality Tracking for AI Code
```bash
# Monitor AI-generated code quality
npm run workspace-health   # Includes AI code metrics

# Sample health report for AI code:
# - Copilot suggestion acceptance rate: 85%
# - AI-generated code coverage: 92%
# - Pattern consistency with existing code: 94%
```
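
The same numbers can be read programmatically from `.workspace/quality/ai-metrics.json` listed earlier. A sketch, assuming the field names mirror the report labels above (the actual schema is defined by the health utility):

```javascript
// Summarize AI code metrics from .workspace/quality/ai-metrics.json.
// The field names below are assumptions based on the report labels.
const fs = require('fs');
const path = require('path');

const metricsFile = path.join(process.cwd(), '.workspace', 'quality', 'ai-metrics.json');
const metrics = JSON.parse(fs.readFileSync(metricsFile, 'utf8'));

console.log(`Suggestion acceptance: ${metrics.suggestionAcceptance ?? 'n/a'}%`);
console.log(`Quality score:         ${metrics.qualityScore ?? 'n/a'}/100`);
console.log(`Pattern consistency:   ${metrics.patternConsistency ?? 'n/a'}%`);
```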

## Best Practices for Copilot Users

### 🚀 Starting AI-Assisted Development

#### 1. Initialize Context
```bash
# Start workspace
npm run workspace-init

# Update context for Copilot
npm run workspace-sync
```

#### 2. Prepare Context Files
Create `.workspace/context/copilot-context.md`:
```markdown
# Development Context for Copilot

## Current Sprint Goals
- Implement user authentication system
- Add data validation layer
- Create responsive dashboard UI

## Code Standards
- TypeScript strict mode
- Functional React components
- Comprehensive error handling
- 90%+ test coverage requirement

## Architecture Patterns
- Clean Architecture with dependency injection
- Repository pattern for data access
- Command/Query separation
- Event-driven updates
```

### 🔧 During Development

#### Optimizing Copilot Suggestions
1. **Keep context updated**: Add relevant information to workspace context files
2. **Reference patterns**: Maintain `.workspace/context/code-patterns.md` with examples
3. **Track decisions**: Document AI-suggested approaches in `.workspace/decisions/`

#### Context-Driven Development
```bash
# Before major feature work
npm run workspace-sync   # Ensures Copilot has latest context

# After Copilot generates significant code
npm run workspace-handoff create --notes "Copilot implemented OAuth flow"

# Regular quality checks
npm run workspace-health   # Monitor AI code quality
```

### 📊 AI Code Quality Management

#### Tracking AI Contributions
The workspace system tracks:
- **AI Suggestion Acceptance Rate**: How often you accept Copilot suggestions
- **Code Quality Metrics**: Quality of AI-generated vs human-written code
- **Pattern Consistency**: How well AI code matches project patterns
- **Test Coverage**: Coverage of AI-generated code vs requirements

```bash
# View AI metrics
npm run workspace-health --ai-focus

# Sample output:
# 🤖 AI Code Metrics:
# • Suggestion acceptance: 78%
# • Quality score: 92/100
# • Pattern consistency: 89%
# • Test coverage: 85%
```

## VS Code Workspace Configuration

### 1. Workspace Settings
Add to `.vscode/settings.json`:
```json
{
  "github.copilot.enable": {
    "*": true,
    "yaml": false,
    "plaintext": false
  },
  "github.copilot.advanced": {
    "secret_key": "github-copilot-bmad-workspace",
    "length": 500,
    "temperature": 0.1
  },
  "bmad.workspace.integration": true,
  "bmad.workspace.contextFiles": [
    ".workspace/context/copilot-context.md",
    ".workspace/context/code-patterns.md"
  ]
}
```

### 2. Tasks Integration
Add to `.vscode/tasks.json`:
```json
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "BMAD: Workspace Init",
      "type": "shell",
      "command": "npm run workspace-init",
      "group": "build",
      "presentation": {
        "echo": true,
        "reveal": "always",
        "focus": false,
        "panel": "shared"
      }
    },
    {
      "label": "BMAD: Sync Context for Copilot",
      "type": "shell",
      "command": "npm run workspace-sync",
      "group": "build"
    },
    {
      "label": "BMAD: AI Health Check",
      "type": "shell",
      "command": "npm run workspace-health --ai-focus",
      "group": "test",
      "presentation": {
        "reveal": "always"
      }
    }
  ]
}
```

### 3. Keybindings
Add to `.vscode/keybindings.json`:
```json
[
  {
    "key": "ctrl+shift+w ctrl+shift+i",
    "command": "workbench.action.tasks.runTask",
    "args": "BMAD: Workspace Init"
  },
  {
    "key": "ctrl+shift+w ctrl+shift+s",
    "command": "workbench.action.tasks.runTask",
    "args": "BMAD: Sync Context for Copilot"
  },
  {
    "key": "ctrl+shift+w ctrl+shift+h",
    "command": "workbench.action.tasks.runTask",
    "args": "BMAD: AI Health Check"
  }
]
```

## Advanced Copilot Integration

### 1. Context-Aware Prompts
Use workspace context to improve Copilot prompts:

```javascript
// Example: Using workspace context in comments for better suggestions
// Based on .workspace/context/auth-patterns.md, implement OAuth login
// Following the AuthService pattern established in the workspace
// Include error handling as defined in .workspace/context/error-patterns.md
function loginWithOAuth(provider) {
  // Copilot will generate code matching your established patterns
}
```

### 2. AI Decision Tracking
```bash
# Track AI-suggested architectural decisions
npm run workspace-handoff create --to architect --work "Review Copilot-suggested auth architecture" --notes "AI suggested JWT with refresh tokens, need validation"
```

### 3. Quality Gates for AI Code
```bash
# Set up quality gates that include AI metrics
npm run workspace-health --exit-code   # Fails CI if AI code quality below threshold
```

## Troubleshooting

### Common Issues

**"Copilot not using workspace context"**
- Ensure context files are in `.workspace/context/`
- Update context with `npm run workspace-sync`
- Restart VS Code to refresh Copilot context

**"Poor quality AI suggestions"**
- Check workspace context is current: `npm run workspace-status`
- Update code patterns: Edit `.workspace/context/code-patterns.md`
- Verify session tracking: Should show `github-copilot` IDE type

**"AI metrics not tracking"**
- Initialize workspace: `npm run workspace-init`
- Check health status: `npm run workspace-health`
- Verify VS Code workspace settings include BMAD integration

### Copilot-Specific Issues

**"Suggestions don't match project patterns"**
- Update `.workspace/context/copilot-context.md` with current patterns
- Add examples to `.workspace/context/code-patterns.md`
- Sync context: `npm run workspace-sync`

**"AI quality scores are low"**
- Review Copilot suggestion acceptance rate
- Update context files with better examples
- Consider adjusting Copilot temperature in VS Code settings

## Performance Optimization

### Context File Management
```bash
# Keep context files optimized for Copilot
npm run workspace-cleanup --ai-optimize

# Compress large context files
npm run workspace-sync --compress
```

### Selective AI Context
Only include relevant context for current work:

```markdown
<!-- .workspace/context/copilot-context.md -->
# Current Focus: Authentication Module

## Relevant Patterns (for this sprint only)
- OAuth 2.0 implementation patterns
- JWT token validation
- Error handling for auth failures

## Not Currently Relevant
<!-- Keep other patterns commented until needed -->
<!-- - Payment processing patterns -->
<!-- - Data visualization patterns -->
```

## Integration Examples

### Example 1: Feature Development with AI Assistance
```bash
# 1. Initialize workspace
npm run workspace-init

# 2. Prepare context for new feature
echo "Implementing user profile management with Copilot assistance" > .workspace/context/current-work.md

# 3. Sync context for Copilot
npm run workspace-sync

# 4. Develop with Copilot
# (VS Code: Copilot uses workspace context for better suggestions)

# 5. Track AI contribution
npm run workspace-handoff create --work "Profile management with AI assistance" --notes "Copilot suggested efficient CRUD patterns"

# 6. Quality check
npm run workspace-health --ai-focus
```

### Example 2: AI-Assisted Code Review
```bash
# 1. After development phase
npm run workspace-sync   # Update context with recent changes

# 2. Create review handoff
npm run workspace-handoff create --to qa --work "Review AI-assisted profile implementation"

# 3. QA checks AI code quality
npm run workspace-health   # Shows AI code metrics

# 4. Address any quality issues
# (Use workspace context to improve code consistency)
```

---

*This guide optimizes GitHub Copilot integration with BMAD workspace for enhanced AI-assisted development. The workspace system provides context that improves AI suggestions while maintaining team collaboration and code quality.*

@ -0,0 +1,306 @@
# BMAD Workspace Integration - Windsurf IDE

## Overview
BMAD workspace utilities provide full compatibility with Windsurf IDE, enabling seamless AI-assisted collaborative development with intelligent workspace management.

## Setup

### 1. Initialize Workspace
```bash
npm run workspace-init
```
Windsurf-specific setup includes:
- Session tracking optimized for Windsurf AI agent workflows
- Context sharing compatible with Windsurf's AI conversation memory
- Integration with Windsurf's project understanding capabilities

### 2. Verify Integration
```bash
npm run workspace-status
```
Expected output for Windsurf:
- ✅ Active session detected (windsurf)
- 🤖 AI agent compatibility enabled
- 📁 Workspace structure optimized for AI context

## Windsurf-Specific Features

### 🤖 AI Agent Integration
- **Context Continuity**: Workspace context integrates with Windsurf's AI memory
- **Agent Handoffs**: Seamless transitions between human developers and AI agents
- **Conversation Context**: Workspace state informs AI assistant conversations
- **Multi-Modal Support**: Workspace handles code, documentation, and AI interactions

### 🧠 Intelligent Workspace Features

#### AI-Enhanced Session Management
```bash
npm run workspace-init     # Creates AI-aware session with conversation context
npm run workspace-status   # Shows AI agent activity and human collaboration
npm run workspace-sync     # Synchronizes with Windsurf AI conversation memory
```

#### Smart Agent Handoffs
```bash
# Handoff to AI agent
npm run workspace-handoff create --to ai --work "Implement user authentication"

# Handoff to human developer
npm run workspace-handoff create --to dev --work "Review AI-generated code"

# Handoff to QA with AI context
npm run workspace-handoff create --to qa --work "Test AI-implemented features"
```

## Windsurf Integration Patterns

### 1. AI Conversation Context
The workspace system automatically integrates with Windsurf's AI conversations:

```markdown
# Example: AI Context Integration
## Current Workspace State
- **Session**: windsurf-session-abc123
- **Active Work**: Feature development with AI assistance
- **Context Files**: 12 shared context files
- **Recent Handoffs**: AI → Developer → QA

## AI Conversation Summary
- Discussed authentication implementation approach
- Generated user model and service layer
- Identified testing requirements for AI-generated code
```

### 2. Multi-Agent Workflows
Windsurf supports both AI and human agents in the same workspace:

```bash
# Check active agents (AI + human)
npm run workspace-status

# Create handoff between AI and human agents
npm run workspace-handoff create --from ai --to dev --work "Code review needed"

# Sync context for AI understanding
npm run workspace-sync
```

### 3. Intelligent Context Sharing
The workspace adapts to Windsurf's AI capabilities:

- **Code Understanding**: AI agents can reference workspace context files
- **Conversation Memory**: Workspace state informs ongoing AI conversations
- **Decision Tracking**: AI and human decisions are recorded together
- **Quality Monitoring**: AI-generated code tracked through quality metrics

## Best Practices for Windsurf Users

### 🚀 Starting AI-Assisted Development
1. **Initialize workspace**: `npm run workspace-init`
2. **Verify AI integration**: Check session shows `windsurf` IDE type
3. **Sync with AI context**: `npm run workspace-sync`
4. **Begin AI conversation**: Reference workspace context in Windsurf chat

### 🤖 Working with AI Agents
- **Context Sharing**: Add important context to `.workspace/context/` for AI reference
- **Decision Recording**: Document AI suggestions in `.workspace/decisions/`
- **Quality Tracking**: Monitor AI-generated code through workspace quality reports
- **Handoff Preparation**: Use workspace handoffs when switching between AI and human work

### 🔄 AI-Human Collaboration Patterns

#### Pattern 1: AI-First Development
```bash
# 1. Start with AI agent
npm run workspace-handoff create --to ai --work "Initial implementation"

# 2. AI implements core functionality
# (AI adds context to workspace automatically)

# 3. Handoff to human for review
npm run workspace-handoff create --from ai --to dev --work "Review and refine"

# 4. Human reviews and improves
npm run workspace-sync   # Get latest AI context

# 5. Handoff to QA
npm run workspace-handoff create --to qa --work "Test AI-assisted implementation"
```

#### Pattern 2: Human-AI Pair Programming
```bash
# Continuous sync during pair programming
npm run workspace-sync   # Before AI conversation
# ... work with AI in Windsurf ...
npm run workspace-sync   # After AI generates code
```

### 📊 Quality Monitoring for AI Code
```bash
# Health check includes AI code quality metrics
npm run workspace-health

# Specific checks for AI-generated code:
# - Code consistency with human patterns
# - Integration with existing codebase
# - Test coverage for AI implementations
```

## Windsurf-Specific Configuration

### Environment Variables
```bash
# Set in your environment or .env file
export IDE_TYPE=windsurf
export WINDSURF_AI_INTEGRATION=true
export WORKSPACE_AI_CONTEXT=true
```

### AI Context Files
The workspace creates Windsurf-specific context files:

```
📁 .workspace/
├── 📂 ai-context/     # AI conversation summaries
├── 📂 ai-decisions/   # AI-suggested architectural decisions
├── 📂 ai-handoffs/    # AI ↔ Human work transitions
└── 📂 ai-quality/     # Quality metrics for AI-generated code
```

### Windsurf AI Prompts
Use these patterns in Windsurf AI conversations:

```
"Check the workspace context in .workspace/context/sync-summary.md before implementing"

"Consider the recent handoff details in .workspace/handoffs/ for this feature"

"Review the workspace quality metrics in .workspace/quality/ to ensure consistency"

"Update the workspace context with your implementation approach"
```

## Advanced Windsurf Integration

### 1. Custom AI Workflows
```json
// package.json additions for AI workflows
{
  "scripts": {
    "ai-handoff": "npm run workspace-handoff create --to ai",
    "ai-review": "npm run workspace-sync && echo 'Context ready for AI review'",
    "ai-quality": "npm run workspace-health --ai-focus"
  }
}
```
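
One caveat for the `ai-handoff` script above: the bundled handoff utility validates `--to` against its built-in agent list (dev, qa, sm, analyst, architect, ux-expert, pm, po), which has no `ai` entry, so the command would be rejected as an unknown agent. Either substitute a listed agent id, or extend the agent list; a hypothetical sketch of the extra entry, assuming the utility lives at `workspace-utils/handoff.js`:

```javascript
// Hypothetical: the shape of an extra entry for getAvailableAgents() so that
// `--to ai` passes validation. The 'ai' agent is not part of the shipped list.
const { getAvailableAgents } = require('./workspace-utils/handoff');

const agents = [
  ...getAvailableAgents(),
  { id: 'ai', name: 'Windsurf AI Agent', description: 'AI-assisted implementation and review' },
];

console.log(agents.map((agent) => agent.id).join(', '));
```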

### 2. AI Context Optimization
```bash
# Optimize workspace for AI understanding
npm run workspace-cleanup --ai-optimize

# Generate AI-friendly summaries
npm run workspace-sync --ai-summary

# Health check with AI code focus
npm run workspace-health --ai-metrics
```

### 3. Multi-Modal Context
Windsurf can handle various content types in workspace:

- **Code Files**: Traditional source code with AI annotations
- **Documentation**: AI-generated and human-written docs
- **Conversations**: AI chat history integrated with workspace
- **Decisions**: Joint AI-human architectural decisions
- **Quality Reports**: AI code quality metrics and human reviews

## Troubleshooting

### AI Integration Issues

**"AI context not loading"**
- Verify `WINDSURF_AI_INTEGRATION=true` environment variable
- Check workspace initialization: `npm run workspace-init`
- Sync context manually: `npm run workspace-sync`

**"Handoff between AI and human not working"**
- Ensure both agent types are recognized: `npm run workspace-handoff agents`
- Check session is properly initialized for Windsurf
- Verify workspace structure: `npm run workspace-health`

**"AI not referencing workspace context"**
- Explicitly reference context files in AI conversations
- Use workspace sync before AI conversations: `npm run workspace-sync`
- Check context file permissions and content

### Windsurf-Specific Issues

**"Windsurf not detecting workspace"**
- Initialize from project root: `npm run workspace-init`
- Check IDE detection: Session should show `windsurf` type
- Restart Windsurf if needed

**"AI conversation memory conflicts with workspace"**
- Workspace context complements AI memory rather than replacing it
- Use `npm run workspace-sync` to align contexts
- Clear workspace if needed: `npm run workspace-cleanup --force`

## Performance Optimization

### AI Context Efficiency
- **Selective Context**: Only share relevant context with AI
- **Context Summarization**: Use workspace summaries for large projects
- **Regular Cleanup**: Remove outdated AI context regularly

```bash
# Optimize workspace for AI performance
npm run workspace-cleanup --ai-optimize

# Generate efficient AI summaries
npm run workspace-sync --compress
```

### Memory Management
- **Conversation Limits**: Workspace helps track long AI conversations
- **Context Rotation**: Older context automatically archived (see the sketch below)
- **Session Cleanup**: Stale AI sessions cleaned up automatically
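
Archiving can also be triggered by hand when a conversation thread is finished. A minimal sketch that moves context files older than 30 days into `.workspace/archive/`; the 30-day cutoff is an arbitrary example, and `archive/` is the folder the health check already expects:

```javascript
// Move context files older than 30 days into .workspace/archive/.
const fs = require('fs');
const path = require('path');

const workspace = path.join(process.cwd(), '.workspace');
const contextDir = path.join(workspace, 'context');
const archiveDir = path.join(workspace, 'archive');
fs.mkdirSync(archiveDir, { recursive: true });

const cutoff = Date.now() - 30 * 24 * 60 * 60 * 1000;
for (const file of fs.readdirSync(contextDir)) {
  const source = path.join(contextDir, file);
  const stats = fs.statSync(source);
  if (stats.isFile() && stats.mtimeMs < cutoff) {
    fs.renameSync(source, path.join(archiveDir, file));
  }
}
```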

## Integration Examples

### Example 1: AI Feature Implementation
```bash
# 1. Initialize workspace for AI work
npm run workspace-init

# 2. Create handoff to AI
npm run workspace-handoff create --to ai --work "Implement user dashboard"

# 3. Work with AI in Windsurf
# AI: "I see from the workspace context that we're using React. I'll implement..."

# 4. AI completes work, human reviews
npm run workspace-sync   # Get AI's context updates

# 5. Handoff to QA
npm run workspace-handoff create --from ai --to qa --work "Test dashboard implementation"
```

### Example 2: AI Code Review
```bash
# 1. Human completes feature
npm run workspace-handoff create --to ai --work "Review authentication logic"

# 2. AI reviews with workspace context
# AI: "Based on the workspace quality metrics, I recommend..."

# 3. Apply AI suggestions
npm run workspace-sync   # Update with AI feedback

# 4. Final quality check
npm run workspace-health --ai-review
```

---

*This guide is optimized for Windsurf IDE's AI capabilities. The workspace system enhances AI-human collaboration while maintaining compatibility with traditional development workflows.*

@ -0,0 +1,399 @@
|
||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* BMAD Workspace Handoff Utility
|
||||||
|
* Cross-IDE agent handoff management and coordination
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const crypto = require('crypto');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get available BMAD agents
|
||||||
|
*/
|
||||||
|
function getAvailableAgents() {
|
||||||
|
const agents = [
|
||||||
|
{ id: 'dev', name: 'Developer (James)', description: 'Code implementation and debugging' },
|
||||||
|
{ id: 'qa', name: 'QA Engineer (Quinn)', description: 'Quality validation and testing' },
|
||||||
|
{ id: 'sm', name: 'Scrum Master (Morgan)', description: 'Story creation and project coordination' },
|
||||||
|
{ id: 'analyst', name: 'Business Analyst (Alex)', description: 'Requirements analysis and research' },
|
||||||
|
{ id: 'architect', name: 'Technical Architect (Sam)', description: 'System design and architecture' },
|
||||||
|
{ id: 'ux-expert', name: 'UX Expert (Jordan)', description: 'User experience and interface design' },
|
||||||
|
{ id: 'pm', name: 'Product Manager (John)', description: 'Product strategy and PRD creation' },
|
||||||
|
{ id: 'po', name: 'Product Owner (Sarah)', description: 'Backlog management and acceptance criteria' }
|
||||||
|
];
|
||||||
|
|
||||||
|
return agents;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create handoff context package
|
||||||
|
*/
|
||||||
|
function createHandoffContext(workspacePath, fromAgent, toAgent, currentWork, notes = '') {
|
||||||
|
const handoffId = crypto.randomBytes(6).toString('hex');
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
// Gather current workspace context
|
||||||
|
const contextPath = path.join(workspacePath, 'context');
|
||||||
|
const contextFiles = fs.existsSync(contextPath) ?
|
||||||
|
fs.readdirSync(contextPath).filter(f => f.endsWith('.md') || f.endsWith('.json')) : [];
|
||||||
|
|
||||||
|
// Get recent progress
|
||||||
|
const progressPath = path.join(workspacePath, 'progress');
|
||||||
|
const recentProgress = [];
|
||||||
|
if (fs.existsSync(progressPath)) {
|
||||||
|
const progressFiles = fs.readdirSync(progressPath)
|
||||||
|
.filter(f => f.endsWith('.md'))
|
||||||
|
.sort()
|
||||||
|
.slice(-5); // Last 5 progress files
|
||||||
|
|
||||||
|
for (const file of progressFiles) {
|
||||||
|
try {
|
||||||
|
const content = fs.readFileSync(path.join(progressPath, file), 'utf8');
|
||||||
|
recentProgress.push({
|
||||||
|
file: file,
|
||||||
|
preview: content.substring(0, 200) + (content.length > 200 ? '...' : '')
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
// Skip corrupted files
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current session info
|
||||||
|
const sessionsPath = path.join(workspacePath, 'sessions');
|
||||||
|
let currentSession = null;
|
||||||
|
if (fs.existsSync(sessionsPath)) {
|
||||||
|
const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
|
||||||
|
for (const file of sessionFiles) {
|
||||||
|
try {
|
||||||
|
const sessionData = JSON.parse(fs.readFileSync(path.join(sessionsPath, file), 'utf8'));
|
||||||
|
const lastHeartbeat = new Date(sessionData.lastHeartbeat);
|
||||||
|
const timeDiff = new Date() - lastHeartbeat;
|
||||||
|
if (timeDiff < 3600000) { // Active within last hour
|
||||||
|
currentSession = sessionData;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// Skip corrupted session files
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const handoffData = {
|
||||||
|
id: handoffId,
|
||||||
|
timestamp: timestamp,
|
||||||
|
fromAgent: fromAgent,
|
||||||
|
toAgent: toAgent,
|
||||||
|
currentWork: currentWork,
|
||||||
|
notes: notes,
|
||||||
|
session: currentSession,
|
||||||
|
context: {
|
||||||
|
availableFiles: contextFiles,
|
||||||
|
recentProgress: recentProgress,
|
||||||
|
workspaceHealth: checkBasicHealth(workspacePath)
|
||||||
|
},
|
||||||
|
recommendations: generateHandoffRecommendations(fromAgent, toAgent, currentWork),
|
||||||
|
status: 'pending'
|
||||||
|
};
|
||||||
|
|
||||||
|
return handoffData;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check basic workspace health
|
||||||
|
*/
|
||||||
|
function checkBasicHealth(workspacePath) {
|
||||||
|
const requiredDirs = ['sessions', 'context', 'handoffs', 'progress'];
|
||||||
|
const missingDirs = [];
|
||||||
|
|
||||||
|
for (const dir of requiredDirs) {
|
||||||
|
if (!fs.existsSync(path.join(workspacePath, dir))) {
|
||||||
|
missingDirs.push(dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
score: missingDirs.length === 0 ? 100 : Math.max(0, 100 - (missingDirs.length * 25)),
|
||||||
|
missingDirectories: missingDirs
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate handoff recommendations
|
||||||
|
*/
|
||||||
|
function generateHandoffRecommendations(fromAgent, toAgent, currentWork) {
|
||||||
|
const recommendations = [];
|
||||||
|
|
||||||
|
// Agent-specific recommendations
|
||||||
|
if (fromAgent === 'dev' && toAgent === 'qa') {
|
||||||
|
recommendations.push('Ensure all code changes are committed and pushed');
|
||||||
|
recommendations.push('Run tests and provide test results');
|
||||||
|
recommendations.push('Document any known issues or edge cases');
|
||||||
|
recommendations.push('Specify testing priorities and focus areas');
|
||||||
|
} else if (fromAgent === 'sm' && toAgent === 'dev') {
|
||||||
|
recommendations.push('Review story acceptance criteria carefully');
|
||||||
|
recommendations.push('Clarify any ambiguous requirements');
|
||||||
|
recommendations.push('Confirm technical approach with architect if needed');
|
||||||
|
recommendations.push('Set up development environment if not ready');
|
||||||
|
} else if (fromAgent === 'analyst' && toAgent === 'pm') {
|
||||||
|
recommendations.push('Summarize key research findings');
|
||||||
|
recommendations.push('Highlight market opportunities and constraints');
|
||||||
|
recommendations.push('Provide user persona insights');
|
||||||
|
recommendations.push('Recommend feature prioritization approach');
|
||||||
|
} else if (fromAgent === 'architect' && toAgent === 'dev') {
|
||||||
|
recommendations.push('Review architectural decisions and constraints');
|
||||||
|
recommendations.push('Ensure development setup matches architecture');
|
||||||
|
recommendations.push('Clarify any technical implementation details');
|
||||||
|
recommendations.push('Verify third-party dependencies are available');
|
||||||
|
} else if (['dev', 'qa'].includes(fromAgent) && toAgent === 'sm') {
|
||||||
|
recommendations.push('Provide status update on current story');
|
||||||
|
recommendations.push('Report any blockers or impediments');
|
||||||
|
recommendations.push('Suggest story scope adjustments if needed');
|
||||||
|
recommendations.push('Update story progress and completion estimates');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Work-specific recommendations
|
||||||
|
const workLower = currentWork.toLowerCase();
|
||||||
|
if (workLower.includes('bug') || workLower.includes('fix')) {
|
||||||
|
recommendations.push('Provide detailed bug reproduction steps');
|
||||||
|
recommendations.push('Include error logs and stack traces');
|
||||||
|
recommendations.push('Identify root cause if known');
|
||||||
|
} else if (workLower.includes('feature') || workLower.includes('story')) {
|
||||||
|
recommendations.push('Confirm feature requirements are clear');
|
||||||
|
recommendations.push('Verify acceptance criteria are testable');
|
||||||
|
recommendations.push('Ensure dependencies are identified');
|
||||||
|
} else if (workLower.includes('refactor')) {
|
||||||
|
recommendations.push('Document current implementation patterns');
|
||||||
|
recommendations.push('Explain refactoring goals and benefits');
|
||||||
|
recommendations.push('Identify areas of highest risk');
|
||||||
|
}
|
||||||
|
|
||||||
|
// General recommendations
|
||||||
|
recommendations.push('Update workspace context with latest findings');
|
||||||
|
recommendations.push('Review any TODO items or pending decisions');
|
||||||
|
|
||||||
|
return recommendations;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save handoff to workspace
|
||||||
|
*/
|
||||||
|
function saveHandoff(workspacePath, handoffData) {
|
||||||
|
const handoffsPath = path.join(workspacePath, 'handoffs');
|
||||||
|
if (!fs.existsSync(handoffsPath)) {
|
||||||
|
fs.mkdirSync(handoffsPath, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
const handoffFile = path.join(handoffsPath, `${handoffData.id}.json`);
|
||||||
|
fs.writeFileSync(handoffFile, JSON.stringify(handoffData, null, 2));
|
||||||
|
|
||||||
|
// Also create a markdown summary for easy reading
|
||||||
|
const markdownFile = path.join(handoffsPath, `${handoffData.id}.md`);
|
||||||
|
const markdownContent = generateHandoffMarkdown(handoffData);
|
||||||
|
fs.writeFileSync(markdownFile, markdownContent);
|
||||||
|
|
||||||
|
return { handoffFile, markdownFile };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate handoff markdown summary
|
||||||
|
*/
|
||||||
|
function generateHandoffMarkdown(handoffData) {
|
||||||
|
const toAgentInfo = getAvailableAgents().find(a => a.id === handoffData.toAgent);
|
||||||
|
|
||||||
|
return `# Agent Handoff: ${handoffData.fromAgent} → ${handoffData.toAgent}
|
||||||
|
|
||||||
|
**Handoff ID:** ${handoffData.id}
|
||||||
|
**Timestamp:** ${new Date(handoffData.timestamp).toLocaleString()}
|
||||||
|
**To Agent:** ${toAgentInfo?.name || handoffData.toAgent} - ${toAgentInfo?.description || 'Unknown agent'}
|
||||||
|
|
||||||
|
## Current Work
|
||||||
|
${handoffData.currentWork}
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
${handoffData.notes || 'No additional notes provided'}
|
||||||
|
|
||||||
|
## Context Summary
|
||||||
|
- **Available context files:** ${handoffData.context.availableFiles.length}
|
||||||
|
- **Recent progress entries:** ${handoffData.context.recentProgress.length}
|
||||||
|
- **Workspace health:** ${handoffData.context.workspaceHealth.score}/100
|
||||||
|
|
||||||
|
${handoffData.context.recentProgress.length > 0 ? `
|
||||||
|
## Recent Progress
|
||||||
|
${handoffData.context.recentProgress.map((p, i) => `
|
||||||
|
### ${i + 1}. ${p.file}
|
||||||
|
${p.preview}
|
||||||
|
`).join('')}
|
||||||
|
` : ''}
|
||||||
|
|
||||||
|
## Recommendations
|
||||||
|
${handoffData.recommendations.map(r => `- ${r}`).join('\n')}
|
||||||
|
|
||||||
|
## Session Information
|
||||||
|
${handoffData.session ? `
|
||||||
|
- **IDE:** ${handoffData.session.ide}
|
||||||
|
- **User:** ${handoffData.session.user}
|
||||||
|
- **Created:** ${new Date(handoffData.session.created).toLocaleString()}
|
||||||
|
- **Last Activity:** ${new Date(handoffData.session.lastHeartbeat).toLocaleString()}
|
||||||
|
` : 'No active session found'}
|
||||||
|
|
||||||
|
---
|
||||||
|
*Generated by BMAD Cross-IDE Workspace System*
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List recent handoffs
|
||||||
|
*/
|
||||||
|
function listRecentHandoffs(workspacePath, limit = 10) {
|
||||||
|
const handoffsPath = path.join(workspacePath, 'handoffs');
|
||||||
|
if (!fs.existsSync(handoffsPath)) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const handoffFiles = fs.readdirSync(handoffsPath)
|
||||||
|
.filter(f => f.endsWith('.json'))
|
||||||
|
.map(f => {
|
||||||
|
try {
|
||||||
|
const content = fs.readFileSync(path.join(handoffsPath, f), 'utf8');
|
||||||
|
return JSON.parse(content);
|
||||||
|
} catch (e) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter(Boolean)
|
||||||
|
.sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp))
|
||||||
|
.slice(0, limit);
|
||||||
|
|
||||||
|
return handoffFiles;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Main handoff function
|
||||||
|
*/
|
||||||
|
async function manageHandoff(action = 'create', options = {}) {
|
||||||
|
try {
|
||||||
|
const workspacePath = path.join(process.cwd(), '.workspace');
|
||||||
|
|
||||||
|
if (!fs.existsSync(workspacePath)) {
|
||||||
|
console.error('❌ Workspace directory not found.');
|
||||||
|
console.error(' Run `npm run workspace-init` to initialize workspace');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (action === 'list') {
|
||||||
|
console.log('🔄 Recent Agent Handoffs');
|
||||||
|
console.log('========================');
|
||||||
|
|
||||||
|
const handoffs = listRecentHandoffs(workspacePath);
|
||||||
|
if (handoffs.length === 0) {
|
||||||
|
console.log('No handoffs found.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
handoffs.forEach((handoff, index) => {
|
||||||
|
const toAgentInfo = getAvailableAgents().find(a => a.id === handoff.toAgent);
|
||||||
|
console.log(`${index + 1}. ${handoff.id} - ${handoff.fromAgent} → ${handoff.toAgent}`);
|
||||||
|
console.log(` ${toAgentInfo?.name || handoff.toAgent}`);
|
||||||
|
console.log(` ${new Date(handoff.timestamp).toLocaleString()}`);
|
||||||
|
console.log(` Work: ${handoff.currentWork.substring(0, 80)}${handoff.currentWork.length > 80 ? '...' : ''}`);
|
||||||
|
console.log('');
|
||||||
|
});
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (action === 'agents') {
|
||||||
|
console.log('👥 Available BMAD Agents');
|
||||||
|
console.log('========================');
|
||||||
|
|
||||||
|
const agents = getAvailableAgents();
|
||||||
|
agents.forEach((agent, index) => {
|
||||||
|
console.log(`${index + 1}. ${agent.id} - ${agent.name}`);
|
||||||
|
console.log(` ${agent.description}`);
|
||||||
|
console.log('');
|
||||||
|
});
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default create action
|
||||||
|
const fromAgent = options.from || 'unknown';
|
||||||
|
const toAgent = options.to || 'dev';
|
||||||
|
const currentWork = options.work || 'No work description provided';
|
||||||
|
const notes = options.notes || '';
|
||||||
|
|
||||||
|
console.log('🔄 Creating Agent Handoff');
|
||||||
|
console.log('=========================');
|
||||||
|
|
||||||
|
// Validate agents
|
||||||
|
const agents = getAvailableAgents();
|
||||||
|
const toAgentInfo = agents.find(a => a.id === toAgent);
|
||||||
|
|
||||||
|
if (!toAgentInfo) {
|
||||||
|
console.error(`❌ Unknown target agent: ${toAgent}`);
|
||||||
|
console.error('Available agents:');
|
||||||
|
agents.forEach(a => console.error(` ${a.id} - ${a.name}`));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create handoff context
|
||||||
|
const handoffData = createHandoffContext(workspacePath, fromAgent, toAgent, currentWork, notes);
|
||||||
|
|
||||||
|
// Save handoff
|
||||||
|
const files = saveHandoff(workspacePath, handoffData);
|
||||||
|
|
||||||
|
// Log handoff activity
|
||||||
|
const logEntry = {
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
action: 'agent-handoff',
|
||||||
|
handoffId: handoffData.id,
|
||||||
|
fromAgent: fromAgent,
|
||||||
|
toAgent: toAgent,
|
||||||
|
user: process.env.USER || process.env.USERNAME || 'unknown'
|
||||||
|
};
|
||||||
|
|
||||||
|
// Ensure the logs directory exists before appending to the activity log
const logPath = path.join(workspacePath, 'logs', 'workspace.log');
fs.mkdirSync(path.dirname(logPath), { recursive: true });
fs.appendFileSync(logPath, JSON.stringify(logEntry) + '\n');
|
||||||
|
|
||||||
|
// Success output
|
||||||
|
console.log('✅ Handoff created successfully');
|
||||||
|
console.log('==============================');
|
||||||
|
console.log(`🆔 Handoff ID: ${handoffData.id}`);
|
||||||
|
console.log(`👤 From: ${fromAgent} → ${toAgentInfo.name}`);
|
||||||
|
console.log(`📝 Work: ${currentWork}`);
|
||||||
|
console.log(`📁 Handoff file: ${path.basename(files.handoffFile)}`);
|
||||||
|
console.log(`📄 Summary: ${path.basename(files.markdownFile)}`);
|
||||||
|
console.log(`\n📋 Recommendations for ${toAgentInfo.name}:`);
|
||||||
|
handoffData.recommendations.forEach(rec => console.log(` • ${rec}`));
|
||||||
|
|
||||||
|
console.log('\n🚀 Next steps:');
|
||||||
|
console.log(` 1. Review handoff details in: .workspace/handoffs/${handoffData.id}.md`);
|
||||||
|
console.log(` 2. Start working with the ${toAgentInfo.name} agent`);
|
||||||
|
console.log(` 3. Update workspace context as work progresses`);
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('❌ Failed to manage handoff:', error.message);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Command line execution
|
||||||
|
if (require.main === module) {
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const action = args[0] || 'create';
|
||||||
|
|
||||||
|
const options = {};
|
||||||
|
for (let i = 1; i < args.length; i += 2) {
|
||||||
|
const key = args[i]?.replace('--', '');
|
||||||
|
const value = args[i + 1];
|
||||||
|
if (key && value) {
|
||||||
|
options[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
manageHandoff(action, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = { manageHandoff, createHandoffContext, getAvailableAgents };
|
||||||
|
|
@ -0,0 +1,549 @@
|
||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* BMAD Workspace Health Check Utility
|
||||||
|
* Cross-IDE workspace health monitoring and diagnostics
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check directory structure integrity
|
||||||
|
*/
|
||||||
|
function checkDirectoryStructure(workspacePath) {
|
||||||
|
const requiredDirs = [
|
||||||
|
{ name: 'sessions', critical: true, description: 'Session management' },
|
||||||
|
{ name: 'context', critical: true, description: 'Shared context storage' },
|
||||||
|
{ name: 'handoffs', critical: true, description: 'Agent handoff coordination' },
|
||||||
|
{ name: 'decisions', critical: false, description: 'Decision tracking' },
|
||||||
|
{ name: 'progress', critical: false, description: 'Progress monitoring' },
|
||||||
|
{ name: 'quality', critical: false, description: 'Quality reports' },
|
||||||
|
{ name: 'archive', critical: false, description: 'Archived data' },
|
||||||
|
{ name: 'hooks', critical: false, description: 'Integration hooks' },
|
||||||
|
{ name: 'templates', critical: false, description: 'Workspace templates' },
|
||||||
|
{ name: 'logs', critical: true, description: 'Activity logging' }
|
||||||
|
];
|
||||||
|
|
||||||
|
const results = {
|
||||||
|
score: 100,
|
||||||
|
issues: [],
|
||||||
|
missing: [],
|
||||||
|
present: []
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const dir of requiredDirs) {
|
||||||
|
const dirPath = path.join(workspacePath, dir.name);
|
||||||
|
if (fs.existsSync(dirPath)) {
|
||||||
|
results.present.push(dir);
|
||||||
|
} else {
|
||||||
|
results.missing.push(dir);
|
||||||
|
const penalty = dir.critical ? 15 : 5;
|
||||||
|
results.score -= penalty;
|
||||||
|
results.issues.push(`Missing ${dir.critical ? 'critical' : 'optional'} directory: ${dir.name} (${dir.description})`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check workspace configuration
|
||||||
|
*/
|
||||||
|
function checkWorkspaceConfig(workspacePath) {
|
||||||
|
const configPath = path.join(workspacePath, 'workspace-config.json');
|
||||||
|
const results = {
|
||||||
|
score: 100,
|
||||||
|
issues: [],
|
||||||
|
valid: false,
|
||||||
|
config: null
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!fs.existsSync(configPath)) {
|
||||||
|
results.score = 0;
|
||||||
|
results.issues.push('Missing workspace configuration file');
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
    const configContent = fs.readFileSync(configPath, 'utf8');
    const config = JSON.parse(configContent);

    // Validate required fields
    const requiredFields = ['version', 'created', 'features', 'settings'];
    for (const field of requiredFields) {
      if (!config[field]) {
        results.score -= 20;
        results.issues.push(`Missing required config field: ${field}`);
      }
    }

    // Check feature flags
    if (config.features) {
      const expectedFeatures = ['crossIDESupport', 'sessionManagement', 'contextPersistence', 'agentHandoffs'];
      for (const feature of expectedFeatures) {
        if (config.features[feature] !== true) {
          results.score -= 5;
          results.issues.push(`Feature not enabled: ${feature}`);
        }
      }
    }

    results.valid = true;
    results.config = config;

  } catch (e) {
    results.score = 0;
    results.issues.push(`Corrupted configuration file: ${e.message}`);
  }

  return results;
}

/**
 * Check session health
 */
function checkSessionHealth(workspacePath) {
  const sessionsPath = path.join(workspacePath, 'sessions');
  const results = {
    score: 100,
    issues: [],
    totalSessions: 0,
    activeSessions: 0,
    staleSessions: 0,
    corruptedSessions: 0,
    sessions: []
  };

  if (!fs.existsSync(sessionsPath)) {
    results.score = 0;
    results.issues.push('Sessions directory not found');
    return results;
  }

  const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
  results.totalSessions = sessionFiles.length;

  const now = new Date();

  for (const file of sessionFiles) {
    try {
      const sessionPath = path.join(sessionsPath, file);
      const sessionContent = fs.readFileSync(sessionPath, 'utf8');
      const sessionData = JSON.parse(sessionContent);

      // Validate session structure
      const requiredFields = ['id', 'created', 'lastHeartbeat', 'ide', 'user'];
      let isValid = true;

      for (const field of requiredFields) {
        if (!sessionData[field]) {
          isValid = false;
          break;
        }
      }

      if (!isValid) {
        results.corruptedSessions++;
        results.score -= 5;
        results.issues.push(`Invalid session structure: ${file}`);
        continue;
      }

      // Check session freshness
      const lastHeartbeat = new Date(sessionData.lastHeartbeat);
      const timeDiff = now - lastHeartbeat;

      if (timeDiff < 3600000) { // 1 hour
        results.activeSessions++;
        sessionData.status = 'active';
      } else if (timeDiff < 86400000) { // 24 hours
        sessionData.status = 'idle';
      } else {
        results.staleSessions++;
        sessionData.status = 'stale';
      }

      sessionData.timeSinceLastHeartbeat = timeDiff;
      results.sessions.push(sessionData);

    } catch (e) {
      results.corruptedSessions++;
      results.score -= 10;
      results.issues.push(`Corrupted session file: ${file}`);
    }
  }

  // Penalty for too many stale sessions
  if (results.staleSessions > 5) {
    results.score -= (results.staleSessions - 5) * 2;
    results.issues.push(`Excessive stale sessions: ${results.staleSessions}`);
  }

  return results;
}

/**
 * Check file system permissions
 */
function checkFileSystemPermissions(workspacePath) {
  const results = {
    score: 100,
    issues: [],
    canRead: false,
    canWrite: false,
    canExecute: false
  };

  try {
    // Test read permission
    fs.readdirSync(workspacePath);
    results.canRead = true;

    // Test write permission
    const testFile = path.join(workspacePath, '.health-check-write-test');
    fs.writeFileSync(testFile, 'test');
    fs.unlinkSync(testFile);
    results.canWrite = true;

    // Test execute permission (create and run a temporary script)
    const testScript = path.join(workspacePath, '.health-check-exec-test.js');
    fs.writeFileSync(testScript, 'console.log("test");');

    // Try to require the file to test execution capability
    require(testScript);
    fs.unlinkSync(testScript);
    results.canExecute = true;

  } catch (e) {
    if (!results.canRead) {
      results.score = 0;
      results.issues.push('Cannot read workspace directory');
    } else if (!results.canWrite) {
      results.score -= 50;
      results.issues.push('Cannot write to workspace directory');
    } else if (!results.canExecute) {
      results.score -= 20;
      results.issues.push('Limited script execution permissions');
    }
  }

  return results;
}

/**
 * Check log file health
 */
function checkLogHealth(workspacePath) {
  const logPath = path.join(workspacePath, 'logs', 'workspace.log');
  const results = {
    score: 100,
    issues: [],
    exists: false,
    size: 0,
    recentEntries: 0,
    corruptedEntries: 0
  };

  if (!fs.existsSync(logPath)) {
    results.score -= 30;
    results.issues.push('Workspace log file not found');
    return results;
  }

  try {
    const stats = fs.statSync(logPath);
    results.exists = true;
    results.size = stats.size;

    // Check log size
    const logSizeMB = stats.size / (1024 * 1024);
    if (logSizeMB > 50) {
      results.score -= 15;
      results.issues.push(`Large log file: ${logSizeMB.toFixed(1)}MB`);
    }

    // Analyze recent log entries
    const logContent = fs.readFileSync(logPath, 'utf8');
    const logLines = logContent.trim().split('\n');

    const now = new Date();
    const oneDayAgo = now - 86400000; // 24 hours

    for (const line of logLines.slice(-100)) { // Check last 100 entries
      if (line.trim() === '') continue;

      try {
        const entry = JSON.parse(line);
        const entryTime = new Date(entry.timestamp);

        if (entryTime > oneDayAgo) {
          results.recentEntries++;
        }
      } catch (e) {
        results.corruptedEntries++;
      }
    }

    if (results.corruptedEntries > 10) {
      results.score -= results.corruptedEntries;
      results.issues.push(`Multiple corrupted log entries: ${results.corruptedEntries}`);
    }

    if (results.recentEntries === 0) {
      results.score -= 20;
      results.issues.push('No recent activity in logs');
    }

  } catch (e) {
    results.score -= 25;
    results.issues.push(`Cannot analyze log file: ${e.message}`);
  }

  return results;
}

/**
 * Check cross-IDE compatibility features
 */
function checkCrossIDECompatibility(workspacePath) {
  const results = {
    score: 100,
    issues: [],
    ideSupport: {},
    templateCount: 0,
    hookCount: 0
  };

  // Check for IDE-specific templates
  const templatesPath = path.join(workspacePath, 'templates');
  if (fs.existsSync(templatesPath)) {
    const templateFiles = fs.readdirSync(templatesPath).filter(f => f.endsWith('.md'));
    results.templateCount = templateFiles.length;

    const supportedIDEs = ['cursor', 'windsurf', 'vscode', 'trae', 'roo', 'cline', 'gemini', 'github-copilot'];

    for (const ide of supportedIDEs) {
      const ideTemplate = templateFiles.find(f => f.includes(ide));
      results.ideSupport[ide] = !!ideTemplate;

      if (!ideTemplate) {
        results.score -= 5;
      }
    }

    if (results.templateCount < 4) {
      results.issues.push(`Limited IDE template support: ${results.templateCount} templates found`);
    }
  } else {
    results.score -= 30;
    results.issues.push('IDE templates directory not found');
  }

  // Check for integration hooks
  const hooksPath = path.join(workspacePath, 'hooks');
  if (fs.existsSync(hooksPath)) {
    const hookFiles = fs.readdirSync(hooksPath);
    results.hookCount = hookFiles.length;

    if (results.hookCount === 0) {
      results.score -= 10;
      results.issues.push('No integration hooks configured');
    }
  }

  return results;
}

/**
 * Generate comprehensive health report
 */
function generateHealthReport(workspacePath) {
  const report = {
    timestamp: new Date().toISOString(),
    overallScore: 0,
    status: 'unknown',
    checks: {
      directoryStructure: checkDirectoryStructure(workspacePath),
      workspaceConfig: checkWorkspaceConfig(workspacePath),
      sessionHealth: checkSessionHealth(workspacePath),
      fileSystemPermissions: checkFileSystemPermissions(workspacePath),
      logHealth: checkLogHealth(workspacePath),
      crossIDECompatibility: checkCrossIDECompatibility(workspacePath)
    },
    summary: {
      totalIssues: 0,
      criticalIssues: 0,
      recommendations: []
    }
  };

  // Calculate overall score and issues
  const checks = Object.values(report.checks);
  const totalScore = checks.reduce((sum, check) => sum + check.score, 0);
  report.overallScore = Math.round(totalScore / checks.length);

  // Collect all issues
  const allIssues = checks.flatMap(check => check.issues || []);
  report.summary.totalIssues = allIssues.length;
  report.summary.criticalIssues = allIssues.filter(issue =>
    issue.includes('Missing critical') ||
    issue.includes('Cannot') ||
    issue.includes('Corrupted')
  ).length;

  // Determine status
  if (report.overallScore >= 90) {
    report.status = 'excellent';
  } else if (report.overallScore >= 80) {
    report.status = 'good';
  } else if (report.overallScore >= 70) {
    report.status = 'fair';
  } else if (report.overallScore >= 60) {
    report.status = 'poor';
  } else {
    report.status = 'critical';
  }

  // Generate recommendations
  if (report.checks.directoryStructure.missing.length > 0) {
    report.summary.recommendations.push('Run `npm run workspace-cleanup` to repair directory structure');
  }

  if (report.checks.sessionHealth.staleSessions > 5) {
    report.summary.recommendations.push('Clean up stale sessions with `npm run workspace-cleanup`');
  }

  if (report.checks.logHealth.size > 52428800) { // 50MB
    report.summary.recommendations.push('Archive large log files to improve performance');
  }

  if (report.checks.crossIDECompatibility.templateCount < 4) {
    report.summary.recommendations.push('Generate additional IDE-specific templates for better compatibility');
  }

  if (report.summary.criticalIssues > 0) {
    report.summary.recommendations.push('Address critical issues immediately before continuing development');
  }

  return report;
}

/**
 * Display health report
 */
function displayHealthReport(report) {
  const statusEmoji = {
    excellent: '💚',
    good: '💙',
    fair: '💛',
    poor: '🧡',
    critical: '❤️'
  };

  console.log('🏥 BMAD Workspace Health Check');
  console.log('==============================');
  console.log(`${statusEmoji[report.status]} Overall Health: ${report.overallScore}/100 (${report.status.toUpperCase()})`);
  console.log(`📊 Issues Found: ${report.summary.totalIssues} (${report.summary.criticalIssues} critical)`);
  console.log(`🕐 Checked: ${new Date(report.timestamp).toLocaleString()}`);

  // Display individual check results
  console.log('\n📋 Detailed Results:');

  Object.entries(report.checks).forEach(([checkName, result]) => {
    const emoji = result.score >= 90 ? '✅' : result.score >= 70 ? '⚠️' : '❌';
    const name = checkName.replace(/([A-Z])/g, ' $1').replace(/^./, str => str.toUpperCase());
    console.log(`${emoji} ${name}: ${result.score}/100`);

    if (result.issues && result.issues.length > 0) {
      result.issues.slice(0, 3).forEach(issue => {
        console.log(`   • ${issue}`);
      });
      if (result.issues.length > 3) {
        console.log(`   • ... and ${result.issues.length - 3} more issues`);
      }
    }
  });

  // Show session summary
  if (report.checks.sessionHealth) {
    const sessions = report.checks.sessionHealth;
    console.log(`\n👥 Sessions: ${sessions.totalSessions} total, ${sessions.activeSessions} active, ${sessions.staleSessions} stale`);
  }

  // Show recommendations
  if (report.summary.recommendations.length > 0) {
    console.log('\n💡 Recommendations:');
    report.summary.recommendations.forEach(rec => {
      console.log(`   • ${rec}`);
    });
  }

  // Quick actions
  console.log('\n🚀 Quick Actions:');
  console.log('   npm run workspace-cleanup   # Repair and optimize workspace');
  console.log('   npm run workspace-status    # Check current activity');
  console.log('   npm run workspace-sync      # Synchronize context');

  if (report.overallScore < 70) {
    console.log('\n⚠️ Workspace needs attention. Address the issues above for optimal performance.');
  } else if (report.overallScore >= 90) {
    console.log('\n🎉 Excellent! Your workspace is healthy and ready for collaborative development.');
  }
}

/**
 * Main health check function
 */
async function checkWorkspaceHealth(options = {}) {
  try {
    const workspacePath = path.join(process.cwd(), '.workspace');

    if (!fs.existsSync(workspacePath)) {
      console.error('❌ Workspace directory not found.');
      console.error('   Run `npm run workspace-init` to initialize workspace');
      process.exit(1);
    }

    const report = generateHealthReport(workspacePath);

    if (options.json) {
      console.log(JSON.stringify(report, null, 2));
      return;
    }

    displayHealthReport(report);

    // Save health report
    const reportPath = path.join(workspacePath, 'quality', 'health-report.json');
    const qualityDir = path.dirname(reportPath);
    if (!fs.existsSync(qualityDir)) {
      fs.mkdirSync(qualityDir, { recursive: true });
    }
    fs.writeFileSync(reportPath, JSON.stringify(report, null, 2));

    console.log(`\n📄 Detailed report saved: .workspace/quality/health-report.json`);

    // Exit with appropriate code for CI/CD
    if (options.exitCode && report.summary.criticalIssues > 0) {
      process.exit(1);
    }

  } catch (error) {
    console.error('❌ Failed to check workspace health:', error.message);
    process.exit(1);
  }
}

// Command line execution
if (require.main === module) {
  const args = process.argv.slice(2);
  const options = {
    json: args.includes('--json'),
    exitCode: args.includes('--exit-code'),
    verbose: args.includes('--verbose')
  };

  checkWorkspaceHealth(options);
}

module.exports = { checkWorkspaceHealth, generateHealthReport };
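For teams wiring the health check into automation, the exports above can also be consumed directly from Node rather than through `npm run workspace-health`. The sketch below is illustrative only: the `ci-gate.js` file name and the score threshold of 80 are assumptions, not part of this commit.

```js
// ci-gate.js: hypothetical CI consumer of the health utility (illustrative sketch)
const path = require('path');
const { generateHealthReport } = require('./workspace-utils/health');

// Build the report for the current project's .workspace directory
const report = generateHealthReport(path.join(process.cwd(), '.workspace'));

// Fail the pipeline on critical issues or a low overall score (threshold is an assumption)
if (report.summary.criticalIssues > 0 || report.overallScore < 80) {
  console.error(`Workspace health ${report.overallScore}/100 (${report.status})`);
  process.exit(1);
}
```

Equivalent gating is already available from the CLI via `node workspace-utils/health.js --exit-code`, which exits non-zero when critical issues are found.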
@@ -0,0 +1,292 @@
#!/usr/bin/env node
/**
 * BMAD Workspace Initialization Utility
 * Cross-IDE workspace initialization with session management
 */

const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

/**
 * Detect IDE environment from various sources
 */
function detectIDE() {
  // Check environment variables
  if (process.env.CURSOR_SOCKET) return 'cursor';
  if (process.env.WINDSURF_SESSION) return 'windsurf';
  if (process.env.TRAE_MODE) return 'trae';
  if (process.env.ROO_CODE) return 'roo';
  if (process.env.CLINE_ACTIVE) return 'cline';
  if (process.env.GEMINI_AI_STUDIO) return 'gemini';
  if (process.env.GITHUB_COPILOT) return 'github-copilot';
  if (process.env.VSCODE_PID) return 'vscode';
  if (process.env.IDE_TYPE) return process.env.IDE_TYPE;

  // Check for IDE-specific files or patterns
  if (fs.existsSync('.cursor')) return 'cursor';
  if (fs.existsSync('.windsurf')) return 'windsurf';
  if (fs.existsSync('.vscode')) return 'vscode';

  return 'unknown';
}

/**
 * Create workspace directory structure
 */
function createWorkspaceStructure(workspacePath) {
  const directories = [
    'sessions',
    'context',
    'handoffs',
    'decisions',
    'progress',
    'quality',
    'archive',
    'hooks',
    'templates',
    'logs'
  ];

  directories.forEach(dir => {
    const dirPath = path.join(workspacePath, dir);
    if (!fs.existsSync(dirPath)) {
      fs.mkdirSync(dirPath, { recursive: true });
    }
  });
}

/**
 * Initialize workspace configuration
 */
function initWorkspaceConfig(workspacePath) {
  const configPath = path.join(workspacePath, 'workspace-config.json');

  if (!fs.existsSync(configPath)) {
    const config = {
      version: '1.0.0',
      created: new Date().toISOString(),
      lastUpdated: new Date().toISOString(),
      features: {
        crossIDESupport: true,
        sessionManagement: true,
        contextPersistence: true,
        agentHandoffs: true,
        qualityTracking: true
      },
      settings: {
        maxSessions: 10,
        sessionTimeout: 3600000, // 1 hour in milliseconds
        autoCleanup: true,
        logLevel: 'info'
      }
    };

    fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
    return config;
  }

  return JSON.parse(fs.readFileSync(configPath, 'utf8'));
}

/**
 * Create session with IDE-specific metadata
 */
function createSession(workspacePath, ide) {
  const sessionId = crypto.randomBytes(8).toString('hex');
  const timestamp = new Date().toISOString();

  const sessionData = {
    id: sessionId,
    ide: ide,
    created: timestamp,
    lastHeartbeat: timestamp,
    pid: process.pid,
    user: process.env.USER || process.env.USERNAME || 'unknown',
    cwd: process.cwd(),
    nodeVersion: process.version,
    platform: process.platform,
    arch: process.arch,
    metadata: {
      ideSpecific: getIDESpecificMetadata(ide),
      features: ['context-sharing', 'agent-handoffs', 'quality-tracking']
    }
  };

  const sessionFile = path.join(workspacePath, 'sessions', `${sessionId}.json`);
  fs.writeFileSync(sessionFile, JSON.stringify(sessionData, null, 2));

  return sessionData;
}

/**
 * Get IDE-specific metadata
 */
function getIDESpecificMetadata(ide) {
  const metadata = {
    supportsTerminalCommands: true,
    hasIntegratedGit: false,
    supportsPanels: false,
    hasExtensionSystem: false
  };

  switch (ide) {
    case 'cursor':
      metadata.hasIntegratedGit = true;
      metadata.supportsPanels = true;
      metadata.hasExtensionSystem = true;
      metadata.features = ['custom-rules', 'ai-assistance', 'git-integration'];
      break;
    case 'windsurf':
      metadata.hasIntegratedGit = true;
      metadata.supportsPanels = true;
      metadata.features = ['ai-agent', 'terminal-integration'];
      break;
    case 'vscode':
      metadata.hasIntegratedGit = true;
      metadata.supportsPanels = true;
      metadata.hasExtensionSystem = true;
      metadata.features = ['extensions', 'integrated-terminal', 'git-integration'];
      break;
    case 'github-copilot':
      metadata.hasIntegratedGit = true;
      metadata.hasExtensionSystem = true;
      metadata.features = ['ai-assistance', 'code-completion'];
      break;
    default:
      metadata.features = ['basic-terminal'];
  }

  return metadata;
}

/**
 * Create IDE-specific setup hints
 */
function createIDESetupHints(workspacePath, ide) {
  const hintsPath = path.join(workspacePath, 'templates', `${ide}-setup.md`);

  let setupContent = `# ${ide.toUpperCase()} Workspace Setup\n\n`;

  switch (ide) {
    case 'cursor':
      setupContent += `## Cursor Integration
- Add workspace commands to your terminal
- Use \`npm run workspace-status\` to check collaboration status
- Workspace context is automatically shared between sessions
- Custom rules in .cursor/rules/ will respect workspace state

## Commands
\`\`\`bash
npm run workspace-init     # Initialize session
npm run workspace-status   # Check status
npm run workspace-cleanup  # Maintenance
\`\`\`
`;
      break;
    case 'windsurf':
      setupContent += `## Windsurf Integration
- Workspace utilities available through terminal
- Context sharing works with Windsurf AI agent
- Session state persists across Windsurf restarts

## Commands
\`\`\`bash
npm run workspace-init     # Start workspace session
npm run workspace-handoff  # Prepare agent handoff
npm run workspace-sync     # Sync with latest context
\`\`\`
`;
      break;
    default:
      setupContent += `## ${ide.toUpperCase()} Integration
- Use terminal commands for workspace management
- Full workspace functionality available
- Context persists across IDE sessions

## Available Commands
\`\`\`bash
npm run workspace-init     # Initialize workspace session
npm run workspace-status   # Show workspace status
npm run workspace-cleanup  # Clean and optimize workspace
npm run workspace-handoff  # Manage agent handoffs
npm run workspace-sync     # Synchronize context
npm run workspace-health   # Check workspace health
\`\`\`
`;
  }

  if (!fs.existsSync(hintsPath)) {
    fs.writeFileSync(hintsPath, setupContent);
  }
}

/**
 * Main initialization function
 */
async function initWorkspace() {
  try {
    const workspacePath = path.join(process.cwd(), '.workspace');

    // Create workspace directory structure
    if (!fs.existsSync(workspacePath)) {
      fs.mkdirSync(workspacePath, { recursive: true });
    }

    createWorkspaceStructure(workspacePath);

    // Initialize configuration
    const config = initWorkspaceConfig(workspacePath);

    // Detect IDE and create session
    const ide = detectIDE();
    const session = createSession(workspacePath, ide);

    // Create IDE-specific setup hints
    createIDESetupHints(workspacePath, ide);

    // Log initialization
    const logEntry = {
      timestamp: new Date().toISOString(),
      action: 'workspace-init',
      sessionId: session.id,
      ide: ide,
      user: session.user
    };

    const logPath = path.join(workspacePath, 'logs', 'workspace.log');
    fs.appendFileSync(logPath, JSON.stringify(logEntry) + '\n');

    // Success output
    console.log('✅ BMAD Workspace initialized successfully');
    console.log('=====================================');
    console.log(`📁 Workspace: ${workspacePath}`);
    console.log(`📍 Session ID: ${session.id}`);
    console.log(`💻 IDE: ${ide}`);
    console.log(`👤 User: ${session.user}`);
    console.log(`🕐 Created: ${new Date(session.created).toLocaleString()}`);

    if (ide !== 'unknown') {
      console.log(`\n📖 Setup guide: .workspace/templates/${ide}-setup.md`);
    }

    console.log('\n🚀 Ready for collaborative development!');
    console.log('   • Run `npm run workspace-status` to check status');
    console.log('   • Run `npm run workspace-health` for health check');

    return session.id;

  } catch (error) {
    console.error('❌ Failed to initialize workspace:', error.message);
    console.error('   Make sure you have proper file permissions');
    console.error('   Try running from project root directory');
    process.exit(1);
  }
}

// Command line execution
if (require.main === module) {
  initWorkspace();
}

module.exports = { initWorkspace, detectIDE };
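The initializer can likewise be driven from another Node script instead of the npm alias. The `bootstrap.js` wrapper below is a hypothetical example, not part of this commit; it only calls the two exported functions above.

```js
// bootstrap.js: hypothetical wrapper around the init utility (illustrative sketch)
const { initWorkspace, detectIDE } = require('./workspace-utils/init');

(async () => {
  console.log(`Detected IDE: ${detectIDE()}`); // e.g. 'cursor', 'vscode', or 'unknown'
  const sessionId = await initWorkspace();     // creates .workspace/ and writes a session file
  console.log(`Workspace session started: ${sessionId}`);
})();
```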
@@ -0,0 +1,259 @@
#!/usr/bin/env node
/**
 * BMAD Workspace Status Utility
 * Cross-IDE workspace status reporting and analytics
 */

const fs = require('fs');
const path = require('path');

/**
 * Get workspace configuration
 */
function getWorkspaceConfig(workspacePath) {
  const configPath = path.join(workspacePath, 'workspace-config.json');
  if (fs.existsSync(configPath)) {
    return JSON.parse(fs.readFileSync(configPath, 'utf8'));
  }
  return null;
}

/**
 * Get active sessions with health check
 */
function getActiveSessions(workspacePath) {
  const sessionsPath = path.join(workspacePath, 'sessions');
  if (!fs.existsSync(sessionsPath)) {
    return [];
  }

  const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
  const activeSessions = [];
  const now = new Date();

  for (const file of sessionFiles) {
    try {
      const sessionPath = path.join(sessionsPath, file);
      const sessionContent = fs.readFileSync(sessionPath, 'utf8');
      const sessionData = JSON.parse(sessionContent);

      // Check if session is still active (within 1 hour)
      const lastHeartbeat = new Date(sessionData.lastHeartbeat);
      const timeDiff = now - lastHeartbeat;
      const isActive = timeDiff < 3600000; // 1 hour

      sessionData.isActive = isActive;
      sessionData.timeSinceLastHeartbeat = timeDiff;

      activeSessions.push(sessionData);

    } catch (e) {
      console.warn(`⚠️ Corrupted session file: ${file}`);
    }
  }

  return activeSessions.sort((a, b) => new Date(b.created) - new Date(a.created));
}

/**
 * Check workspace health
 */
function checkWorkspaceHealth(workspacePath) {
  const requiredDirs = ['sessions', 'context', 'handoffs', 'decisions', 'progress', 'quality'];
  const health = {
    score: 100,
    issues: [],
    recommendations: []
  };

  // Check directory structure
  for (const dir of requiredDirs) {
    const dirPath = path.join(workspacePath, dir);
    if (!fs.existsSync(dirPath)) {
      health.score -= 15;
      health.issues.push(`Missing directory: ${dir}`);
      health.recommendations.push(`Run \`npm run workspace-cleanup\` to repair structure`);
    }
  }

  // Check for stale sessions
  const sessions = getActiveSessions(workspacePath);
  const staleSessions = sessions.filter(s => !s.isActive);
  if (staleSessions.length > 0) {
    health.score -= staleSessions.length * 5;
    health.issues.push(`${staleSessions.length} stale sessions detected`);
    health.recommendations.push('Run `npm run workspace-cleanup` to remove stale sessions');
  }

  // Check log file size
  const logPath = path.join(workspacePath, 'logs', 'workspace.log');
  if (fs.existsSync(logPath)) {
    const stats = fs.statSync(logPath);
    const logSizeMB = stats.size / (1024 * 1024);
    if (logSizeMB > 10) {
      health.score -= 10;
      health.issues.push(`Large log file: ${logSizeMB.toFixed(1)}MB`);
      health.recommendations.push('Consider archiving or rotating log files');
    }
  }

  return health;
}

/**
 * Get workspace analytics
 */
function getWorkspaceAnalytics(workspacePath) {
  const analytics = {
    totalSessions: 0,
    activeSessions: 0,
    ideBreakdown: {},
    userBreakdown: {},
    avgSessionDuration: 0,
    recentActivity: []
  };

  const sessions = getActiveSessions(workspacePath);
  analytics.totalSessions = sessions.length;
  analytics.activeSessions = sessions.filter(s => s.isActive).length;

  // IDE breakdown
  sessions.forEach(session => {
    analytics.ideBreakdown[session.ide] = (analytics.ideBreakdown[session.ide] || 0) + 1;
    analytics.userBreakdown[session.user] = (analytics.userBreakdown[session.user] || 0) + 1;
  });

  // Recent activity from logs
  const logPath = path.join(workspacePath, 'logs', 'workspace.log');
  if (fs.existsSync(logPath)) {
    try {
      const logContent = fs.readFileSync(logPath, 'utf8');
      const logLines = logContent.trim().split('\n').slice(-10); // Last 10 entries

      analytics.recentActivity = logLines.map(line => {
        try {
          return JSON.parse(line);
        } catch (e) {
          return null;
        }
      }).filter(Boolean);
    } catch (e) {
      // Ignore log parsing errors
    }
  }

  return analytics;
}

/**
 * Format time duration
 */
function formatDuration(milliseconds) {
  const seconds = Math.floor(milliseconds / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);

  if (hours > 0) return `${hours}h ${minutes % 60}m`;
  if (minutes > 0) return `${minutes}m ${seconds % 60}s`;
  return `${seconds}s`;
}

/**
 * Display workspace status
 */
async function getWorkspaceStatus() {
  try {
    const workspacePath = path.join(process.cwd(), '.workspace');

    if (!fs.existsSync(workspacePath)) {
      console.error('❌ Workspace directory not found.');
      console.error('   Run `npm run workspace-init` to initialize workspace');
      process.exit(1);
    }

    const config = getWorkspaceConfig(workspacePath);
    const sessions = getActiveSessions(workspacePath);
    const health = checkWorkspaceHealth(workspacePath);
    const analytics = getWorkspaceAnalytics(workspacePath);

    // Header
    console.log('🤝 BMAD Collaborative Workspace Status');
    console.log('=====================================');

    // Basic info
    console.log(`📁 Workspace: ${workspacePath}`);
    console.log(`⚙️ Version: ${config?.version || 'Unknown'}`);
    console.log(`🕐 Created: ${config?.created ? new Date(config.created).toLocaleString() : 'Unknown'}`);

    // Health score
    const healthEmoji = health.score >= 90 ? '💚' : health.score >= 70 ? '💛' : '❤️';
    console.log(`${healthEmoji} Health Score: ${health.score}/100`);

    // Sessions
    console.log(`\n👥 Sessions: ${analytics.totalSessions} total, ${analytics.activeSessions} active`);

    if (sessions.length > 0) {
      console.log('\n📍 Session Details:');
      sessions.forEach((session, index) => {
        const statusEmoji = session.isActive ? '🟢' : '🟡';
        const duration = formatDuration(session.timeSinceLastHeartbeat);
        console.log(`   ${statusEmoji} ${index + 1}. ${session.id} (${session.ide})`);
        console.log(`      User: ${session.user} | PID: ${session.pid}`);
        console.log(`      Created: ${new Date(session.created).toLocaleString()}`);
        console.log(`      Last activity: ${duration} ago`);

        if (session.metadata?.features) {
          console.log(`      Features: ${session.metadata.features.join(', ')}`);
        }
      });
    }

    // IDE breakdown
    if (Object.keys(analytics.ideBreakdown).length > 0) {
      console.log('\n💻 IDE Usage:');
      Object.entries(analytics.ideBreakdown).forEach(([ide, count]) => {
        console.log(`   ${ide}: ${count} sessions`);
      });
    }

    // Health issues
    if (health.issues.length > 0) {
      console.log('\n⚠️ Health Issues:');
      health.issues.forEach(issue => console.log(`   • ${issue}`));

      console.log('\n💡 Recommendations:');
      health.recommendations.forEach(rec => console.log(`   • ${rec}`));
    }

    // Recent activity
    if (analytics.recentActivity.length > 0) {
      console.log('\n📋 Recent Activity:');
      analytics.recentActivity.slice(-5).forEach(activity => {
        const time = new Date(activity.timestamp).toLocaleTimeString();
        console.log(`   ${time} - ${activity.action} (${activity.ide || 'unknown'})`);
      });
    }

    // Footer
    console.log('\n🚀 Available Commands:');
    console.log('   npm run workspace-init      # Initialize new session');
    console.log('   npm run workspace-cleanup   # Clean and repair workspace');
    console.log('   npm run workspace-health    # Detailed health check');
    console.log('   npm run workspace-handoff   # Manage agent handoffs');

    if (health.score < 80) {
      console.log('\n💭 Tip: Run `npm run workspace-cleanup` to improve health score');
    }

  } catch (error) {
    console.error('❌ Failed to get workspace status:', error.message);
    process.exit(1);
  }
}

// Command line execution
if (require.main === module) {
  getWorkspaceStatus();
}

module.exports = { getWorkspaceStatus, getActiveSessions, checkWorkspaceHealth };
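Because the status module exports its building blocks, other tooling can reuse them without parsing the console output. The probe below is a hedged sketch: the `status-probe.js` name and the 80-point threshold are assumptions made for illustration.

```js
// status-probe.js: hypothetical consumer of the status utility (illustrative sketch)
const path = require('path');
const { getActiveSessions, checkWorkspaceHealth } = require('./workspace-utils/status');

const workspacePath = path.join(process.cwd(), '.workspace');
const active = getActiveSessions(workspacePath).filter(s => s.isActive);
const health = checkWorkspaceHealth(workspacePath);

console.log(`${active.length} active session(s), health ${health.score}/100`);
if (health.score < 80) {
  health.recommendations.forEach(rec => console.log(`• ${rec}`));
}
```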
@@ -0,0 +1,448 @@
#!/usr/bin/env node
/**
 * BMAD Workspace Sync Utility
 * Cross-IDE context synchronization and restoration
 */

const fs = require('fs');
const path = require('path');
const crypto = require('crypto');

/**
 * Get current session information
 */
function getCurrentSession(workspacePath) {
  const sessionsPath = path.join(workspacePath, 'sessions');
  if (!fs.existsSync(sessionsPath)) {
    return null;
  }

  const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
  const now = new Date();

  for (const file of sessionFiles) {
    try {
      const sessionData = JSON.parse(fs.readFileSync(path.join(sessionsPath, file), 'utf8'));
      const lastHeartbeat = new Date(sessionData.lastHeartbeat);
      const timeDiff = now - lastHeartbeat;

      // Consider session active if heartbeat within last hour
      if (timeDiff < 3600000) {
        return sessionData;
      }
    } catch (e) {
      // Skip corrupted session files
    }
  }

  return null;
}

/**
 * Update session heartbeat
 */
function updateSessionHeartbeat(workspacePath, sessionId) {
  const sessionFile = path.join(workspacePath, 'sessions', `${sessionId}.json`);

  if (fs.existsSync(sessionFile)) {
    try {
      const sessionData = JSON.parse(fs.readFileSync(sessionFile, 'utf8'));
      sessionData.lastHeartbeat = new Date().toISOString();
      fs.writeFileSync(sessionFile, JSON.stringify(sessionData, null, 2));
      return true;
    } catch (e) {
      console.warn('⚠️ Failed to update session heartbeat:', e.message);
    }
  }

  return false;
}

/**
 * Sync context from shared workspace
 */
function syncContextFromWorkspace(workspacePath) {
  const contextPath = path.join(workspacePath, 'context');
  if (!fs.existsSync(contextPath)) {
    return { synced: [], errors: [] };
  }

  const contextFiles = fs.readdirSync(contextPath);
  const synced = [];
  const errors = [];

  for (const file of contextFiles) {
    try {
      const filePath = path.join(contextPath, file);
      const stats = fs.statSync(filePath);

      if (stats.isFile() && (file.endsWith('.md') || file.endsWith('.json'))) {
        // Read context file for validation
        const content = fs.readFileSync(filePath, 'utf8');

        if (content.trim().length > 0) {
          synced.push({
            file: file,
            size: stats.size,
            modified: stats.mtime.toISOString(),
            preview: content.substring(0, 100) + (content.length > 100 ? '...' : '')
          });
        }
      }
    } catch (e) {
      errors.push(`Failed to sync ${file}: ${e.message}`);
    }
  }

  return { synced, errors };
}

/**
 * Get latest progress updates
 */
function getLatestProgress(workspacePath, limit = 5) {
  const progressPath = path.join(workspacePath, 'progress');
  if (!fs.existsSync(progressPath)) {
    return [];
  }

  const progressFiles = fs.readdirSync(progressPath)
    .filter(f => f.endsWith('.md'))
    .map(f => {
      try {
        const filePath = path.join(progressPath, f);
        const stats = fs.statSync(filePath);
        const content = fs.readFileSync(filePath, 'utf8');

        return {
          file: f,
          modified: stats.mtime,
          size: stats.size,
          content: content,
          preview: content.substring(0, 150) + (content.length > 150 ? '...' : '')
        };
      } catch (e) {
        return null;
      }
    })
    .filter(Boolean)
    .sort((a, b) => b.modified - a.modified)
    .slice(0, limit);

  return progressFiles;
}

/**
 * Get pending handoffs
 */
function getPendingHandoffs(workspacePath) {
  const handoffsPath = path.join(workspacePath, 'handoffs');
  if (!fs.existsSync(handoffsPath)) {
    return [];
  }

  const handoffFiles = fs.readdirSync(handoffsPath)
    .filter(f => f.endsWith('.json'))
    .map(f => {
      try {
        const content = fs.readFileSync(path.join(handoffsPath, f), 'utf8');
        const handoff = JSON.parse(content);

        // Consider handoffs from last 24 hours as potentially relevant
        const handoffTime = new Date(handoff.timestamp);
        const timeDiff = new Date() - handoffTime;

        if (timeDiff < 86400000) { // 24 hours
          return handoff;
        }
      } catch (e) {
        // Skip corrupted handoff files
      }
      return null;
    })
    .filter(Boolean)
    .sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));

  return handoffFiles;
}

/**
 * Get recent quality reports
 */
function getRecentQualityReports(workspacePath, limit = 3) {
  const qualityPath = path.join(workspacePath, 'quality');
  if (!fs.existsSync(qualityPath)) {
    return [];
  }

  const qualityFiles = fs.readdirSync(qualityPath)
    .filter(f => f.endsWith('.json') || f.endsWith('.md'))
    .map(f => {
      try {
        const filePath = path.join(qualityPath, f);
        const stats = fs.statSync(filePath);
        const content = fs.readFileSync(filePath, 'utf8');

        return {
          file: f,
          modified: stats.mtime,
          type: f.endsWith('.json') ? 'report' : 'analysis',
          preview: content.substring(0, 100) + (content.length > 100 ? '...' : '')
        };
      } catch (e) {
        return null;
      }
    })
    .filter(Boolean)
    .sort((a, b) => b.modified - a.modified)
    .slice(0, limit);

  return qualityFiles;
}

/**
 * Create sync summary
 */
function createSyncSummary(workspacePath, currentSession) {
  const contextSync = syncContextFromWorkspace(workspacePath);
  const latestProgress = getLatestProgress(workspacePath);
  const pendingHandoffs = getPendingHandoffs(workspacePath);
  const qualityReports = getRecentQualityReports(workspacePath);

  const summary = {
    timestamp: new Date().toISOString(),
    session: currentSession,
    context: {
      filesFound: contextSync.synced.length,
      syncErrors: contextSync.errors, // keep the full error list so callers can display each message
      files: contextSync.synced
    },
    progress: {
      recentUpdates: latestProgress.length,
      updates: latestProgress
    },
    handoffs: {
      pending: pendingHandoffs.length,
      recent: pendingHandoffs.slice(0, 3)
    },
    quality: {
      recentReports: qualityReports.length,
      reports: qualityReports
    }
  };

  return summary;
}

/**
 * Save sync state
 */
function saveSyncState(workspacePath, summary) {
  const syncPath = path.join(workspacePath, 'context', 'last-sync.json');
  fs.writeFileSync(syncPath, JSON.stringify(summary, null, 2));

  // Also create a readable markdown summary
  const markdownPath = path.join(workspacePath, 'context', 'sync-summary.md');
  const markdownContent = generateSyncMarkdown(summary);
  fs.writeFileSync(markdownPath, markdownContent);

  return { syncFile: syncPath, markdownFile: markdownPath };
}

/**
 * Generate sync markdown summary
 */
function generateSyncMarkdown(summary) {
  const sessionInfo = summary.session ?
    `**Current Session:** ${summary.session.id} (${summary.session.ide})
**User:** ${summary.session.user}
**Last Activity:** ${new Date(summary.session.lastHeartbeat).toLocaleString()}` :
    '**No active session found**';

  return `# Workspace Sync Summary

**Sync Time:** ${new Date(summary.timestamp).toLocaleString()}

## Session Information
${sessionInfo}

## Context Files (${summary.context.filesFound})
${summary.context.files.length > 0 ?
  summary.context.files.map(f =>
    `- **${f.file}** (${f.size} bytes, modified: ${new Date(f.modified).toLocaleString()})
  ${f.preview}`
  ).join('\n\n') :
  'No context files found'
}

${summary.context.syncErrors.length > 0 ? `
## Sync Errors
${summary.context.syncErrors.map(e => `- ${e}`).join('\n')}
` : ''}

## Recent Progress (${summary.progress.recentUpdates})
${summary.progress.updates.length > 0 ?
  summary.progress.updates.map(p =>
    `- **${p.file}** (${new Date(p.modified).toLocaleString()})
  ${p.preview}`
  ).join('\n\n') :
  'No recent progress updates'
}

## Pending Handoffs (${summary.handoffs.pending})
${summary.handoffs.recent.length > 0 ?
  summary.handoffs.recent.map(h =>
    `- **${h.id}:** ${h.fromAgent} → ${h.toAgent}
  Work: ${h.currentWork.substring(0, 80)}${h.currentWork.length > 80 ? '...' : ''}
  Time: ${new Date(h.timestamp).toLocaleString()}`
  ).join('\n\n') :
  'No pending handoffs'
}

## Quality Reports (${summary.quality.recentReports})
${summary.quality.reports.length > 0 ?
  summary.quality.reports.map(q =>
    `- **${q.file}** (${q.type}, ${new Date(q.modified).toLocaleString()})
  ${q.preview}`
  ).join('\n\n') :
  'No recent quality reports'
}

---
*Last synced: ${new Date(summary.timestamp).toLocaleString()}*
*Generated by BMAD Cross-IDE Workspace System*
`;
}

/**
 * Main sync function
 */
async function syncWorkspace(options = {}) {
  try {
    const workspacePath = path.join(process.cwd(), '.workspace');

    if (!fs.existsSync(workspacePath)) {
      console.error('❌ Workspace directory not found.');
      console.error('   Run `npm run workspace-init` to initialize workspace');
      process.exit(1);
    }

    console.log('🔄 BMAD Workspace Sync');
    console.log('=====================');
    console.log(`📁 Workspace: ${workspacePath}`);

    // Get or create session
    let currentSession = getCurrentSession(workspacePath);

    if (!currentSession) {
      console.log('⚠️ No active session found, checking for workspace initialization...');

      // Try to find the most recent session
      const sessionsPath = path.join(workspacePath, 'sessions');
      if (fs.existsSync(sessionsPath)) {
        const sessionFiles = fs.readdirSync(sessionsPath).filter(f => f.endsWith('.json'));
        if (sessionFiles.length > 0) {
          // Get most recent session
          let mostRecent = null;
          let mostRecentTime = 0;

          for (const file of sessionFiles) {
            try {
              const sessionData = JSON.parse(fs.readFileSync(path.join(sessionsPath, file), 'utf8'));
              const created = new Date(sessionData.created).getTime();
              if (created > mostRecentTime) {
                mostRecentTime = created;
                mostRecent = sessionData;
              }
            } catch (e) {
              // Skip corrupted files
            }
          }

          if (mostRecent) {
            console.log(`📍 Using most recent session: ${mostRecent.id} (${mostRecent.ide})`);
            currentSession = mostRecent;
          }
        }
      }

      if (!currentSession) {
        console.error('❌ No sessions found. Run `npm run workspace-init` first.');
        process.exit(1);
      }
    } else {
      // Update heartbeat for active session
      updateSessionHeartbeat(workspacePath, currentSession.id);
      console.log(`✅ Active session found: ${currentSession.id} (${currentSession.ide})`);
    }

    // Create comprehensive sync summary
    console.log('\n🔍 Analyzing workspace state...');
    const summary = createSyncSummary(workspacePath, currentSession);

    // Save sync state
    const files = saveSyncState(workspacePath, summary);

    // Log sync activity
    const logEntry = {
      timestamp: new Date().toISOString(),
      action: 'workspace-sync',
      sessionId: currentSession.id,
      contextFiles: summary.context.filesFound,
      progressUpdates: summary.progress.recentUpdates,
      pendingHandoffs: summary.handoffs.pending,
      user: process.env.USER || process.env.USERNAME || 'unknown'
    };

    const logPath = path.join(workspacePath, 'logs', 'workspace.log');
    fs.appendFileSync(logPath, JSON.stringify(logEntry) + '\n');

    // Display sync results
    console.log('\n✅ Workspace sync completed');
    console.log('============================');
    console.log(`📄 Context files: ${summary.context.filesFound}`);
    console.log(`📈 Progress updates: ${summary.progress.recentUpdates}`);
    console.log(`🔄 Pending handoffs: ${summary.handoffs.pending}`);
    console.log(`🎯 Quality reports: ${summary.quality.recentReports}`);

    if (summary.context.syncErrors.length > 0) {
      console.log(`\n⚠️ Sync errors: ${summary.context.syncErrors.length}`);
      summary.context.syncErrors.forEach(error => console.log(`   • ${error}`));
    }

    console.log(`\n📁 Sync summary: .workspace/context/sync-summary.md`);
    console.log(`📊 Detailed data: .workspace/context/last-sync.json`);

    // Show key highlights
    if (summary.handoffs.recent.length > 0) {
      console.log('\n🔄 Recent Handoffs:');
      summary.handoffs.recent.slice(0, 2).forEach(handoff => {
        console.log(`   • ${handoff.fromAgent} → ${handoff.toAgent}: ${handoff.currentWork.substring(0, 60)}...`);
      });
    }

    if (summary.progress.updates.length > 0) {
      console.log('\n📈 Latest Progress:');
      console.log(`   • ${summary.progress.updates[0].file}: ${summary.progress.updates[0].preview}`);
    }

    console.log('\n🚀 Workspace is now synchronized and ready for collaboration!');

  } catch (error) {
    console.error('❌ Failed to sync workspace:', error.message);
    process.exit(1);
  }
}

// Command line execution
if (require.main === module) {
  const args = process.argv.slice(2);
  const options = {
    verbose: args.includes('--verbose'),
    force: args.includes('--force')
  };

  syncWorkspace(options);
}

module.exports = { syncWorkspace, getCurrentSession, createSyncSummary };
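As with the other utilities, the sync module can be embedded in custom tooling rather than invoked only through `npm run workspace-sync`. The pre-handoff check below is a hedged sketch: the file name and the decision to sync only when an active session exists are assumptions for illustration.

```js
// pre-handoff-check.js: hypothetical pre-handoff sync step (illustrative sketch)
const path = require('path');
const { getCurrentSession, createSyncSummary } = require('./workspace-utils/sync');

const workspacePath = path.join(process.cwd(), '.workspace');
const session = getCurrentSession(workspacePath); // null if no heartbeat within the last hour

if (session) {
  const summary = createSyncSummary(workspacePath, session);
  console.log(`Pending handoffs: ${summary.handoffs.pending}, context files: ${summary.context.filesFound}`);
}
```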