690414:1113 Update README.md /.agents/skills, /.windsurf/workflows

This commit is contained in:
2026-04-14 11:13:42 +07:00
parent 02400fd88c
commit 6d45bdaeb5
194 changed files with 12708 additions and 8762 deletions
+571
View File
@@ -0,0 +1,571 @@
#!/usr/bin/env node
/**
* advanced-validator.js - Advanced validation capabilities for .agents
* Part of LCBP3-DMS Phase 3 enhancements
*/
const fs = require('fs');
const path = require('path');
const yaml = require('js-yaml');
// Configuration
// Paths are resolved relative to this script's location; two levels up from
// __dirname is assumed to be the repository root — TODO confirm against the
// actual scripts/ layout.
const BASE_DIR = path.resolve(__dirname, '../..');
const AGENTS_DIR = path.join(BASE_DIR, '.agents');
const SKILLS_DIR = path.join(AGENTS_DIR, 'skills');
// Workflows intentionally live under .windsurf, not .agents (the
// cross-reference validation below enforces this).
const WORKFLOWS_DIR = path.join(BASE_DIR, '.windsurf', 'workflows');
// Advanced validation class
/**
 * AdvancedValidator runs structural, content, cross-reference, security and
 * performance checks over the `.agents` tree and `.windsurf/workflows`,
 * accumulating every result into `this.validationResults` and persisting the
 * final report as JSON via `generateSummary()`.
 *
 * Result statuses: 'pass' | 'fail' | 'warn' | 'critical'. Critical results
 * are additionally collected in `this.criticalIssues`.
 */
class AdvancedValidator {
  constructor() {
    // Aggregate report, eventually written to
    // .agents/reports/advanced-validation.json.
    this.validationResults = {
      timestamp: new Date().toISOString(),
      validations: {},
      summary: {
        total_validations: 0,
        passed_validations: 0,
        failed_validations: 0,
        warnings: 0,
        critical_issues: 0
      }
    };
    // Flat list of critical issues; read back by generateSummary().
    this.criticalIssues = [];
  }

  /**
   * Print a colorized log line to stdout.
   * @param {string} message - Text to print.
   * @param {string} [level='info'] - One of info|pass|fail|warn|critical.
   */
  log(message, level = 'info') {
    const colors = {
      info: '\x1b[36m', // Cyan
      pass: '\x1b[32m', // Green
      fail: '\x1b[31m', // Red
      warn: '\x1b[33m', // Yellow
      critical: '\x1b[35m', // Magenta
      reset: '\x1b[0m'
    };
    const color = colors[level] || colors.info;
    console.log(`${color}[${level.toUpperCase()}] ${message}${colors.reset}`);
  }

  /**
   * Validate the YAML front matter of a skill's SKILL.md: required fields,
   * semver-ish version format, and existence of declared dependencies.
   * @param {string} skillPath - Absolute path to the skill directory.
   * @param {string} skillName - Directory name, used in result keys.
   * @returns {boolean} true when the front matter passed validation.
   */
  validateSkillFrontMatter(skillPath, skillName) {
    const skillMdPath = path.join(skillPath, 'SKILL.md');
    if (!fs.existsSync(skillMdPath)) {
      this.addValidationResult(`skill_${skillName}_frontmatter`, 'fail', {
        message: 'SKILL.md file not found',
        path: skillMdPath
      });
      return false;
    }
    try {
      const content = fs.readFileSync(skillMdPath, 'utf8');
      // Front matter is the first `---` ... `---` fence at the top of the file.
      const frontMatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
      if (!frontMatterMatch) {
        this.addValidationResult(`skill_${skillName}_frontmatter`, 'fail', {
          message: 'No front matter found',
          path: skillMdPath
        });
        return false;
      }
      try {
        const frontMatter = yaml.load(frontMatterMatch[1]);
        const requiredFields = ['name', 'description', 'version'];
        const missingFields = requiredFields.filter(field => !frontMatter[field]);
        if (missingFields.length > 0) {
          this.addValidationResult(`skill_${skillName}_frontmatter`, 'fail', {
            message: `Missing required fields: ${missingFields.join(', ')}`,
            missing_fields: missingFields,
            front_matter: frontMatter,
            path: skillMdPath
          });
          return false;
        }
        // Validate version format (X.Y.Z); a bad format is only a warning.
        const versionPattern = /^\d+\.\d+\.\d+$/;
        if (!versionPattern.test(frontMatter.version)) {
          this.addValidationResult(`skill_${skillName}_version_format`, 'warn', {
            message: 'Version format should be X.Y.Z',
            version: frontMatter.version,
            path: skillMdPath
          });
        }
        // Validate dependencies if present; a missing dependency directory is
        // critical because dependent skills cannot resolve it.
        if (frontMatter['depends-on']) {
          const dependencies = Array.isArray(frontMatter['depends-on'])
            ? frontMatter['depends-on']
            : [frontMatter['depends-on']];
          for (const dep of dependencies) {
            const depPath = path.join(SKILLS_DIR, dep);
            if (!fs.existsSync(depPath)) {
              this.addValidationResult(`skill_${skillName}_dependency_${dep}`, 'critical', {
                message: `Dependency not found: ${dep}`,
                dependency: dep,
                path: skillMdPath
              });
            }
          }
        }
        this.addValidationResult(`skill_${skillName}_frontmatter`, 'pass', {
          message: 'Front matter is valid',
          front_matter: frontMatter,
          path: skillMdPath
        });
        return true;
      } catch (yamlError) {
        this.addValidationResult(`skill_${skillName}_frontmatter`, 'fail', {
          message: `Invalid YAML in front matter: ${yamlError.message}`,
          path: skillMdPath
        });
        return false;
      }
    } catch (error) {
      this.addValidationResult(`skill_${skillName}_frontmatter`, 'fail', {
        message: `Error reading SKILL.md: ${error.message}`,
        path: skillMdPath
      });
      return false;
    }
  }

  /**
   * Validate the markdown body of a skill's SKILL.md: required sections,
   * forbidden TODO/FIXME/XXX markers, and a minimum-length heuristic.
   * @param {string} skillPath - Absolute path to the skill directory.
   * @param {string} skillName - Directory name, used in result keys.
   * @returns {boolean} true when the content passed validation.
   */
  validateSkillContent(skillPath, skillName) {
    const skillMdPath = path.join(skillPath, 'SKILL.md');
    if (!fs.existsSync(skillMdPath)) {
      // Missing file is already reported by validateSkillFrontMatter.
      return false;
    }
    try {
      const content = fs.readFileSync(skillMdPath, 'utf8');
      // Check for required sections
      const requiredSections = ['## Role', '## Task'];
      const missingSections = requiredSections.filter(section => !content.includes(section));
      if (missingSections.length > 0) {
        this.addValidationResult(`skill_${skillName}_content`, 'fail', {
          message: `Missing required sections: ${missingSections.join(', ')}`,
          missing_sections: missingSections,
          path: skillMdPath
        });
        return false;
      }
      // Check for forbidden patterns. The regexes are created fresh on every
      // call, so the stateful lastIndex of the /g flag cannot leak between
      // invocations of .test().
      const forbiddenPatterns = [
        { pattern: /TODO.*FIX/gi, message: 'TODO items should be resolved' },
        { pattern: /FIXME/gi, message: 'FIXME items should be addressed' },
        { pattern: /XXX/gi, message: 'XXX markers should be replaced' }
      ];
      for (const { pattern, message } of forbiddenPatterns) {
        if (pattern.test(content)) {
          this.addValidationResult(`skill_${skillName}_forbidden_patterns`, 'warn', {
            message: `${message} found in content`,
            pattern: pattern.toString(),
            path: skillMdPath
          });
        }
      }
      // Heuristic: very short skills are probably stubs.
      const contentLength = content.length;
      if (contentLength < 500) {
        this.addValidationResult(`skill_${skillName}_content_length`, 'warn', {
          message: 'Skill content seems too short',
          length: contentLength,
          path: skillMdPath
        });
      }
      this.addValidationResult(`skill_${skillName}_content`, 'pass', {
        message: 'Skill content is valid',
        length: contentLength,
        path: skillMdPath
      });
      return true;
    } catch (error) {
      this.addValidationResult(`skill_${skillName}_content`, 'fail', {
        message: `Error validating content: ${error.message}`,
        path: skillMdPath
      });
      return false;
    }
  }

  /**
   * Validate one workflow markdown file: must exist, contain headers, be
   * non-trivially long, and only reference skills that actually exist.
   * @param {string} workflowPath - Absolute path to the workflow .md file.
   * @param {string} workflowName - File name without extension.
   * @returns {boolean} true when the structure passed validation.
   */
  validateWorkflowStructure(workflowPath, workflowName) {
    if (!fs.existsSync(workflowPath)) {
      this.addValidationResult(`workflow_${workflowName}_exists`, 'fail', {
        message: 'Workflow file not found',
        path: workflowPath
      });
      return false;
    }
    try {
      const content = fs.readFileSync(workflowPath, 'utf8');
      // Check for markdown headers
      if (!content.includes('#')) {
        this.addValidationResult(`workflow_${workflowName}_structure`, 'fail', {
          message: 'No markdown headers found',
          path: workflowPath
        });
        return false;
      }
      // Heuristic: a real workflow is longer than 200 characters.
      const hasWorkflowContent = content.length > 200;
      if (!hasWorkflowContent) {
        this.addValidationResult(`workflow_${workflowName}_content`, 'warn', {
          message: 'Workflow content seems too short',
          length: content.length,
          path: workflowPath
        });
      }
      // Validate skill references: every @speckit-* mention must map to an
      // existing skill directory, otherwise the workflow is broken (critical).
      const skillReferences = content.match(/@speckit-\w+/g) || [];
      for (const skillRef of skillReferences) {
        const skillName = skillRef.replace('@', '');
        const skillPath = path.join(SKILLS_DIR, skillName);
        if (!fs.existsSync(skillPath)) {
          this.addValidationResult(`workflow_${workflowName}_skill_ref_${skillName}`, 'critical', {
            message: `Workflow references non-existent skill: ${skillRef}`,
            skill_reference: skillRef,
            path: workflowPath
          });
        }
      }
      this.addValidationResult(`workflow_${workflowName}_structure`, 'pass', {
        message: 'Workflow structure is valid',
        skill_references: skillReferences,
        path: workflowPath
      });
      return true;
    } catch (error) {
      this.addValidationResult(`workflow_${workflowName}_structure`, 'fail', {
        message: `Error validating workflow: ${error.message}`,
        path: workflowPath
      });
      return false;
    }
  }

  /**
   * Cross-file consistency checks: README workflow-path references, README
   * version consistency, and the skills.md dependency matrix.
   */
  validateCrossReferences() {
    this.log('Validating cross-references...', 'info');
    // Check README.md references
    const readmePath = path.join(AGENTS_DIR, 'README.md');
    if (fs.existsSync(readmePath)) {
      const readmeContent = fs.readFileSync(readmePath, 'utf8');
      // Workflows moved to .windsurf/workflows; a README that only mentions
      // the old .agents/workflows location is critically out of date.
      if (readmeContent.includes('.agents/workflows') && !readmeContent.includes('.windsurf/workflows')) {
        this.addValidationResult('readme_workflow_reference', 'critical', {
          message: 'README.md references .agents/workflows instead of .windsurf/workflows',
          path: readmePath
        });
      }
      // Check version consistency in README.
      // BUG FIX: normalize the optional leading "v" before deduplicating so
      // "v1.2.3" and "1.2.3" are not reported as two different versions.
      const versionMatches = readmeContent.match(/v?(\d+\.\d+\.\d+)/g) || [];
      const uniqueVersions = [...new Set(versionMatches.map(v => v.replace(/^v/, '')))];
      if (uniqueVersions.length > 1) {
        this.addValidationResult('readme_version_consistency', 'warn', {
          message: 'Multiple versions found in README.md',
          versions: uniqueVersions,
          path: readmePath
        });
      }
    }
    // Check skills.md references
    const skillsMdPath = path.join(SKILLS_DIR, 'skills.md');
    if (fs.existsSync(skillsMdPath)) {
      const skillsContent = fs.readFileSync(skillsMdPath, 'utf8');
      // Validate skill dependency matrix section exists.
      if (skillsContent.includes('## Skill Dependency Matrix')) {
        this.addValidationResult('skills_dependency_matrix', 'pass', {
          message: 'Skills documentation includes dependency matrix',
          path: skillsMdPath
        });
      } else {
        this.addValidationResult('skills_dependency_matrix', 'warn', {
          message: 'Skills documentation missing dependency matrix',
          path: skillsMdPath
        });
      }
    }
  }

  /**
   * Keyword-presence checks over the security and ADR-019 UUID rule files.
   * Missing security topics warn; missing UUID patterns are critical.
   */
  validateSecurityCompliance() {
    this.log('Validating security compliance...', 'info');
    // Check for security patterns in rules
    const securityRulePath = path.join(AGENTS_DIR, 'rules', '02-security.md');
    if (fs.existsSync(securityRulePath)) {
      const securityContent = fs.readFileSync(securityRulePath, 'utf8');
      const requiredSecurityTopics = [
        'authentication',
        'authorization',
        'rbac',
        'validation',
        'audit'
      ];
      // Case-insensitive containment check per topic.
      const missingTopics = requiredSecurityTopics.filter(topic =>
        !securityContent.toLowerCase().includes(topic.toLowerCase())
      );
      if (missingTopics.length > 0) {
        this.addValidationResult('security_rules_completeness', 'warn', {
          message: `Security rules missing topics: ${missingTopics.join(', ')}`,
          missing_topics: missingTopics,
          path: securityRulePath
        });
      } else {
        this.addValidationResult('security_rules_completeness', 'pass', {
          message: 'Security rules cover all required topics',
          path: securityRulePath
        });
      }
    }
    // Check for ADR-019 compliance in rules (case-sensitive on purpose:
    // these are literal code tokens the rule file must mention).
    const uuidRulePath = path.join(AGENTS_DIR, 'rules', '01-adr-019-uuid.md');
    if (fs.existsSync(uuidRulePath)) {
      const uuidContent = fs.readFileSync(uuidRulePath, 'utf8');
      const criticalUuidRules = [
        'parseInt',
        'Number(',
        'publicId',
        '@Exclude()'
      ];
      const missingRules = criticalUuidRules.filter(rule =>
        !uuidContent.includes(rule)
      );
      if (missingRules.length > 0) {
        this.addValidationResult('uuid_rules_completeness', 'critical', {
          message: `UUID rules missing critical patterns: ${missingRules.join(', ')}`,
          missing_patterns: missingRules,
          path: uuidRulePath
        });
      } else {
        this.addValidationResult('uuid_rules_completeness', 'pass', {
          message: 'UUID rules cover all critical patterns',
          path: uuidRulePath
        });
      }
    }
  }

  /**
   * Size/shape heuristics: warn on critical files over 100KB and on a
   * .agents directory tree deeper than 5 levels.
   */
  validatePerformanceMetrics() {
    this.log('Validating performance metrics...', 'info');
    // Check file sizes
    const criticalFiles = [
      { path: path.join(AGENTS_DIR, 'README.md'), name: 'README.md' },
      { path: path.join(SKILLS_DIR, 'skills.md'), name: 'skills.md' },
      { path: path.join(AGENTS_DIR, 'skills', 'VERSION'), name: 'VERSION' }
    ];
    for (const file of criticalFiles) {
      if (fs.existsSync(file.path)) {
        const stats = fs.statSync(file.path);
        const sizeKB = stats.size / 1024;
        if (sizeKB > 100) {
          this.addValidationResult(`file_size_${file.name}`, 'warn', {
            message: `File ${file.name} is large (${sizeKB.toFixed(1)}KB)`,
            size_kb: sizeKB,
            path: file.path
          });
        } else {
          this.addValidationResult(`file_size_${file.name}`, 'pass', {
            message: `File ${file.name} size is acceptable`,
            size_kb: sizeKB,
            path: file.path
          });
        }
      }
    }
    // Recursively compute the deepest nesting level under dirPath.
    function getDirectoryDepth(dirPath, currentDepth = 0) {
      let maxDepth = currentDepth;
      if (fs.existsSync(dirPath)) {
        const items = fs.readdirSync(dirPath);
        for (const item of items) {
          const itemPath = path.join(dirPath, item);
          if (fs.statSync(itemPath).isDirectory()) {
            const depth = getDirectoryDepth(itemPath, currentDepth + 1);
            maxDepth = Math.max(maxDepth, depth);
          }
        }
      }
      return maxDepth;
    }
    const agentsDepth = getDirectoryDepth(AGENTS_DIR);
    if (agentsDepth > 5) {
      this.addValidationResult('directory_depth', 'warn', {
        message: `.agents directory structure is deep (${agentsDepth} levels)`,
        depth: agentsDepth,
        path: AGENTS_DIR
      });
    } else {
      this.addValidationResult('directory_depth', 'pass', {
        message: `.agents directory structure depth is acceptable`,
        depth: agentsDepth,
        path: AGENTS_DIR
      });
    }
  }

  /**
   * Record one validation result, update the summary counters, and log it.
   * @param {string} name - Unique key for this validation.
   * @param {'pass'|'fail'|'warn'|'critical'} status - Outcome.
   * @param {object} details - Free-form details; must include `message`.
   */
  addValidationResult(name, status, details) {
    this.validationResults.validations[name] = {
      status,
      timestamp: new Date().toISOString(),
      ...details
    };
    this.validationResults.summary.total_validations++;
    switch (status) {
      case 'pass':
        this.validationResults.summary.passed_validations++;
        this.log(`${name}: PASS - ${details.message}`, 'pass');
        break;
      case 'fail':
        this.validationResults.summary.failed_validations++;
        this.log(`${name}: FAIL - ${details.message}`, 'fail');
        break;
      case 'warn':
        this.validationResults.summary.warnings++;
        this.log(`${name}: WARN - ${details.message}`, 'warn');
        break;
      case 'critical':
        this.validationResults.summary.critical_issues++;
        this.criticalIssues.push({ name, ...details });
        this.log(`${name}: CRITICAL - ${details.message}`, 'critical');
        break;
    }
  }

  /**
   * Run the full validation suite (skills, workflows, cross-references,
   * security, performance), then generate and persist the summary.
   * @returns {Promise<object>} the accumulated validationResults.
   */
  async runAdvancedValidation() {
    this.log('Starting advanced validation...', 'info');
    this.log(`Base directory: ${BASE_DIR}`, 'info');
    // Validate all skills
    this.log('Validating skills...', 'info');
    if (fs.existsSync(SKILLS_DIR)) {
      const skillDirs = fs.readdirSync(SKILLS_DIR).filter(item => {
        const itemPath = path.join(SKILLS_DIR, item);
        return fs.statSync(itemPath).isDirectory();
      });
      for (const skillDir of skillDirs) {
        const skillPath = path.join(SKILLS_DIR, skillDir);
        this.validateSkillFrontMatter(skillPath, skillDir);
        this.validateSkillContent(skillPath, skillDir);
      }
    }
    // Validate all workflows
    this.log('Validating workflows...', 'info');
    if (fs.existsSync(WORKFLOWS_DIR)) {
      const workflowFiles = fs.readdirSync(WORKFLOWS_DIR).filter(file => file.endsWith('.md'));
      for (const workflowFile of workflowFiles) {
        const workflowPath = path.join(WORKFLOWS_DIR, workflowFile);
        const workflowName = workflowFile.replace('.md', '');
        this.validateWorkflowStructure(workflowPath, workflowName);
      }
    }
    // Cross-reference validation
    this.validateCrossReferences();
    // Security compliance validation
    this.validateSecurityCompliance();
    // Performance metrics validation
    this.validatePerformanceMetrics();
    // Generate summary
    this.generateSummary();
    return this.validationResults;
  }

  /**
   * Log a human-readable summary and persist the full results as JSON under
   * <AGENTS_DIR>/reports/advanced-validation.json (creating the directory).
   */
  generateSummary() {
    // BUG FIX: the original destructured `critical_issues` from
    // `this.validationResults`, which has no such property, so
    // `critical_issues.length` threw a TypeError on every run. The collected
    // critical issues live on `this.criticalIssues`.
    const { summary } = this.validationResults;
    this.log('=== Advanced Validation Summary ===', 'info');
    this.log(`Total validations: ${summary.total_validations}`, 'info');
    this.log(`Passed: ${summary.passed_validations}`, 'pass');
    this.log(`Failed: ${summary.failed_validations}`, summary.failed_validations > 0 ? 'fail' : 'info');
    this.log(`Warnings: ${summary.warnings}`, 'warn');
    this.log(`Critical issues: ${summary.critical_issues}`, 'critical');
    if (this.criticalIssues.length > 0) {
      this.log('Critical Issues:', 'critical');
      this.criticalIssues.forEach(issue => {
        this.log(`  - ${issue.name}: ${issue.message}`, 'critical');
      });
    }
    // Save validation results
    const validationReportPath = path.join(AGENTS_DIR, 'reports', 'advanced-validation.json');
    const reportsDir = path.dirname(validationReportPath);
    if (!fs.existsSync(reportsDir)) {
      fs.mkdirSync(reportsDir, { recursive: true });
    }
    fs.writeFileSync(validationReportPath, JSON.stringify(this.validationResults, null, 2));
    this.log(`Advanced validation report saved to: ${validationReportPath}`, 'info');
  }
}
// CLI interface
/**
 * CLI entry point: run the full advanced validation suite and exit with
 * status 1 when critical issues are found (or the run itself errors),
 * 0 otherwise.
 */
async function main() {
  const validator = new AdvancedValidator();
  try {
    const { summary } = await validator.runAdvancedValidation();
    const hasCriticalIssues = summary.critical_issues > 0;
    process.exit(hasCriticalIssues ? 1 : 0);
  } catch (error) {
    console.error('Advanced validation failed:', error);
    process.exit(1);
  }
}
// Export for use in other modules (CommonJS): consumers can `require` the
// AdvancedValidator class without triggering a validation run.
module.exports = { AdvancedValidator };
// Run only when executed directly (`node advanced-validator.js`); exits
// non-zero when critical issues are found.
if (require.main === module) {
main();
}
+188
View File
@@ -0,0 +1,188 @@
#!/bin/bash
# audit-skills.sh - Verify skill completeness and health
# Part of LCBP3-DMS Phase 2 improvements
#
# Exits 0 when every skill passes all checks, 1 otherwise.
set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Base directory (two levels above this script)
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
AGENTS_DIR="$BASE_DIR/.agents"
SKILLS_DIR="$AGENTS_DIR/skills"

echo "=== Skills Health Audit ==="
echo "Base directory: $BASE_DIR"
echo

# check_skill_health <skill_dir>
# Print a per-file / per-field report for one skill directory.
# Returns the number of issues found as its exit status, so callers must
# tolerate a nonzero return under `set -e`.
check_skill_health() {
    local skill_dir="$1"
    local skill_name
    skill_name="$(basename "$skill_dir")"
    local issues=0
    # Check for SKILL.md
    # BUG FIX: the original used ((issues++)); with `set -e` that arithmetic
    # command evaluates to 0 (status 1) on the first increment and aborted
    # the whole script. Plain assignments are always status 0.
    if [[ -f "$skill_dir/SKILL.md" ]]; then
        echo -e "${GREEN} OK${NC}: $skill_name/SKILL.md"
    else
        echo -e "${RED} MISSING${NC}: $skill_name/SKILL.md"
        issues=$((issues + 1))
    fi
    # Check for templates directory (optional)
    if [[ -d "$skill_dir/templates" ]]; then
        template_count=$(find "$skill_dir/templates" -name "*.md" -type f | wc -l)
        if [[ $template_count -gt 0 ]]; then
            echo -e "${GREEN} OK${NC}: $skill_name/templates ($template_count files)"
        else
            echo -e "${YELLOW} EMPTY${NC}: $skill_name/templates (no files)"
        fi
    fi
    # Check SKILL.md content if exists
    local skill_file="$skill_dir/SKILL.md"
    if [[ -f "$skill_file" ]]; then
        # Check for required front matter fields
        local required_fields=("name" "description" "version")
        for field in "${required_fields[@]}"; do
            if grep -q "^$field:" "$skill_file"; then
                echo -e "  ${GREEN} FIELD${NC}: $field"
            else
                echo -e "  ${RED} MISSING FIELD${NC}: $field"
                issues=$((issues + 1))
            fi
        done
        # Check for Role section
        if grep -q "^## Role$" "$skill_file"; then
            echo -e "  ${GREEN} SECTION${NC}: Role"
        else
            echo -e "  ${YELLOW} MISSING SECTION${NC}: Role"
            issues=$((issues + 1))
        fi
        # Check for Task section
        if grep -q "^## Task$" "$skill_file"; then
            echo -e "  ${GREEN} SECTION${NC}: Task"
        else
            echo -e "  ${YELLOW} MISSING SECTION${NC}: Task"
            issues=$((issues + 1))
        fi
    fi
    # Exit statuses wrap at 256; issue counts here are far below that.
    return $issues
}

# get_skill_version <skill_file>
# Print the version from a SKILL.md front matter, "unknown" when the field
# is absent, or "no_file" when the file does not exist.
get_skill_version() {
    local skill_file="$1"
    if [[ -f "$skill_file" ]]; then
        grep "^version:" "$skill_file" | head -1 | sed 's/version: *//' || echo "unknown"
    else
        echo "no_file"
    fi
}

# Check skills directory
if [[ ! -d "$SKILLS_DIR" ]]; then
    echo -e "${RED}ERROR: Skills directory not found${NC}"
    exit 1
fi

echo "Scanning skills directory: $SKILLS_DIR"
echo

# Get all skill directories (NUL-delimited to be safe with unusual names)
SKILL_DIRS=()
while IFS= read -r -d '' dir; do
    SKILL_DIRS+=("$dir")
done < <(find "$SKILLS_DIR" -maxdepth 1 -type d -not -path "$SKILLS_DIR" -print0 | sort -z)

echo "Found ${#SKILL_DIRS[@]} skill directories"
echo

# Audit each skill
TOTAL_ISSUES=0
SKILL_SUMMARY=()
for skill_dir in "${SKILL_DIRS[@]}"; do
    skill_name="$(basename "$skill_dir")"
    echo "Auditing: $skill_name"
    echo "------------------------"
    # BUG FIX: the original ran the function bare and read $? on the next
    # line; under `set -e` any nonzero return aborted the script before $?
    # could be captured. The `|| issues=$?` form suspends errexit for the
    # call and records the issue count.
    issues=0
    check_skill_health "$skill_dir" || issues=$?
    skill_version=$(get_skill_version "$skill_dir/SKILL.md")
    SKILL_SUMMARY+=("$skill_name:$issues:$skill_version")
    TOTAL_ISSUES=$((TOTAL_ISSUES + issues))
    echo
done

# Summary report
echo "=== Skills Audit Summary ==="
echo
echo "Skill Status:"
echo "-----------"
for summary in "${SKILL_SUMMARY[@]}"; do
    IFS=':' read -r name issues version <<< "$summary"
    if [[ $issues -eq 0 ]]; then
        echo -e "${GREEN} HEALTHY${NC}: $name (v$version)"
    else
        echo -e "${RED} ISSUES${NC}: $name (v$version) - $issues issues"
    fi
done
echo

# Check skills.md version consistency against the global VERSION file
SKILLS_VERSION_FILE="$SKILLS_DIR/VERSION"
if [[ -f "$SKILLS_VERSION_FILE" ]]; then
    global_version=$(grep "^version:" "$SKILLS_VERSION_FILE" | sed 's/version: *//')
    echo "Global skills version: v$global_version"
    echo
    # Check for version mismatches
    echo "Version Consistency Check:"
    echo "------------------------"
    VERSION_MISMATCHES=0
    for summary in "${SKILL_SUMMARY[@]}"; do
        IFS=':' read -r name issues version <<< "$summary"
        if [[ "$version" != "unknown" && "$version" != "no_file" && "$version" != "$global_version" ]]; then
            echo -e "${YELLOW} MISMATCH${NC}: $name is v$version, global is v$global_version"
            # Plain assignment: ((VERSION_MISMATCHES++)) would abort under
            # `set -e` on the first increment (see note above).
            VERSION_MISMATCHES=$((VERSION_MISMATCHES + 1))
        fi
    done
    if [[ $VERSION_MISMATCHES -eq 0 ]]; then
        echo -e "${GREEN} All skills match global version${NC}"
    fi
fi
echo

# Overall health
if [[ $TOTAL_ISSUES -eq 0 ]]; then
    echo -e "${GREEN}=== SUCCESS: All skills healthy ===${NC}"
    echo "Total skills: ${#SKILL_DIRS[@]}"
    exit 0
else
    echo -e "${RED}=== ISSUES FOUND: $TOTAL_ISSUES total issues ===${NC}"
    echo
    echo "Recommendations:"
    echo "1. Fix missing SKILL.md files"
    echo "2. Add required front matter fields"
    echo "3. Ensure Role and Task sections exist"
    echo "4. Align skill versions with global version"
    exit 1
fi
+149
View File
@@ -0,0 +1,149 @@
#!/bin/bash
# sync-workflows.sh - Sync workflow references between .agents and .windsurf
# Part of LCBP3-DMS Phase 2 improvements
#
# Exits 0 when every expected workflow file exists, 1 otherwise.
set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Base directory (two levels above this script)
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
AGENTS_DIR="$BASE_DIR/.agents"
WINDSURF_DIR="$BASE_DIR/.windsurf"
WORKFLOWS_DIR="$WINDSURF_DIR/workflows"

echo "=== Workflow Synchronization Check ==="
echo "Base directory: $BASE_DIR"
echo

# check_workflow <name>
# Report whether a workflow file exists; returns 0 when present, 1 when not.
check_workflow() {
    local workflow_name="$1"
    local workflow_file="$WORKFLOWS_DIR/$workflow_name"
    if [[ -f "$workflow_file" ]]; then
        echo -e "${GREEN} EXISTS${NC}: $workflow_name"
        return 0
    else
        echo -e "${RED} MISSING${NC}: $workflow_name"
        return 1
    fi
}

# list_workflows
# Print the sorted paths of all *.md files in the workflows directory.
list_workflows() {
    if [[ -d "$WORKFLOWS_DIR" ]]; then
        find "$WORKFLOWS_DIR" -name "*.md" -type f | sort
    else
        echo "No workflows directory found"
    fi
}

# Check directories
echo "Checking directory structure..."
if [[ -d "$AGENTS_DIR" ]]; then
    echo -e "${GREEN} OK${NC}: .agents directory exists"
else
    echo -e "${RED} ERROR${NC}: .agents directory not found"
    exit 1
fi
if [[ -d "$WINDSURF_DIR" ]]; then
    echo -e "${GREEN} OK${NC}: .windsurf directory exists"
else
    echo -e "${RED} ERROR${NC}: .windsurf directory not found"
    exit 1
fi
if [[ -d "$WORKFLOWS_DIR" ]]; then
    echo -e "${GREEN} OK${NC}: workflows directory exists"
else
    echo -e "${RED} ERROR${NC}: workflows directory not found"
    exit 1
fi
echo

# Expected workflows based on README documentation
echo "Checking expected workflows..."
EXPECTED_WORKFLOWS=(
    "00-speckit.all.md"
    "01-speckit.constitution.md"
    "02-speckit.specify.md"
    "03-speckit.clarify.md"
    "04-speckit.plan.md"
    "05-speckit.tasks.md"
    "06-speckit.analyze.md"
    "07-speckit.implement.md"
    "08-speckit.checker.md"
    "09-speckit.tester.md"
    "10-speckit.reviewer.md"
    "11-speckit.validate.md"
    "speckit.prepare.md"
    "schema-change.md"
    "create-backend-module.md"
    "create-frontend-page.md"
    "deploy.md"
    "review.md"
    "util-speckit.checklist.md"
    "util-speckit.diff.md"
    "util-speckit.migrate.md"
    "util-speckit.quizme.md"
    "util-speckit.status.md"
    "util-speckit.taskstoissues.md"
)

MISSING_WORKFLOWS=0
for workflow in "${EXPECTED_WORKFLOWS[@]}"; do
    if ! check_workflow "$workflow"; then
        # BUG FIX: the original used ((MISSING_WORKFLOWS++)); under `set -e`
        # that arithmetic command evaluates to 0 (status 1) on the first
        # increment and aborted the script. Plain assignment is status 0.
        MISSING_WORKFLOWS=$((MISSING_WORKFLOWS + 1))
    fi
done
echo

# List all actual workflows
echo "All workflows in $WORKFLOWS_DIR:"
echo "--------------------------------"
while IFS= read -r workflow; do
    echo "  $(basename "$workflow")"
done < <(list_workflows)
echo

# Check for orphaned workflows (present on disk but not in the expected list)
echo "Checking for unexpected workflows..."
ACTUAL_WORKFLOWS=()
while IFS= read -r workflow; do
    ACTUAL_WORKFLOWS+=("$(basename "$workflow")")
done < <(list_workflows)
for actual_workflow in "${ACTUAL_WORKFLOWS[@]}"; do
    # Space-padded containment match against the flattened expected list.
    if [[ ! " ${EXPECTED_WORKFLOWS[*]} " =~ " ${actual_workflow} " ]]; then
        echo -e "${YELLOW} UNEXPECTED${NC}: $actual_workflow"
    fi
done
echo

# Summary
if [[ $MISSING_WORKFLOWS -eq 0 ]]; then
    echo -e "${GREEN}=== SUCCESS: All expected workflows present ===${NC}"
    echo "Total workflows: ${#ACTUAL_WORKFLOWS[@]}"
    exit 0
else
    echo -e "${RED}=== FAILED: $MISSING_WORKFLOWS workflows missing ===${NC}"
    echo
    echo "To fix missing workflows:"
    echo "1. Create missing workflow files in $WORKFLOWS_DIR"
    echo "2. Use existing workflows as templates"
    echo "3. Run this script again to verify"
    exit 1
fi
+108
View File
@@ -0,0 +1,108 @@
#!/bin/bash
# validate-versions.sh - Check version consistency across .agents files
# Part of LCBP3-DMS Phase 2 improvements
#
# Exits 0 when every checked file carries EXPECTED_VERSION, 1 otherwise.
set -euo pipefail

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Base directory (two levels above this script)
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
AGENTS_DIR="$BASE_DIR/.agents"

# Expected version (should match LCBP3 version)
EXPECTED_VERSION="1.8.6"

echo "=== .agents Version Validation ==="
echo "Base directory: $BASE_DIR"
echo "Expected version: $EXPECTED_VERSION"
echo

# extract_version <file> <BRE pattern>
# Print the first X.Y.Z version matching the pattern, "NOT_FOUND" when the
# pattern does not match, or "FILE_NOT_FOUND" when the file is absent.
extract_version() {
    local file="$1"
    local pattern="$2"
    if [[ -f "$file" ]]; then
        grep -o "$pattern" "$file" | head -1 | sed 's/.*\([0-9]\+\.[0-9]\+\.[0-9]\+\).*/\1/' || echo "NOT_FOUND"
    else
        echo "FILE_NOT_FOUND"
    fi
}

# Files to check, mapped to the BRE that locates their version string
declare -A FILES_TO_CHECK=(
    ["$AGENTS_DIR/README.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
    ["$AGENTS_DIR/skills/VERSION"]="version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
    ["$AGENTS_DIR/rules/00-project-context.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
    ["$AGENTS_DIR/skills/skills.md"]="V\([0-9]\+\.[0-9]\+\.[0-9]\+\)"
)

# Track issues
ISSUES=0
echo "Checking version consistency..."
echo
for file in "${!FILES_TO_CHECK[@]}"; do
    pattern="${FILES_TO_CHECK[$file]}"
    relative_path="${file#$BASE_DIR/}"
    version=$(extract_version "$file" "$pattern")
    # BUG FIX: the original used ((ISSUES++)); with `set -e` that arithmetic
    # command evaluates to 0 (status 1) on the first increment and aborted
    # the script. Plain assignments are always status 0.
    if [[ "$version" == "NOT_FOUND" ]] || [[ "$version" == "FILE_NOT_FOUND" ]]; then
        echo -e "${RED} ERROR${NC}: $relative_path - Version not found"
        ISSUES=$((ISSUES + 1))
    elif [[ "$version" != "$EXPECTED_VERSION" ]]; then
        echo -e "${RED} ERROR${NC}: $relative_path - Found v$version, expected v$EXPECTED_VERSION"
        ISSUES=$((ISSUES + 1))
    else
        echo -e "${GREEN} OK${NC}: $relative_path - v$version"
    fi
done
echo

# Check for version mismatches in skill files
echo "Checking skill file versions..."
SKILL_VERSIONS_FILE="$AGENTS_DIR/skills/VERSION"
if [[ -f "$SKILL_VERSIONS_FILE" ]]; then
    skills_version=$(extract_version "$SKILL_VERSIONS_FILE" "version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)")
    echo "Skills version file: v$skills_version"
fi

# Check workflow presence (in .windsurf/workflows); count only, no versions
WORKFLOWS_DIR="$BASE_DIR/.windsurf/workflows"
if [[ -d "$WORKFLOWS_DIR" ]]; then
    echo "Checking workflow files..."
    workflow_count=0
    for workflow in "$WORKFLOWS_DIR"/*.md; do
        if [[ -f "$workflow" ]]; then
            workflow_count=$((workflow_count + 1))
        fi
    done
    echo -e "${GREEN} OK${NC}: Found $workflow_count workflow files"
else
    echo -e "${YELLOW} WARNING${NC}: Workflows directory not found at $WORKFLOWS_DIR"
fi
echo

# Summary
if [[ $ISSUES -eq 0 ]]; then
    echo -e "${GREEN}=== SUCCESS: All versions consistent ===${NC}"
    exit 0
else
    echo -e "${RED}=== FAILED: $ISSUES version issues found ===${NC}"
    echo
    echo "To fix version issues:"
    echo "1. Update files to use v$EXPECTED_VERSION"
    echo "2. Ensure LCBP3 project version matches"
    echo "3. Run this script again to verify"
    exit 1
fi
+516
View File
@@ -0,0 +1,516 @@
# ci-hooks.ps1 - Continuous integration hooks for .agents (PowerShell version)
# Part of LCBP3-DMS Phase 3 enhancements
#
# $Command selects which hook to run; defaults to "help".
param(
[Parameter(Mandatory=$false)]
[ValidateSet("pre-commit", "pre-push", "ci-pipeline", "install-hooks", "help")]
[string]$Command = "help"
)
# Configuration: two levels above this script is treated as the repo root.
$BaseDir = Split-Path -Parent (Split-Path -Parent $PSScriptRoot)
$AgentsDir = Join-Path $BaseDir ".agents"
$CILogDir = Join-Path $AgentsDir "logs\ci"
$CIReportDir = Join-Path $AgentsDir "reports\ci"
# Ensure log/report directories exist before any hook writes to them
if (-not (Test-Path $CILogDir)) { New-Item -ItemType Directory -Path $CILogDir -Force | Out-Null }
if (-not (Test-Path $CIReportDir)) { New-Item -ItemType Directory -Path $CIReportDir -Force | Out-Null }
# Colors for output.
# NOTE(review): these are ANSI escape sequences, but Write-Host's
# -ForegroundColor parameter expects [ConsoleColor] names ("Red", "Green",
# ...) and throws when given these strings — confirm intended usage.
$Colors = @{
Red = "`e[0;31m"
Green = "`e[0;32m"
Yellow = "`e[1;33m"
Blue = "`e[0;34m"
NoColor = "`e[0m"
}
# Logging function
# Logging function
function Write-CILog {
    <#
    .SYNOPSIS
    Append a timestamped entry to the daily CI log file and echo the
    message to the console in a level-appropriate color.
    .PARAMETER Level
    Severity tag: INFO, PASS, WARN or FAIL (anything else prints uncolored).
    .PARAMETER Message
    Text to log and display.
    #>
    param(
        [string]$Level,
        [string]$Message
    )
    $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
    $logFile = Join-Path $CILogDir "ci-$(Get-Date -Format 'yyyy-MM-dd').log"
    "$timestamp [$Level] $Message" | Out-File -FilePath $logFile -Append
    # BUG FIX: -ForegroundColor requires [ConsoleColor] names. The original
    # passed the ANSI escape strings from $Colors (e.g. "`e[0;34m"), which
    # cannot be converted to ConsoleColor and made Write-Host throw on every
    # call. Use the enum names directly.
    switch ($Level) {
        "INFO" { Write-Host $Message -ForegroundColor Blue }
        "PASS" { Write-Host $Message -ForegroundColor Green }
        "WARN" { Write-Host $Message -ForegroundColor Yellow }
        "FAIL" { Write-Host $Message -ForegroundColor Red }
        default { Write-Host $Message }
    }
}
# Pre-commit hook
# Pre-commit hook
function Invoke-PreCommitHook {
    <#
    .SYNOPSIS
    Run the pre-commit validation suite (version consistency, skill audit,
    integration tests, forbidden-pattern scan), write a JSON report, and
    return 0 on success or 1 on a blocking failure.
    #>
    Write-CILog "INFO" "Running pre-commit validation..."
    $exitCode = 0
    # 1. Run version validation
    Write-CILog "INFO" "Checking version consistency..."
    $versionScript = Join-Path $AgentsDir "scripts\powershell\validate-versions.ps1"
    if (Test-Path $versionScript) {
        try {
            # BUG FIX: try/catch alone never sees a child script's nonzero
            # exit code (only terminating errors), so failures did not block.
            # Reset and check $LASTEXITCODE explicitly.
            $global:LASTEXITCODE = 0
            & $versionScript | Out-File -FilePath (Join-Path $CILogDir "pre-commit-versions.log") -Append
            if ($LASTEXITCODE -ne 0) { throw "validate-versions exited with code $LASTEXITCODE" }
            Write-CILog "PASS" "Version validation passed"
        } catch {
            Write-CILog "FAIL" "Version validation failed"
            $exitCode = 1
        }
    } else {
        Write-CILog "WARN" "Version validation script not found"
    }
    # 2. Run skill audit
    Write-CILog "INFO" "Auditing skills..."
    $auditScript = Join-Path $AgentsDir "scripts\powershell\audit-skills.ps1"
    if (Test-Path $auditScript) {
        try {
            $global:LASTEXITCODE = 0
            & $auditScript | Out-File -FilePath (Join-Path $CILogDir "pre-commit-skills.log") -Append
            if ($LASTEXITCODE -ne 0) { throw "audit-skills exited with code $LASTEXITCODE" }
            Write-CILog "PASS" "Skill audit passed"
        } catch {
            Write-CILog "FAIL" "Skill audit failed"
            $exitCode = 1
        }
    } else {
        Write-CILog "WARN" "Skill audit script not found"
    }
    # 3. Run integration tests (if Node.js available); failures warn only.
    if (Get-Command node -ErrorAction SilentlyContinue) {
        Write-CILog "INFO" "Running integration tests..."
        $testScript = Join-Path $AgentsDir "tests\skill-integration.test.js"
        if (Test-Path $testScript) {
            try {
                $global:LASTEXITCODE = 0
                node $testScript | Out-File -FilePath (Join-Path $CILogDir "pre-commit-tests.log") -Append
                if ($LASTEXITCODE -ne 0) { throw "integration tests exited with code $LASTEXITCODE" }
                Write-CILog "PASS" "Integration tests passed"
            } catch {
                Write-CILog "WARN" "Integration tests failed (non-blocking)"
            }
        } else {
            Write-CILog "WARN" "Integration test script not found"
        }
    } else {
        Write-CILog "WARN" "Node.js not available, skipping integration tests"
    }
    # 4. Check for forbidden patterns in skill markdown (warn only)
    Write-CILog "INFO" "Checking for forbidden patterns..."
    $forbiddenPatterns = @("TODO", "FIXME", "XXX", "HACK")
    $foundForbidden = $false
    foreach ($pattern in $forbiddenPatterns) {
        $skillsDir = Join-Path $AgentsDir "skills"
        if (Test-Path $skillsDir) {
            # BUG FIX: Select-String has no -Recurse parameter; recursion
            # belongs to Get-ChildItem. Also avoid assigning to $matches,
            # which shadows PowerShell's automatic regex-match variable.
            $patternHits = Get-ChildItem -Path $skillsDir -Filter "*.md" -Recurse |
                Select-String -Pattern $pattern
            if ($patternHits) {
                Write-CILog "WARN" "Found forbidden pattern: $pattern"
                $foundForbidden = $true
            }
        }
    }
    if (-not $foundForbidden) {
        Write-CILog "PASS" "No forbidden patterns found"
    }
    # Generate pre-commit report (JSON, timestamped filename)
    $reportFile = Join-Path $CIReportDir "pre-commit-$(Get-Date -Format 'yyyyMMdd-HHmmss').json"
    $report = @{
        timestamp = (Get-Date -Format "yyyy-MM-ddTHH:mm:sszzz")
        hook_type = "pre-commit"
        exit_code = $exitCode
        checks_performed = @(
            "version_validation",
            "skill_audit",
            "integration_tests",
            "forbidden_patterns"
        )
        log_files = @(
            "pre-commit-versions.log",
            "pre-commit-skills.log",
            "pre-commit-tests.log"
        )
    }
    $report | ConvertTo-Json -Depth 10 | Out-File -FilePath $reportFile
    Write-CILog "INFO" "Pre-commit report saved to: $reportFile"
    if ($exitCode -eq 0) {
        Write-CILog "PASS" "Pre-commit validation completed successfully"
    } else {
        Write-CILog "FAIL" "Pre-commit validation failed"
    }
    return $exitCode
}
# Pre-push hook
# Pre-push hook: runs the full health check (blocking) plus optional
# advanced, dependency and performance validation (non-blocking), then
# writes a JSON summary report. Returns 0 when all blocking checks pass,
# 1 otherwise.
#
# BUGFIX: native commands (node) never raise terminating errors in
# PowerShell, so the original try/catch blocks could never fire and a
# failing check was always reported as PASS. Success is now determined
# from $LASTEXITCODE.
function Invoke-PrePushHook {
    Write-CILog "INFO" "Running pre-push validation..."
    $exitCode = 0
    $nodeAvailable = [bool](Get-Command node -ErrorAction SilentlyContinue)

    # 1. Full health check (blocking)
    Write-CILog "INFO" "Running full health check..."
    if ($nodeAvailable) {
        $healthScript = Join-Path $AgentsDir "scripts\health-monitor.js"
        if (Test-Path $healthScript) {
            node $healthScript 2>&1 | Out-File -FilePath (Join-Path $CILogDir "pre-push-health.log") -Append
            if ($LASTEXITCODE -eq 0) {
                Write-CILog "PASS" "Health check passed"
            } else {
                Write-CILog "FAIL" "Health check failed"
                $exitCode = 1
            }
        } else {
            Write-CILog "WARN" "Health monitor script not found"
        }
    } else {
        Write-CILog "WARN" "Node.js not available, using basic health check"
        $auditScript = Join-Path $AgentsDir "scripts\powershell\audit-skills.ps1"
        if (Test-Path $auditScript) {
            # PowerShell script invocation CAN raise terminating errors, so
            # try/catch is meaningful here (unlike for native commands).
            try {
                & $auditScript 2>&1 | Out-File -FilePath (Join-Path $CILogDir "pre-push-basic.log") -Append
                Write-CILog "PASS" "Basic health check passed"
            } catch {
                Write-CILog "FAIL" "Basic health check failed"
                $exitCode = 1
            }
        }
    }

    # 2-4. Optional Node-based validators. Failures only warn; the three
    # previously copy-pasted stanzas are driven from one table.
    if ($nodeAvailable) {
        $optionalChecks = @(
            @{ Script = "scripts\advanced-validator.js";   Log = "pre-push-advanced.log";     Name = "Advanced validation" }
            @{ Script = "scripts\dependency-validator.js"; Log = "pre-push-dependencies.log"; Name = "Dependency validation" }
            @{ Script = "scripts\performance-monitor.js";  Log = "pre-push-performance.log";  Name = "Performance monitoring" }
        )
        foreach ($check in $optionalChecks) {
            $scriptPath = Join-Path $AgentsDir $check.Script
            if (Test-Path $scriptPath) {
                Write-CILog "INFO" "Running $($check.Name.ToLower())..."
                node $scriptPath 2>&1 | Out-File -FilePath (Join-Path $CILogDir $check.Log) -Append
                if ($LASTEXITCODE -eq 0) {
                    Write-CILog "PASS" "$($check.Name) passed"
                } else {
                    Write-CILog "WARN" "$($check.Name) found issues (non-blocking)"
                }
            }
        }
    }

    # Generate pre-push report
    $reportFile = Join-Path $CIReportDir "pre-push-$(Get-Date -Format 'yyyyMMdd-HHmmss').json"
    $report = @{
        timestamp = (Get-Date -Format "yyyy-MM-ddTHH:mm:sszzz")
        hook_type = "pre-push"
        exit_code = $exitCode
        checks_performed = @(
            "health_check"
            "advanced_validation"
            "dependency_validation"
            "performance_monitoring"
        )
        log_files = @(
            "pre-push-health.log"
            "pre-push-advanced.log"
            "pre-push-dependencies.log"
            "pre-push-performance.log"
        )
    }
    $report | ConvertTo-Json -Depth 10 | Out-File -FilePath $reportFile
    Write-CILog "INFO" "Pre-push report saved to: $reportFile"
    if ($exitCode -eq 0) {
        Write-CILog "PASS" "Pre-push validation completed successfully"
    } else {
        Write-CILog "FAIL" "Pre-push validation failed"
    }
    return $exitCode
}
# CI pipeline hook
# Full CI pipeline: environment validation, test suite, comprehensive
# validation, and artifact generation in a timestamped workspace.
# Returns 0 when all blocking checks pass, 1 otherwise.
#
# BUGFIXES: (1) native commands (node/npm) never raise terminating errors,
# so the original try/catch blocks could never detect failure — success is
# now determined from $LASTEXITCODE; (2) `node --version` is no longer
# called unconditionally when node may be missing.
function Invoke-CIPipelineHook {
    Write-CILog "INFO" "Running CI pipeline validation..."
    $exitCode = 0
    $pipelineStart = Get-Date

    # Create pipeline workspace for all logs and the consolidated report
    $workspace = Join-Path $CIReportDir "pipeline-$(Get-Date -Format 'yyyyMMdd-HHmmss')"
    New-Item -ItemType Directory -Path $workspace -Force | Out-Null

    # Helper: run a Node script, capture output to a workspace log, and
    # report success via the native exit code.
    function Invoke-NodeStep([string]$ScriptPath, [string]$LogName) {
        node $ScriptPath 2>&1 | Out-File -FilePath (Join-Path $workspace $LogName)
        return ($LASTEXITCODE -eq 0)
    }

    # 1. Environment validation
    Write-CILog "INFO" "Validating CI environment..."
    $requiredTools = @("node", "npm")
    foreach ($tool in $requiredTools) {
        if (Get-Command $tool -ErrorAction SilentlyContinue) {
            Write-CILog "PASS" "Tool available: $tool"
        } else {
            Write-CILog "FAIL" "Tool missing: $tool"
            $exitCode = 1
        }
    }

    # Check Node.js modules; install them if `npm list` reports problems.
    $packageJson = Join-Path $AgentsDir "package.json"
    if (Test-Path $packageJson) {
        Push-Location $AgentsDir
        try {
            npm list --depth=0 2>&1 | Out-Null
            if ($LASTEXITCODE -eq 0) {
                Write-CILog "PASS" "Node.js dependencies installed"
            } else {
                Write-CILog "WARN" "Installing Node.js dependencies..."
                npm install 2>&1 | Out-File -FilePath (Join-Path $workspace "npm-install.log")
                if ($LASTEXITCODE -ne 0) {
                    Write-CILog "FAIL" "Failed to install Node.js dependencies"
                    $exitCode = 1
                }
            }
        } finally {
            # Always restore the caller's working directory.
            Pop-Location
        }
    }

    # 2. Full test suite (blocking)
    Write-CILog "INFO" "Running full test suite..."
    $integrationTest = Join-Path $AgentsDir "tests\skill-integration.test.js"
    if (Test-Path $integrationTest) {
        if (Invoke-NodeStep $integrationTest "integration-tests.log") {
            Write-CILog "PASS" "Integration tests passed"
        } else {
            Write-CILog "FAIL" "Integration tests failed"
            $exitCode = 1
        }
    }
    $workflowTest = Join-Path $AgentsDir "tests\workflow-validation.test.js"
    if (Test-Path $workflowTest) {
        if (Invoke-NodeStep $workflowTest "workflow-tests.log") {
            Write-CILog "PASS" "Workflow validation tests passed"
        } else {
            Write-CILog "FAIL" "Workflow validation tests failed"
            $exitCode = 1
        }
    }

    # 3. Comprehensive validation (health is blocking, the rest warn only)
    Write-CILog "INFO" "Running comprehensive validation..."
    $healthScript = Join-Path $AgentsDir "scripts\health-monitor.js"
    if (Test-Path $healthScript) {
        if (Invoke-NodeStep $healthScript "health-check.log") {
            Write-CILog "PASS" "Health monitoring passed"
        } else {
            Write-CILog "FAIL" "Health monitoring failed"
            $exitCode = 1
        }
    }
    $advancedScript = Join-Path $AgentsDir "scripts\advanced-validator.js"
    if (Test-Path $advancedScript) {
        if (Invoke-NodeStep $advancedScript "advanced-validation.log") {
            Write-CILog "PASS" "Advanced validation passed"
        } else {
            Write-CILog "WARN" "Advanced validation found issues"
        }
    }
    $dependencyScript = Join-Path $AgentsDir "scripts\dependency-validator.js"
    if (Test-Path $dependencyScript) {
        if (Invoke-NodeStep $dependencyScript "dependency-validation.log") {
            Write-CILog "PASS" "Dependency validation passed"
        } else {
            Write-CILog "WARN" "Dependency validation found issues"
        }
    }
    $performanceScript = Join-Path $AgentsDir "scripts\performance-monitor.js"
    if (Test-Path $performanceScript) {
        if (Invoke-NodeStep $performanceScript "performance-monitor.log") {
            Write-CILog "PASS" "Performance monitoring passed"
        } else {
            Write-CILog "WARN" "Performance monitoring found issues"
        }
    }

    # 4. Generate artifacts
    Write-CILog "INFO" "Generating CI artifacts..."
    $pipelineEnd = Get-Date
    $duration = ($pipelineEnd - $pipelineStart).TotalSeconds
    # Guard: node may be missing (already reported as FAIL above); the
    # original called `node --version` unconditionally, which errors here.
    $nodeVersion = if (Get-Command node -ErrorAction SilentlyContinue) { (node --version) } else { "unavailable" }

    # Consolidated report
    $reportFile = Join-Path $workspace "ci-pipeline-report.json"
    $report = @{
        timestamp = (Get-Date -Format "yyyy-MM-ddTHH:mm:sszzz")
        pipeline_type = "full_ci"
        duration_seconds = [int]$duration
        exit_code = $exitCode
        environment = @{
            node_version = $nodeVersion
            platform = $env:OS
            working_directory = $BaseDir
        }
        checks_performed = @(
            "environment_validation"
            "integration_tests"
            "workflow_validation_tests"
            "health_monitoring"
            "advanced_validation"
            "dependency_validation"
            "performance_monitoring"
        )
        artifacts = @(
            "integration-tests.log"
            "workflow-tests.log"
            "health-check.log"
            "advanced-validation.log"
            "dependency-validation.log"
            "performance-monitor.log"
            "npm-install.log"
        )
        workspace = $workspace
    }
    $report | ConvertTo-Json -Depth 10 | Out-File -FilePath $reportFile
    Write-CILog "INFO" "CI pipeline report saved to: $reportFile"
    Write-CILog "INFO" "CI artifacts saved to: $workspace"
    Write-CILog "INFO" "Pipeline duration: $([int]$duration)s"
    if ($exitCode -eq 0) {
        Write-CILog "PASS" "CI pipeline completed successfully"
    } else {
        Write-CILog "FAIL" "CI pipeline failed"
    }
    return $exitCode
}
# Install Git hooks
# Install Git hooks: writes pre-commit and pre-push hook scripts into
# .agents\scripts\git-hooks and, when a .git\hooks directory exists,
# copies them into place so Git runs them automatically.
# NOTE(review): the generated hooks are bash scripts that delegate to
# ci-hooks.sh (not this .ps1) — presumably so the same hooks work in
# Git-for-Windows' bash environment; confirm that is intended.
function Install-GitHooks {
    Write-CILog "INFO" "Installing Git hooks..."
    $hooksDir = Join-Path $BaseDir ".git\hooks"
    $agentsHooksDir = Join-Path $AgentsDir "scripts\git-hooks"
    # Create git-hooks directory
    if (-not (Test-Path $agentsHooksDir)) {
        New-Item -ItemType Directory -Path $agentsHooksDir -Force | Out-Null
    }
    # Create pre-commit hook (here-string content is written verbatim;
    # the closing '@ must stay at the start of its line)
    $preCommitContent = @'
#!/bin/bash
# Pre-commit hook for .agents validation
echo "Running .agents pre-commit validation..."
if bash .agents/scripts/ci-hooks.sh pre-commit; then
    echo "Pre-commit validation passed"
    exit 0
else
    echo "Pre-commit validation failed"
    exit 1
fi
'@
    $preCommitContent | Out-File -FilePath (Join-Path $agentsHooksDir "pre-commit") -Encoding UTF8
    # Create pre-push hook
    $prePushContent = @'
#!/bin/bash
# Pre-push hook for .agents validation
echo "Running .agents pre-push validation..."
if bash .agents/scripts/ci-hooks.sh pre-push; then
    echo "Pre-push validation passed"
    exit 0
else
    echo "Pre-push validation failed"
    exit 1
fi
'@
    $prePushContent | Out-File -FilePath (Join-Path $agentsHooksDir "pre-push") -Encoding UTF8
    # Install hooks if .git directory exists; otherwise they remain staged
    # under .agents\scripts\git-hooks for manual installation.
    if (Test-Path $hooksDir) {
        Copy-Item (Join-Path $agentsHooksDir "pre-commit") $hooksDir -Force
        Copy-Item (Join-Path $agentsHooksDir "pre-push") $hooksDir -Force
        Write-CILog "PASS" "Git hooks installed successfully"
    } else {
        Write-CILog "WARN" "Git repository not found, hooks copied to .agents\scripts\git-hooks"
    }
}
# Main execution: dispatch the requested -Command to its handler.
# Validation hooks propagate their numeric result as the process exit
# code; unrecognized commands exit with 1.
if ($Command -eq "pre-commit") {
    exit (Invoke-PreCommitHook)
} elseif ($Command -eq "pre-push") {
    exit (Invoke-PrePushHook)
} elseif ($Command -eq "ci-pipeline") {
    exit (Invoke-CIPipelineHook)
} elseif ($Command -eq "install-hooks") {
    Install-GitHooks
} elseif ($Command -eq "help") {
    Write-Host "Usage: .\ci-hooks.ps1 -Command {pre-commit|pre-push|ci-pipeline|install-hooks|help}"
    Write-Host ""
    Write-Host "Commands:"
    Write-Host " pre-commit - Run pre-commit validation"
    Write-Host " pre-push - Run pre-push validation"
    Write-Host " ci-pipeline - Run full CI pipeline"
    Write-Host " install-hooks - Install Git hooks"
    Write-Host " help - Show this help"
} else {
    Write-Host "Unknown command: $Command"
    Write-Host "Use 'help' to see available commands"
    exit 1
}
+445
View File
@@ -0,0 +1,445 @@
#!/bin/bash
# ci-hooks.sh - Continuous integration hooks for .agents
# Part of LCBP3-DMS Phase 3 enhancements
# Strict mode: abort on errors (-e), unset variables (-u), and failures
# anywhere in a pipeline (pipefail).
set -euo pipefail
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Base directory: repository root, two levels above this script
# (.agents/scripts -> repo root).
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
AGENTS_DIR="$BASE_DIR/.agents"
# CI configuration: where logs and JSON reports are written
CI_LOG_DIR="$AGENTS_DIR/logs/ci"
CI_REPORT_DIR="$AGENTS_DIR/reports/ci"
# Ensure directories exist
mkdir -p "$CI_LOG_DIR" "$CI_REPORT_DIR"
# Logging function
# Log a CI message to the daily log file and to the console.
#   $1 = level (INFO|PASS|WARN|FAIL), $2 = message.
# The log file gets a timestamped plain-text line; the console gets a
# single colour-coded line. BUGFIX: the original piped the timestamped
# line through `tee -a`, which also printed it to stdout, so every
# message appeared twice on the console.
ci_log() {
    local level="$1"
    local message="$2"
    local timestamp
    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
    local log_file="$CI_LOG_DIR/ci-$(date '+%Y-%m-%d').log"
    # Timestamped record goes to the persistent log only.
    echo "[$timestamp] [$level] $message" >> "$log_file"
    # Console output with colors
    case "$level" in
        "INFO") echo -e "${BLUE}$message${NC}" ;;
        "PASS") echo -e "${GREEN}$message${NC}" ;;
        "WARN") echo -e "${YELLOW}$message${NC}" ;;
        "FAIL") echo -e "${RED}$message${NC}" ;;
        *) echo "$message" ;;
    esac
}
# Pre-commit hook
# Pre-commit hook: version validation and skill audit are blocking;
# integration tests and forbidden-pattern scanning only warn.
# Writes a JSON summary report; returns 0 on success, 1 on failure.
pre_commit_hook() {
    ci_log "INFO" "Running pre-commit validation..."
    local exit_code=0
    # 1. Run version validation (blocking)
    ci_log "INFO" "Checking version consistency..."
    if "$AGENTS_DIR/scripts/bash/validate-versions.sh" >> "$CI_LOG_DIR/pre-commit-versions.log" 2>&1; then
        ci_log "PASS" "Version validation passed"
    else
        ci_log "FAIL" "Version validation failed"
        exit_code=1
    fi
    # 2. Run skill audit (blocking)
    ci_log "INFO" "Auditing skills..."
    if "$AGENTS_DIR/scripts/bash/audit-skills.sh" >> "$CI_LOG_DIR/pre-commit-skills.log" 2>&1; then
        ci_log "PASS" "Skill audit passed"
    else
        ci_log "FAIL" "Skill audit failed"
        exit_code=1
    fi
    # 3. Run integration tests (if Node.js available); non-blocking —
    # a failure warns but does not veto the commit.
    if command -v node >/dev/null 2>&1; then
        ci_log "INFO" "Running integration tests..."
        if node "$AGENTS_DIR/tests/skill-integration.test.js" >> "$CI_LOG_DIR/pre-commit-tests.log" 2>&1; then
            ci_log "PASS" "Integration tests passed"
        else
            ci_log "WARN" "Integration tests failed (non-blocking)"
        fi
    else
        ci_log "WARN" "Node.js not available, skipping integration tests"
    fi
    # 4. Check for forbidden patterns in skill markdown files (warn only)
    ci_log "INFO" "Checking for forbidden patterns..."
    local forbidden_patterns=("TODO" "FIXME" "XXX" "HACK")
    local found_forbidden=false
    for pattern in "${forbidden_patterns[@]}"; do
        if grep -r "$pattern" "$AGENTS_DIR/skills" --include="*.md" >/dev/null 2>&1; then
            ci_log "WARN" "Found forbidden pattern: $pattern"
            found_forbidden=true
        fi
    done
    if [ "$found_forbidden" = false ]; then
        ci_log "PASS" "No forbidden patterns found"
    fi
    # Generate pre-commit report (heredoc body is written verbatim; keep
    # the JSON and the EOF terminator unindented)
    local report_file="$CI_REPORT_DIR/pre-commit-$(date '+%Y%m%d-%H%M%S').json"
    cat > "$report_file" << EOF
{
  "timestamp": "$(date -Iseconds)",
  "hook_type": "pre-commit",
  "exit_code": $exit_code,
  "checks_performed": [
    "version_validation",
    "skill_audit",
    "integration_tests",
    "forbidden_patterns"
  ],
  "log_files": [
    "pre-commit-versions.log",
    "pre-commit-skills.log",
    "pre-commit-tests.log"
  ]
}
EOF
    ci_log "INFO" "Pre-commit report saved to: $report_file"
    if [ $exit_code -eq 0 ]; then
        ci_log "PASS" "Pre-commit validation completed successfully"
    else
        ci_log "FAIL" "Pre-commit validation failed"
    fi
    return $exit_code
}
# Pre-push hook
# Pre-push hook: the health check is blocking; advanced, dependency and
# performance validation run only when node and the script exist, and
# only warn on failure. Writes a JSON summary report; returns 0/1.
pre_push_hook() {
    ci_log "INFO" "Running pre-push validation..."
    local exit_code=0
    # 1. Full health check (blocking); falls back to the bash skill audit
    # when Node.js is unavailable.
    ci_log "INFO" "Running full health check..."
    if command -v node >/dev/null 2>&1; then
        if node "$AGENTS_DIR/scripts/health-monitor.js" >> "$CI_LOG_DIR/pre-push-health.log" 2>&1; then
            ci_log "PASS" "Health check passed"
        else
            ci_log "FAIL" "Health check failed"
            exit_code=1
        fi
    else
        ci_log "WARN" "Node.js not available, using basic health check"
        if "$AGENTS_DIR/scripts/bash/audit-skills.sh" >> "$CI_LOG_DIR/pre-push-basic.log" 2>&1; then
            ci_log "PASS" "Basic health check passed"
        else
            ci_log "FAIL" "Basic health check failed"
            exit_code=1
        fi
    fi
    # 2. Advanced validation (if available; non-blocking)
    if command -v node >/dev/null 2>&1 && [ -f "$AGENTS_DIR/scripts/advanced-validator.js" ]; then
        ci_log "INFO" "Running advanced validation..."
        if node "$AGENTS_DIR/scripts/advanced-validator.js" >> "$CI_LOG_DIR/pre-push-advanced.log" 2>&1; then
            ci_log "PASS" "Advanced validation passed"
        else
            ci_log "WARN" "Advanced validation found issues (non-blocking)"
        fi
    fi
    # 3. Dependency validation (non-blocking)
    if command -v node >/dev/null 2>&1 && [ -f "$AGENTS_DIR/scripts/dependency-validator.js" ]; then
        ci_log "INFO" "Running dependency validation..."
        if node "$AGENTS_DIR/scripts/dependency-validator.js" >> "$CI_LOG_DIR/pre-push-dependencies.log" 2>&1; then
            ci_log "PASS" "Dependency validation passed"
        else
            ci_log "WARN" "Dependency validation found issues (non-blocking)"
        fi
    fi
    # 4. Performance monitoring (non-blocking)
    if command -v node >/dev/null 2>&1 && [ -f "$AGENTS_DIR/scripts/performance-monitor.js" ]; then
        ci_log "INFO" "Running performance monitoring..."
        if node "$AGENTS_DIR/scripts/performance-monitor.js" >> "$CI_LOG_DIR/pre-push-performance.log" 2>&1; then
            ci_log "PASS" "Performance monitoring passed"
        else
            ci_log "WARN" "Performance monitoring found issues (non-blocking)"
        fi
    fi
    # Generate pre-push report (heredoc written verbatim)
    local report_file="$CI_REPORT_DIR/pre-push-$(date '+%Y%m%d-%H%M%S').json"
    cat > "$report_file" << EOF
{
  "timestamp": "$(date -Iseconds)",
  "hook_type": "pre-push",
  "exit_code": $exit_code,
  "checks_performed": [
    "health_check",
    "advanced_validation",
    "dependency_validation",
    "performance_monitoring"
  ],
  "log_files": [
    "pre-push-health.log",
    "pre-push-advanced.log",
    "pre-push-dependencies.log",
    "pre-push-performance.log"
  ]
}
EOF
    ci_log "INFO" "Pre-push report saved to: $report_file"
    if [ $exit_code -eq 0 ]; then
        ci_log "PASS" "Pre-push validation completed successfully"
    else
        ci_log "FAIL" "Pre-push validation failed"
    fi
    return $exit_code
}
# CI pipeline hook
# Full CI pipeline: environment validation, test suite, comprehensive
# validation, and artifact generation into a timestamped workspace under
# $CI_REPORT_DIR. Tests and health monitoring are blocking; the other
# validators only warn. Returns 0 on success, 1 on failure.
ci_pipeline_hook() {
    ci_log "INFO" "Running CI pipeline validation..."
    local exit_code=0
    local pipeline_start=$(date +%s)
    # Create pipeline workspace
    local workspace="$CI_REPORT_DIR/pipeline-$(date '+%Y%m%d-%H%M%S')"
    mkdir -p "$workspace"
    # 1. Environment validation
    ci_log "INFO" "Validating CI environment..."
    # Check required tools
    local required_tools=("node" "npm")
    for tool in "${required_tools[@]}"; do
        if command -v "$tool" >/dev/null 2>&1; then
            ci_log "PASS" "Tool available: $tool"
        else
            ci_log "FAIL" "Tool missing: $tool"
            exit_code=1
        fi
    done
    # Check Node.js modules; install them if `npm list` reports problems
    if [ -f "$AGENTS_DIR/package.json" ]; then
        cd "$AGENTS_DIR"
        if npm list --depth=0 >/dev/null 2>&1; then
            ci_log "PASS" "Node.js dependencies installed"
        else
            ci_log "WARN" "Installing Node.js dependencies..."
            # `|| { ... }` keeps set -e from aborting on install failure
            npm install >> "$workspace/npm-install.log" 2>&1 || {
                ci_log "FAIL" "Failed to install Node.js dependencies"
                exit_code=1
            }
        fi
        cd "$BASE_DIR"
    fi
    # 2. Full test suite (blocking)
    # NOTE(review): these `node` calls run even when node was reported
    # missing above; the command then fails (127) and is logged as FAIL,
    # with exit_code already 1 — confirm that is the intended behaviour.
    ci_log "INFO" "Running full test suite..."
    # Integration tests
    if node "$AGENTS_DIR/tests/skill-integration.test.js" >> "$workspace/integration-tests.log" 2>&1; then
        ci_log "PASS" "Integration tests passed"
    else
        ci_log "FAIL" "Integration tests failed"
        exit_code=1
    fi
    # Workflow validation tests
    if node "$AGENTS_DIR/tests/workflow-validation.test.js" >> "$workspace/workflow-tests.log" 2>&1; then
        ci_log "PASS" "Workflow validation tests passed"
    else
        ci_log "FAIL" "Workflow validation tests failed"
        exit_code=1
    fi
    # 3. Comprehensive validation (health blocking, the rest warn only)
    ci_log "INFO" "Running comprehensive validation..."
    # Health monitoring
    if node "$AGENTS_DIR/scripts/health-monitor.js" >> "$workspace/health-check.log" 2>&1; then
        ci_log "PASS" "Health monitoring passed"
    else
        ci_log "FAIL" "Health monitoring failed"
        exit_code=1
    fi
    # Advanced validation
    if node "$AGENTS_DIR/scripts/advanced-validator.js" >> "$workspace/advanced-validation.log" 2>&1; then
        ci_log "PASS" "Advanced validation passed"
    else
        ci_log "WARN" "Advanced validation found issues"
    fi
    # Dependency validation
    if node "$AGENTS_DIR/scripts/dependency-validator.js" >> "$workspace/dependency-validation.log" 2>&1; then
        ci_log "PASS" "Dependency validation passed"
    else
        ci_log "WARN" "Dependency validation found issues"
    fi
    # Performance monitoring
    if node "$AGENTS_DIR/scripts/performance-monitor.js" >> "$workspace/performance-monitor.log" 2>&1; then
        ci_log "PASS" "Performance monitoring passed"
    else
        ci_log "WARN" "Performance monitoring found issues"
    fi
    # 4. Generate artifacts
    ci_log "INFO" "Generating CI artifacts..."
    local pipeline_end=$(date +%s)
    local duration=$((pipeline_end - pipeline_start))
    # Consolidated report (heredoc written verbatim)
    local report_file="$workspace/ci-pipeline-report.json"
    cat > "$report_file" << EOF
{
  "timestamp": "$(date -Iseconds)",
  "pipeline_type": "full_ci",
  "duration_seconds": $duration,
  "exit_code": $exit_code,
  "environment": {
    "node_version": "$(node --version)",
    "platform": "$(uname -s)",
    "working_directory": "$BASE_DIR"
  },
  "checks_performed": [
    "environment_validation",
    "integration_tests",
    "workflow_validation_tests",
    "health_monitoring",
    "advanced_validation",
    "dependency_validation",
    "performance_monitoring"
  ],
  "artifacts": [
    "integration-tests.log",
    "workflow-tests.log",
    "health-check.log",
    "advanced-validation.log",
    "dependency-validation.log",
    "performance-monitor.log",
    "npm-install.log"
  ],
  "workspace": "$workspace"
}
EOF
    ci_log "INFO" "CI pipeline report saved to: $report_file"
    ci_log "INFO" "CI artifacts saved to: $workspace"
    ci_log "INFO" "Pipeline duration: ${duration}s"
    if [ $exit_code -eq 0 ]; then
        ci_log "PASS" "CI pipeline completed successfully"
    else
        ci_log "FAIL" "CI pipeline failed"
    fi
    return $exit_code
}
# Install Git hooks
# Install Git hooks: writes pre-commit and pre-push hook scripts into
# .agents/scripts/git-hooks and, when a .git/hooks directory exists,
# copies them into place so Git runs them automatically.
install_git_hooks() {
    ci_log "INFO" "Installing Git hooks..."
    local hooks_dir="$BASE_DIR/.git/hooks"
    local agents_hooks_dir="$AGENTS_DIR/scripts/git-hooks"
    # Create git-hooks directory
    mkdir -p "$agents_hooks_dir"
    # Create pre-commit hook (quoted 'EOF' heredoc: content is written
    # verbatim with no variable expansion)
    cat > "$agents_hooks_dir/pre-commit" << 'EOF'
#!/bin/bash
# Pre-commit hook for .agents validation
echo "Running .agents pre-commit validation..."
if bash .agents/scripts/ci-hooks.sh pre-commit; then
    echo "Pre-commit validation passed"
    exit 0
else
    echo "Pre-commit validation failed"
    exit 1
fi
EOF
    # Create pre-push hook
    cat > "$agents_hooks_dir/pre-push" << 'EOF'
#!/bin/bash
# Pre-push hook for .agents validation
echo "Running .agents pre-push validation..."
if bash .agents/scripts/ci-hooks.sh pre-push; then
    echo "Pre-push validation passed"
    exit 0
else
    echo "Pre-push validation failed"
    exit 1
fi
EOF
    # Make hooks executable (Git refuses to run non-executable hooks)
    chmod +x "$agents_hooks_dir/pre-commit"
    chmod +x "$agents_hooks_dir/pre-push"
    # Install hooks if .git directory exists; otherwise they remain staged
    # for manual installation.
    if [ -d "$hooks_dir" ]; then
        cp "$agents_hooks_dir/pre-commit" "$hooks_dir/"
        cp "$agents_hooks_dir/pre-push" "$hooks_dir/"
        ci_log "PASS" "Git hooks installed successfully"
    else
        ci_log "WARN" "Git repository not found, hooks copied to .agents/scripts/git-hooks"
    fi
}
# Entry point: route the first CLI argument (default "help") to the
# matching hook; any unrecognized command also falls through to usage.
main() {
    local cmd="${1:-help}"
    if [ "$cmd" = "pre-commit" ]; then
        pre_commit_hook
    elif [ "$cmd" = "pre-push" ]; then
        pre_push_hook
    elif [ "$cmd" = "ci-pipeline" ]; then
        ci_pipeline_hook
    elif [ "$cmd" = "install-hooks" ]; then
        install_git_hooks
    else
        echo "Usage: $0 {pre-commit|pre-push|ci-pipeline|install-hooks|help}"
        echo ""
        echo "Commands:"
        echo " pre-commit - Run pre-commit validation"
        echo " pre-push - Run pre-push validation"
        echo " ci-pipeline - Run full CI pipeline"
        echo " install-hooks - Install Git hooks"
        echo " help - Show this help"
    fi
}

# Hand all CLI arguments to the dispatcher
main "$@"
+457
View File
@@ -0,0 +1,457 @@
#!/usr/bin/env node
/**
 * dependency-validator.js - Skill dependency validation system
 * Part of LCBP3-DMS Phase 3 enhancements
 */
const fs = require('fs');
const path = require('path');
const yaml = require('js-yaml');
// Configuration
// Paths are resolved relative to this script's directory (.agents/scripts),
// so BASE_DIR is the repository root two levels up.
const BASE_DIR = path.resolve(__dirname, '../..');
const AGENTS_DIR = path.join(BASE_DIR, '.agents');
const SKILLS_DIR = path.join(AGENTS_DIR, 'skills');
const WORKFLOWS_DIR = path.join(BASE_DIR, '.windsurf', 'workflows');
// Dependency validation class
class DependencyValidator {
constructor() {
this.validationResults = {
timestamp: new Date().toISOString(),
dependency_graph: {},
circular_dependencies: [],
missing_dependencies: [],
orphaned_skills: [],
dependency_chains: {},
validation_summary: {
total_skills: 0,
skills_with_dependencies: 0,
circular_dependencies_found: 0,
missing_dependencies_found: 0,
orphaned_skills_found: 0,
max_dependency_depth: 0,
validation_status: 'unknown'
}
};
}
log(message, level = 'info') {
const colors = {
info: '\x1b[36m', // Cyan
pass: '\x1b[32m', // Green
fail: '\x1b[31m', // Red
warn: '\x1b[33m', // Yellow
critical: '\x1b[35m', // Magenta
reset: '\x1b[0m'
};
const color = colors[level] || colors.info;
console.log(`${color}[${level.toUpperCase()}] ${message}${colors.reset}`);
}
extractSkillDependencies(skillPath, skillName) {
const skillMdPath = path.join(skillPath, 'SKILL.md');
if (!fs.existsSync(skillMdPath)) {
this.log(`No SKILL.md found for ${skillName}`, 'warn');
return { dependencies: [], handoffs: [], error: 'SKILL.md not found' };
}
try {
const content = fs.readFileSync(skillMdPath, 'utf8');
// Extract dependencies from front matter
let dependencies = [];
let handoffs = [];
const frontMatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
if (frontMatterMatch) {
try {
const frontMatter = yaml.load(frontMatterMatch[1]);
// Handle depends-on field
if (frontMatter['depends-on']) {
if (Array.isArray(frontMatter['depends-on'])) {
dependencies = frontMatter['depends-on'];
} else {
dependencies = [frontMatter['depends-on']];
}
}
// Handle handoffs field
if (frontMatter.handoffs && Array.isArray(frontMatter.handoffs)) {
handoffs = frontMatter.handoffs.map(h => h.agent);
}
} catch (yamlError) {
this.log(`Invalid YAML in ${skillName} front matter: ${yamlError.message}`, 'warn');
}
}
// Also extract skill references from content
const contentSkillRefs = content.match(/@speckit-\w+/g) || [];
const contentDependencies = contentSkillRefs.map(ref => ref.replace('@', ''));
// Merge dependencies (avoid duplicates)
const allDependencies = [...new Set([...dependencies, ...contentDependencies])];
return {
dependencies: allDependencies,
handoffs: handoffs,
content_references: contentSkillRefs,
front_matter_dependencies: dependencies,
error: null
};
} catch (error) {
this.log(`Error reading ${skillName}: ${error.message}`, 'warn');
return { dependencies: [], handoffs: [], error: error.message };
}
}
buildDependencyGraph() {
this.log('Building dependency graph...', 'info');
if (!fs.existsSync(SKILLS_DIR)) {
this.log('Skills directory not found', 'fail');
return;
}
const skillDirs = fs.readdirSync(SKILLS_DIR).filter(item => {
const itemPath = path.join(SKILLS_DIR, item);
return fs.statSync(itemPath).isDirectory();
});
this.validationResults.validation_summary.total_skills = skillDirs.length;
// Extract dependencies for each skill
for (const skillDir of skillDirs) {
const skillPath = path.join(SKILLS_DIR, skillDir);
const dependencyInfo = this.extractSkillDependencies(skillPath, skillDir);
this.validationResults.dependency_graph[skillDir] = dependencyInfo;
if (dependencyInfo.dependencies.length > 0 || dependencyInfo.handoffs.length > 0) {
this.validationResults.validation_summary.skills_with_dependencies++;
}
}
this.log(`Analyzed ${skillDirs.length} skills`, 'info');
this.log(`Skills with dependencies: ${this.validationResults.validation_summary.skills_with_dependencies}`, 'info');
}
validateDependencies() {
this.log('Validating dependencies...', 'info');
const { dependency_graph } = this.validationResults;
const allSkills = Object.keys(dependency_graph);
// Check for missing dependencies
for (const [skillName, dependencyInfo] of Object.entries(dependency_graph)) {
for (const dependency of dependencyInfo.dependencies) {
if (!allSkills.includes(dependency)) {
this.validationResults.missing_dependencies.push({
skill: skillName,
missing_dependency: dependency,
dependency_type: 'depends-on'
});
this.validationResults.validation_summary.missing_dependencies_found++;
this.log(`Missing dependency: ${skillName} depends on ${dependency}`, 'fail');
}
}
for (const handoff of dependencyInfo.handoffs) {
if (!allSkills.includes(handoff)) {
this.validationResults.missing_dependencies.push({
skill: skillName,
missing_dependency: handoff,
dependency_type: 'handoff'
});
this.validationResults.validation_summary.missing_dependencies_found++;
this.log(`Missing handoff: ${skillName} hands off to ${handoff}`, 'fail');
}
}
}
// Check for orphaned skills (no one depends on them)
const dependedOnSkills = new Set();
for (const dependencyInfo of Object.values(dependency_graph)) {
dependencyInfo.dependencies.forEach(dep => dependedOnSkills.add(dep));
dependencyInfo.handoffs.forEach(handoff => dependedOnSkills.add(handoff));
}
for (const skill of allSkills) {
if (!dependedOnSkills.has(skill) && skill !== 'speckit-constitution') {
// Constitution is allowed to be orphaned (it's a starting point)
this.validationResults.orphaned_skills.push(skill);
this.validationResults.validation_summary.orphaned_skills_found++;
this.log(`Orphaned skill: ${skill} (no dependencies on it)`, 'warn');
}
}
}
detectCircularDependencies() {
this.log('Detecting circular dependencies...', 'info');
const { dependency_graph } = this.validationResults;
const visited = new Set();
const recursionStack = new Set();
const circularDeps = [];
function dfs(skillName, path = []) {
if (recursionStack.has(skillName)) {
// Found circular dependency
const cycleStart = path.indexOf(skillName);
const cycle = path.slice(cycleStart).concat(skillName);
circularDeps.push(cycle);
return;
}
if (visited.has(skillName)) {
return;
}
visited.add(skillName);
recursionStack.add(skillName);
path.push(skillName);
const dependencyInfo = dependency_graph[skillName];
if (dependencyInfo) {
for (const dependency of dependencyInfo.dependencies) {
dfs(dependency, [...path]);
}
}
recursionStack.delete(skillName);
}
// Run DFS from each skill
for (const skillName of Object.keys(dependency_graph)) {
if (!visited.has(skillName)) {
dfs(skillName);
}
}
this.validationResults.circular_dependencies = circularDeps;
this.validationResults.validation_summary.circular_dependencies_found = circularDeps.length;
if (circularDeps.length > 0) {
this.log(`Found ${circularDeps.length} circular dependencies:`, 'critical');
circularDeps.forEach((cycle, index) => {
this.log(` ${index + 1}. ${cycle.join(' -> ')}`, 'critical');
});
} else {
this.log('No circular dependencies found', 'pass');
}
}
calculateDependencyChains() {
this.log('Calculating dependency chains...', 'info');
const { dependency_graph } = this.validationResults;
const chains = {};
function calculateDepth(skillName, visited = new Set()) {
if (visited.has(skillName)) {
return 0; // Circular dependency protection
}
visited.add(skillName);
const dependencyInfo = dependency_graph[skillName];
if (!dependencyInfo || dependencyInfo.dependencies.length === 0) {
return 1;
}
let maxDepth = 0;
for (const dependency of dependencyInfo.dependencies) {
const depth = calculateDepth(dependency, new Set(visited));
maxDepth = Math.max(maxDepth, depth);
}
return maxDepth + 1;
}
function getDependencyChain(skillName) {
const dependencyInfo = dependency_graph[skillName];
if (!dependencyInfo || dependencyInfo.dependencies.length === 0) {
return [skillName];
}
const chains = [];
for (const dependency of dependencyInfo.dependencies) {
const depChain = getDependencyChain(dependency);
chains.push(depChain.concat(skillName));
}
// Return the longest chain
return chains.reduce((longest, current) =>
current.length > longest.length ? current : longest, [skillName]
);
}
for (const skillName of Object.keys(dependency_graph)) {
const depth = calculateDepth(skillName);
const chain = getDependencyChain(skillName);
chains[skillName] = {
depth: depth,
chain: chain,
chain_length: chain.length
};
}
this.validationResults.dependency_chains = chains;
const maxDepth = Math.max(...Object.values(chains).map(c => c.depth));
this.validationResults.validation_summary.max_dependency_depth = maxDepth;
this.log(`Maximum dependency depth: ${maxDepth}`, 'info');
}
validateWorkflowDependencies() {
this.log('Validating workflow dependencies...', 'info');
if (!fs.existsSync(WORKFLOWS_DIR)) {
this.log('Workflows directory not found', 'warn');
return;
}
const workflowFiles = fs.readdirSync(WORKFLOWS_DIR).filter(file => file.endsWith('.md'));
const allSkills = Object.keys(this.validationResults.dependency_graph);
for (const workflowFile of workflowFiles) {
const workflowPath = path.join(WORKFLOWS_DIR, workflowFile);
try {
const content = fs.readFileSync(workflowPath, 'utf8');
const skillReferences = content.match(/@speckit-\w+/g) || [];
for (const skillRef of skillReferences) {
const skillName = skillRef.replace('@', '');
if (!allSkills.includes(skillName)) {
this.validationResults.missing_dependencies.push({
workflow: workflowFile,
missing_dependency: skillName,
dependency_type: 'workflow-reference'
});
this.validationResults.validation_summary.missing_dependencies_found++;
this.log(`Workflow ${workflowFile} references missing skill: ${skillRef}`, 'fail');
}
}
} catch (error) {
this.log(`Error reading workflow ${workflowFile}: ${error.message}`, 'warn');
}
}
}
generateDependencyReport() {
this.log('Generating dependency report...', 'info');
// Determine overall validation status
const summary = this.validationResults.validation_summary;
if (summary.circular_dependencies_found > 0) {
summary.validation_status = 'critical';
} else if (summary.missing_dependencies_found > 0) {
summary.validation_status = 'failed';
} else if (summary.orphaned_skills_found > 0) {
summary.validation_status = 'warning';
} else {
summary.validation_status = 'passed';
}
// Save report
const reportPath = path.join(AGENTS_DIR, 'reports', 'dependency-validation.json');
const reportsDir = path.dirname(reportPath);
if (!fs.existsSync(reportsDir)) {
fs.mkdirSync(reportsDir, { recursive: true });
}
fs.writeFileSync(reportPath, JSON.stringify(this.validationResults, null, 2));
this.log(`Dependency validation report saved to: ${reportPath}`, 'info');
}
printSummary() {
const summary = this.validationResults.validation_summary;
this.log('=== Dependency Validation Summary ===', 'info');
this.log(`Total skills: ${summary.total_skills}`, 'info');
this.log(`Skills with dependencies: ${summary.skills_with_dependencies}`, 'info');
this.log(`Circular dependencies: ${summary.circular_dependencies_found}`, summary.circular_dependencies_found > 0 ? 'critical' : 'pass');
this.log(`Missing dependencies: ${summary.missing_dependencies_found}`, summary.missing_dependencies_found > 0 ? 'fail' : 'pass');
this.log(`Orphaned skills: ${summary.orphaned_skills_found}`, summary.orphaned_skills_found > 0 ? 'warn' : 'info');
this.log(`Max dependency depth: ${summary.max_dependency_depth}`, 'info');
this.log(`Validation status: ${summary.validation_status.toUpperCase()}`,
summary.validation_status === 'passed' ? 'pass' :
summary.validation_status === 'warning' ? 'warn' : 'fail');
// Show longest dependency chains
const chains = this.validationResults.dependency_chains;
const sortedChains = Object.entries(chains)
.sort(([,a], [,b]) => b.depth - a.depth)
.slice(0, 3);
if (sortedChains.length > 0) {
this.log('Top 3 longest dependency chains:', 'info');
sortedChains.forEach(([skillName, chainInfo], index) => {
this.log(` ${index + 1}. ${chainInfo.chain.join(' -> ')} (depth: ${chainInfo.depth})`, 'info');
});
}
}
/**
 * Run the complete dependency-validation pipeline: build the skill
 * dependency graph, validate it, detect cycles, compute chains, cross-check
 * workflow references, then write the JSON report and print a summary.
 * @returns {Promise<object>} the accumulated validationResults object
 */
async runDependencyValidation() {
  this.log('Starting dependency validation...', 'info');
  this.log(`Base directory: ${BASE_DIR}`, 'info');
  // Build dependency graph
  this.buildDependencyGraph();
  // Validate dependencies
  this.validateDependencies();
  // Detect circular dependencies
  this.detectCircularDependencies();
  // Calculate dependency chains
  this.calculateDependencyChains();
  // Validate workflow dependencies
  this.validateWorkflowDependencies();
  // Generate report
  this.generateDependencyReport();
  // Print summary
  this.printSummary();
  return this.validationResults;
}
}
// CLI interface
/**
 * CLI entry point: run the validation and map the final status to an exit
 * code — 0 for 'passed' or 'warning', 1 for anything else or on errors.
 */
async function main() {
  const validator = new DependencyValidator();
  try {
    const { validation_summary } = await validator.runDependencyValidation();
    const ok = ['passed', 'warning'].includes(validation_summary.validation_status);
    process.exit(ok ? 0 : 1);
  } catch (error) {
    console.error('Dependency validation failed:', error);
    process.exit(1);
  }
}
// Export for use in other modules
module.exports = { DependencyValidator };
// Run if called directly
if (require.main === module) {
  main();
}
+369
View File
@@ -0,0 +1,369 @@
#!/usr/bin/env node
/**
* health-monitor.js - Automated health monitoring system for .agents
* Part of LCBP3-DMS Phase 3 enhancements
*/
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
// Configuration
const BASE_DIR = path.resolve(__dirname, '../..');
const AGENTS_DIR = path.join(BASE_DIR, '.agents');
const HEALTH_LOG_PATH = path.join(AGENTS_DIR, 'logs', 'health.log');
const HEALTH_REPORT_PATH = path.join(AGENTS_DIR, 'reports', 'health-report.json');
// Ensure the log and report directories exist before any writes.
for (const dir of [path.dirname(HEALTH_LOG_PATH), path.dirname(HEALTH_REPORT_PATH)]) {
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
}
// Health monitoring class
/**
 * HealthMonitor runs filesystem-level health checks over the .agents tree:
 * required directories and files, version-string consistency, and the
 * structural health of skills and workflows. Every check is recorded under
 * `metrics.checks` and tallied in `metrics.summary`; the final report is
 * written to HEALTH_REPORT_PATH and log lines are appended to
 * HEALTH_LOG_PATH.
 *
 * FIX in this revision: checkSkillHealth() and checkWorkflowHealth()
 * previously bumped `passed_checks`/`warnings` for their count checks
 * without incrementing `total_checks`, so the summary counters could not
 * add up (passed + failed could exceed total). Both now count the check.
 */
class HealthMonitor {
  constructor() {
    this.startTime = new Date();
    this.metrics = {
      timestamp: this.startTime.toISOString(),
      version: '1.8.6',
      checks: {},
      summary: {
        total_checks: 0,
        passed_checks: 0,
        failed_checks: 0,
        warnings: 0,
        overall_health: 'unknown'
      }
    };
  }

  /**
   * Log a timestamped message to the console (color-coded by level) and
   * append the same line to the health log file.
   * @param {string} message
   * @param {'info'|'pass'|'fail'|'warn'} [level]
   */
  log(message, level = 'info') {
    const timestamp = new Date().toISOString();
    const logEntry = `[${timestamp}] [${level.toUpperCase()}] ${message}\n`;
    // Console output with colors
    const colors = {
      info: '\x1b[36m', // Cyan
      pass: '\x1b[32m', // Green
      fail: '\x1b[31m', // Red
      warn: '\x1b[33m', // Yellow
      reset: '\x1b[0m'
    };
    const color = colors[level] || colors.info;
    console.log(`${color}${logEntry.trim()}${colors.reset}`);
    // File logging
    fs.appendFileSync(HEALTH_LOG_PATH, logEntry);
  }

  /**
   * Record a pass/fail check for a directory's existence.
   * @returns {boolean} true when the directory exists
   */
  checkDirectoryExists(dirPath, checkName) {
    this.metrics.summary.total_checks++;
    const exists = fs.existsSync(dirPath);
    this.metrics.checks[checkName] = {
      type: 'directory_exists',
      status: exists ? 'pass' : 'fail',
      path: dirPath,
      message: exists ? 'Directory exists' : 'Directory missing'
    };
    if (exists) {
      this.metrics.summary.passed_checks++;
      this.log(`${checkName}: PASS - Directory exists`, 'pass');
    } else {
      this.metrics.summary.failed_checks++;
      this.log(`${checkName}: FAIL - Directory missing: ${dirPath}`, 'fail');
    }
    return exists;
  }

  /**
   * Record a pass/fail check for a file's existence.
   * @returns {boolean} true when the file exists
   */
  checkFileExists(filePath, checkName) {
    this.metrics.summary.total_checks++;
    const exists = fs.existsSync(filePath);
    this.metrics.checks[checkName] = {
      type: 'file_exists',
      status: exists ? 'pass' : 'fail',
      path: filePath,
      message: exists ? 'File exists' : 'File missing'
    };
    if (exists) {
      this.metrics.summary.passed_checks++;
      this.log(`${checkName}: PASS - File exists`, 'pass');
    } else {
      this.metrics.summary.failed_checks++;
      this.log(`${checkName}: FAIL - File missing: ${filePath}`, 'fail');
    }
    return exists;
  }

  /**
   * Compare the first semver-looking string in a file (regex `v?X.Y.Z`,
   * first match anywhere in the content) against an expected version and
   * record the result.
   * @returns {boolean} true when the versions match
   */
  checkFileVersion(filePath, expectedVersion, checkName) {
    this.metrics.summary.total_checks++;
    if (!fs.existsSync(filePath)) {
      this.metrics.summary.failed_checks++;
      this.metrics.checks[checkName] = {
        type: 'version_check',
        status: 'fail',
        path: filePath,
        message: 'File does not exist'
      };
      this.log(`${checkName}: FAIL - File not found: ${filePath}`, 'fail');
      return false;
    }
    try {
      const content = fs.readFileSync(filePath, 'utf8');
      const versionMatch = content.match(/v?(\d+\.\d+\.\d+)/);
      const actualVersion = versionMatch ? versionMatch[1] : 'not_found';
      const versionMatches = actualVersion === expectedVersion;
      this.metrics.checks[checkName] = {
        type: 'version_check',
        status: versionMatches ? 'pass' : 'fail',
        path: filePath,
        expected_version: expectedVersion,
        actual_version: actualVersion,
        message: versionMatches ? 'Version matches' : `Version mismatch (expected ${expectedVersion}, found ${actualVersion})`
      };
      if (versionMatches) {
        this.metrics.summary.passed_checks++;
        this.log(`${checkName}: PASS - Version ${actualVersion}`, 'pass');
      } else {
        this.metrics.summary.failed_checks++;
        this.log(`${checkName}: FAIL - Version mismatch (expected ${expectedVersion}, found ${actualVersion})`, 'fail');
      }
      return versionMatches;
    } catch (error) {
      this.metrics.summary.failed_checks++;
      this.metrics.checks[checkName] = {
        type: 'version_check',
        status: 'fail',
        path: filePath,
        message: `Error reading file: ${error.message}`
      };
      this.log(`${checkName}: FAIL - Error reading file: ${error.message}`, 'fail');
      return false;
    }
  }

  /**
   * Check the skills directory: overall skill count (warn below 20) and a
   * per-skill structure check (front-matter fields plus Role/Task sections,
   * detected by simple substring tests). Per-skill results are recorded in
   * `metrics.checks` individually but counted as one aggregate check.
   */
  checkSkillHealth() {
    this.log('Checking skill health...', 'info');
    const skillsDir = path.join(AGENTS_DIR, 'skills');
    if (!fs.existsSync(skillsDir)) {
      // NOTE: a missing directory is logged but (pre-existing behavior)
      // not tallied; checkDirectoryExists covers it in runFullHealthCheck.
      this.log('Skills directory not found', 'fail');
      return;
    }
    const skillDirs = fs.readdirSync(skillsDir).filter(item => {
      const itemPath = path.join(skillsDir, item);
      return fs.statSync(itemPath).isDirectory();
    });
    // BUGFIX: count the skill_count check in total_checks so the summary
    // counters stay consistent (previously only passed/warnings moved).
    this.metrics.summary.total_checks++;
    this.metrics.checks['skill_count'] = {
      type: 'skill_count',
      status: skillDirs.length >= 20 ? 'pass' : 'warn',
      count: skillDirs.length,
      expected: 20,
      message: `Found ${skillDirs.length} skills (expected at least 20)`
    };
    if (skillDirs.length >= 20) {
      this.metrics.summary.passed_checks++;
      this.log(`Skill count: PASS - Found ${skillDirs.length} skills`, 'pass');
    } else {
      this.metrics.summary.warnings++;
      this.log(`Skill count: WARN - Only ${skillDirs.length} skills found (expected at least 20)`, 'warn');
    }
    // Check individual skills
    let healthySkills = 0;
    skillDirs.forEach(skillDir => {
      const skillPath = path.join(skillsDir, skillDir);
      const skillMdPath = path.join(skillPath, 'SKILL.md');
      if (fs.existsSync(skillMdPath)) {
        try {
          const content = fs.readFileSync(skillMdPath, 'utf8');
          const hasName = content.includes('name:');
          const hasDescription = content.includes('description:');
          const hasVersion = content.includes('version:');
          const hasRole = content.includes('## Role');
          const hasTask = content.includes('## Task');
          const isHealthy = hasName && hasDescription && hasVersion && hasRole && hasTask;
          if (isHealthy) healthySkills++;
          this.metrics.checks[`skill_${skillDir}_health`] = {
            type: 'skill_health',
            status: isHealthy ? 'pass' : 'fail',
            skill: skillDir,
            has_name: hasName,
            has_description: hasDescription,
            has_version: hasVersion,
            has_role: hasRole,
            has_task: hasTask,
            message: isHealthy ? 'Skill is healthy' : 'Skill has missing sections'
          };
        } catch (error) {
          this.metrics.checks[`skill_${skillDir}_health`] = {
            type: 'skill_health',
            status: 'fail',
            skill: skillDir,
            message: `Error reading skill: ${error.message}`
          };
        }
      }
    });
    // One aggregate check covering all individual skills.
    this.metrics.summary.total_checks++;
    if (healthySkills === skillDirs.length) {
      this.metrics.summary.passed_checks++;
      this.log(`Individual skills: PASS - All ${healthySkills} skills are healthy`, 'pass');
    } else {
      this.metrics.summary.failed_checks++;
      this.log(`Individual skills: FAIL - Only ${healthySkills}/${skillDirs.length} skills are healthy`, 'fail');
    }
  }

  /**
   * Check the workflows directory: count of *.md workflow files (warn below
   * 20).
   */
  checkWorkflowHealth() {
    this.log('Checking workflow health...', 'info');
    const workflowsDir = path.join(BASE_DIR, '.windsurf', 'workflows');
    if (!fs.existsSync(workflowsDir)) {
      this.log('Workflows directory not found', 'fail');
      return;
    }
    const workflowFiles = fs.readdirSync(workflowsDir).filter(file => file.endsWith('.md'));
    // BUGFIX: count the workflow_count check in total_checks (same
    // accounting fix as in checkSkillHealth).
    this.metrics.summary.total_checks++;
    this.metrics.checks['workflow_count'] = {
      type: 'workflow_count',
      status: workflowFiles.length >= 20 ? 'pass' : 'warn',
      count: workflowFiles.length,
      expected: 20,
      message: `Found ${workflowFiles.length} workflows (expected at least 20)`
    };
    if (workflowFiles.length >= 20) {
      this.metrics.summary.passed_checks++;
      this.log(`Workflow count: PASS - Found ${workflowFiles.length} workflows`, 'pass');
    } else {
      this.metrics.summary.warnings++;
      this.log(`Workflow count: WARN - Only ${workflowFiles.length} workflows found (expected at least 20)`, 'warn');
    }
  }

  /**
   * Derive `summary.overall_health` from the counters:
   * excellent (no failures, no warnings), good (no failures), fair
   * (failures within 10% of total), poor (more than 10% failed).
   */
  calculateOverallHealth() {
    const { total_checks, passed_checks, failed_checks, warnings } = this.metrics.summary;
    if (failed_checks === 0) {
      this.metrics.summary.overall_health = warnings === 0 ? 'excellent' : 'good';
    } else if (failed_checks <= total_checks * 0.1) {
      this.metrics.summary.overall_health = 'fair';
    } else {
      this.metrics.summary.overall_health = 'poor';
    }
    this.log(`Overall health: ${this.metrics.summary.overall_health}`, 'info');
  }

  /**
   * Write the collected metrics (plus run duration and environment info) as
   * a JSON report and return the report object.
   */
  generateReport() {
    const report = {
      ...this.metrics,
      duration: new Date() - this.startTime,
      environment: {
        node_version: process.version,
        platform: process.platform,
        agents_dir: AGENTS_DIR
      }
    };
    fs.writeFileSync(HEALTH_REPORT_PATH, JSON.stringify(report, null, 2));
    this.log(`Health report saved to: ${HEALTH_REPORT_PATH}`, 'info');
    return report;
  }

  /**
   * Run every health check in sequence and return the generated report.
   * @returns {Promise<object>} the report written to HEALTH_REPORT_PATH
   */
  async runFullHealthCheck() {
    this.log('Starting comprehensive health check...', 'info');
    this.log(`Base directory: ${BASE_DIR}`, 'info');
    // Core directory checks
    this.checkDirectoryExists(AGENTS_DIR, 'agents_directory');
    this.checkDirectoryExists(path.join(AGENTS_DIR, 'skills'), 'skills_directory');
    this.checkDirectoryExists(path.join(AGENTS_DIR, 'scripts'), 'scripts_directory');
    this.checkDirectoryExists(path.join(AGENTS_DIR, 'rules'), 'rules_directory');
    this.checkDirectoryExists(path.join(BASE_DIR, '.windsurf', 'workflows'), 'workflows_directory');
    // Core file checks
    this.checkFileExists(path.join(AGENTS_DIR, 'README.md'), 'readme_file');
    this.checkFileExists(path.join(AGENTS_DIR, 'skills', 'VERSION'), 'skills_version_file');
    this.checkFileExists(path.join(AGENTS_DIR, 'skills', 'skills.md'), 'skills_documentation');
    // Version consistency checks
    this.checkFileVersion(path.join(AGENTS_DIR, 'README.md'), '1.8.6', 'readme_version');
    this.checkFileVersion(path.join(AGENTS_DIR, 'skills', 'VERSION'), '1.8.6', 'skills_version_file_version');
    this.checkFileVersion(path.join(AGENTS_DIR, 'skills', 'skills.md'), '1.8.6', 'skills_documentation_version');
    this.checkFileVersion(path.join(AGENTS_DIR, 'rules', '00-project-context.md'), '1.8.6', 'project_context_version');
    // Script availability checks
    this.checkFileExists(path.join(AGENTS_DIR, 'scripts', 'bash', 'validate-versions.sh'), 'bash_version_script');
    this.checkFileExists(path.join(AGENTS_DIR, 'scripts', 'bash', 'audit-skills.sh'), 'bash_audit_script');
    this.checkFileExists(path.join(AGENTS_DIR, 'scripts', 'bash', 'sync-workflows.sh'), 'bash_sync_script');
    this.checkFileExists(path.join(AGENTS_DIR, 'scripts', 'powershell', 'validate-versions.ps1'), 'powershell_version_script');
    this.checkFileExists(path.join(AGENTS_DIR, 'scripts', 'powershell', 'audit-skills.ps1'), 'powershell_audit_script');
    // Detailed health checks
    this.checkSkillHealth();
    this.checkWorkflowHealth();
    // Calculate overall health
    this.calculateOverallHealth();
    // Generate report
    const report = this.generateReport();
    // Summary
    this.log('=== Health Check Summary ===', 'info');
    this.log(`Total checks: ${this.metrics.summary.total_checks}`, 'info');
    this.log(`Passed: ${this.metrics.summary.passed_checks}`, 'pass');
    this.log(`Failed: ${this.metrics.summary.failed_checks}`, this.metrics.summary.failed_checks > 0 ? 'fail' : 'info');
    this.log(`Warnings: ${this.metrics.summary.warnings}`, 'warn');
    this.log(`Overall health: ${this.metrics.summary.overall_health}`, 'info');
    this.log(`Duration: ${new Date() - this.startTime}ms`, 'info');
    return report;
  }
}
// CLI interface
/**
 * CLI entry point: run the full health check and exit non-zero when any
 * check failed.
 */
async function main() {
  const monitor = new HealthMonitor();
  try {
    const { summary } = await monitor.runFullHealthCheck();
    process.exit(summary.failed_checks > 0 ? 1 : 0);
  } catch (error) {
    console.error('Health check failed:', error);
    process.exit(1);
  }
}
// Export for use in other modules
module.exports = { HealthMonitor };
// Run if called directly
if (require.main === module) {
  main();
}
+494
View File
@@ -0,0 +1,494 @@
#!/usr/bin/env node
/**
* performance-monitor.js - Performance monitoring for .agents skills
* Part of LCBP3-DMS Phase 3 enhancements
*/
const fs = require('fs');
const path = require('path');
const { performance } = require('perf_hooks');
// Configuration
const BASE_DIR = path.resolve(__dirname, '../..');
const AGENTS_DIR = path.join(BASE_DIR, '.agents');
const SKILLS_DIR = path.join(AGENTS_DIR, 'skills');
const PERFORMANCE_LOG_PATH = path.join(AGENTS_DIR, 'logs', 'performance.log');
const PERFORMANCE_REPORT_PATH = path.join(AGENTS_DIR, 'reports', 'performance-report.json');
// Ensure the log and report directories exist before any writes.
for (const dir of [path.dirname(PERFORMANCE_LOG_PATH), path.dirname(PERFORMANCE_REPORT_PATH)]) {
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
}
// Performance monitoring class
/**
 * PerformanceMonitor statically analyzes skill and workflow markdown files
 * (size, structure, readability heuristics), computes a 0-100 heuristic
 * score per file, aggregates system-level directory metrics and
 * recommendations, and writes everything to PERFORMANCE_REPORT_PATH.
 */
class PerformanceMonitor {
  constructor() {
    this.startTime = performance.now();
    this.metrics = {
      timestamp: new Date().toISOString(),
      duration: 0,
      skill_metrics: {},
      workflow_metrics: {},
      system_metrics: {},
      summary: {
        total_skills_analyzed: 0,
        total_workflows_analyzed: 0,
        average_skill_size: 0,
        average_workflow_size: 0,
        performance_score: 0,
        recommendations: []
      }
    };
  }
  /**
   * Log a timestamped, color-coded message to the console and append the
   * same line to the performance log file.
   * @param {string} message
   * @param {'info'|'good'|'warn'|'poor'} [level]
   */
  log(message, level = 'info') {
    const timestamp = new Date().toISOString();
    const logEntry = `[${timestamp}] [${level.toUpperCase()}] ${message}\n`;
    // Console output with colors
    const colors = {
      info: '\x1b[36m', // Cyan
      good: '\x1b[32m', // Green
      warn: '\x1b[33m', // Yellow
      poor: '\x1b[31m', // Red
      reset: '\x1b[0m'
    };
    const color = colors[level] || colors.info;
    console.log(`${color}${logEntry.trim()}${colors.reset}`);
    // File logging
    fs.appendFileSync(PERFORMANCE_LOG_PATH, logEntry);
  }
  /**
   * Analyze a single skill's SKILL.md: size, structure, readability, and a
   * heuristic score (size penalties, structure bonuses, readability
   * penalties, clamped to 0-100). Records the result under
   * `metrics.skill_metrics[skillName]`.
   * @returns {object|null} the metrics object, or null when SKILL.md is
   *   missing or unreadable
   */
  analyzeSkillPerformance(skillPath, skillName) {
    const skillMdPath = path.join(skillPath, 'SKILL.md');
    if (!fs.existsSync(skillMdPath)) {
      this.log(`Skipping ${skillName} - SKILL.md not found`, 'warn');
      return null;
    }
    const startTime = performance.now();
    try {
      const stats = fs.statSync(skillMdPath);
      const content = fs.readFileSync(skillMdPath, 'utf8');
      // Basic metrics
      const fileSizeKB = stats.size / 1024;
      const lineCount = content.split('\n').length;
      const wordCount = content.split(/\s+/).filter(word => word.length > 0).length;
      const charCount = content.length;
      // Content complexity metrics
      const sectionCount = (content.match(/^#+\s/gm) || []).length;
      const codeBlockCount = (content.match(/```[\s\S]*?```/g) || []).length;
      const listCount = (content.match(/^[-*+]\s/gm) || []).length;
      // Front matter analysis
      const frontMatterMatch = content.match(/^---\n([\s\S]*?)\n---/);
      const frontMatterSize = frontMatterMatch ? frontMatterMatch[1].length : 0;
      const hasFrontMatter = frontMatterMatch !== null;
      // Readability metrics
      const sentences = content.split(/[.!?]+/).filter(s => s.trim().length > 0);
      const avgWordsPerSentence = sentences.length > 0 ? wordCount / sentences.length : 0;
      const avgCharsPerWord = wordCount > 0 ? charCount / wordCount : 0;
      // Performance score calculation (starts at 100, adjusted below)
      let performanceScore = 100;
      // Size penalties (cumulative: a >100KB file loses 30 in total)
      if (fileSizeKB > 50) performanceScore -= 10;
      if (fileSizeKB > 100) performanceScore -= 20;
      // Content quality bonuses
      if (hasFrontMatter) performanceScore += 5;
      if (sectionCount >= 3) performanceScore += 5;
      if (codeBlockCount > 0) performanceScore += 5;
      // Readability penalties (cumulative, same pattern as size)
      if (avgWordsPerSentence > 25) performanceScore -= 5;
      if (avgWordsPerSentence > 35) performanceScore -= 10;
      const analysisTime = performance.now() - startTime;
      const skillMetrics = {
        skill_name: skillName,
        file_path: skillMdPath,
        file_size_kb: Math.round(fileSizeKB * 100) / 100,
        line_count: lineCount,
        word_count: wordCount,
        char_count: charCount,
        section_count: sectionCount,
        code_block_count: codeBlockCount,
        list_count: listCount,
        front_matter_size: frontMatterSize,
        has_front_matter: hasFrontMatter,
        avg_words_per_sentence: Math.round(avgWordsPerSentence * 100) / 100,
        avg_chars_per_word: Math.round(avgCharsPerWord * 100) / 100,
        performance_score: Math.max(0, Math.min(100, performanceScore)),
        analysis_time_ms: Math.round(analysisTime * 100) / 100,
        last_modified: stats.mtime.toISOString()
      };
      this.metrics.skill_metrics[skillName] = skillMetrics;
      // Log performance assessment
      if (performanceScore >= 80) {
        this.log(`${skillName}: GOOD performance (score: ${performanceScore})`, 'good');
      } else if (performanceScore >= 60) {
        this.log(`${skillName}: OK performance (score: ${performanceScore})`, 'info');
      } else {
        this.log(`${skillName}: POOR performance (score: ${performanceScore})`, 'poor');
      }
      return skillMetrics;
    } catch (error) {
      this.log(`Error analyzing ${skillName}: ${error.message}`, 'warn');
      return null;
    }
  }
  /**
   * Analyze a single workflow markdown file: size, numbered steps, code
   * blocks, @speckit-* skill references, and a heuristic score. Records the
   * result under `metrics.workflow_metrics[workflowName]`.
   * @returns {object|null} the metrics object, or null when the file is
   *   missing or unreadable
   */
  analyzeWorkflowPerformance(workflowPath, workflowName) {
    const startTime = performance.now();
    if (!fs.existsSync(workflowPath)) {
      this.log(`Skipping workflow ${workflowName} - file not found`, 'warn');
      return null;
    }
    try {
      const stats = fs.statSync(workflowPath);
      const content = fs.readFileSync(workflowPath, 'utf8');
      // Basic metrics
      const fileSizeKB = stats.size / 1024;
      const lineCount = content.split('\n').length;
      const wordCount = content.split(/\s+/).filter(word => word.length > 0).length;
      // Workflow-specific metrics (steps = lines starting with "1." etc.)
      const stepCount = (content.match(/^\d+\./gm) || []).length;
      const codeBlockCount = (content.match(/```[\s\S]*?```/g) || []).length;
      const skillReferences = (content.match(/@speckit-\w+/g) || []).length;
      // Performance score calculation
      let performanceScore = 100;
      // Size penalties
      if (fileSizeKB > 20) performanceScore -= 10;
      if (fileSizeKB > 50) performanceScore -= 20;
      // Content quality bonuses
      if (stepCount > 0) performanceScore += 10;
      if (codeBlockCount > 0) performanceScore += 5;
      if (skillReferences > 0) performanceScore += 5;
      const analysisTime = performance.now() - startTime;
      const workflowMetrics = {
        workflow_name: workflowName,
        file_path: workflowPath,
        file_size_kb: Math.round(fileSizeKB * 100) / 100,
        line_count: lineCount,
        word_count: wordCount,
        step_count: stepCount,
        code_block_count: codeBlockCount,
        skill_references: skillReferences,
        performance_score: Math.max(0, Math.min(100, performanceScore)),
        analysis_time_ms: Math.round(analysisTime * 100) / 100,
        last_modified: stats.mtime.toISOString()
      };
      this.metrics.workflow_metrics[workflowName] = workflowMetrics;
      // Log performance assessment
      if (performanceScore >= 80) {
        this.log(`${workflowName}: GOOD performance (score: ${performanceScore})`, 'good');
      } else if (performanceScore >= 60) {
        this.log(`${workflowName}: OK performance (score: ${performanceScore})`, 'info');
      } else {
        this.log(`${workflowName}: POOR performance (score: ${performanceScore})`, 'poor');
      }
      return workflowMetrics;
    } catch (error) {
      this.log(`Error analyzing workflow ${workflowName}: ${error.message}`, 'warn');
      return null;
    }
  }
  /**
   * Collect directory sizes and file counts for the .agents, skills, and
   * workflows trees into `metrics.system_metrics`.
   */
  analyzeSystemMetrics() {
    this.log('Analyzing system metrics...', 'info');
    // Directory sizes
    const agentsSize = this.getDirectorySize(AGENTS_DIR);
    const skillsSize = this.getDirectorySize(SKILLS_DIR);
    const workflowsDir = path.join(BASE_DIR, '.windsurf', 'workflows');
    const workflowsSize = fs.existsSync(workflowsDir) ? this.getDirectorySize(workflowsDir) : 0;
    // File counts
    const totalFiles = this.countFiles(AGENTS_DIR);
    const skillFiles = this.countFiles(SKILLS_DIR);
    const workflowFiles = fs.existsSync(workflowsDir) ? this.countFiles(workflowsDir) : 0;
    this.metrics.system_metrics = {
      agents_directory_size_kb: Math.round(agentsSize / 1024),
      skills_directory_size_kb: Math.round(skillsSize / 1024),
      workflows_directory_size_kb: Math.round(workflowsSize / 1024),
      total_files: totalFiles,
      skill_files: skillFiles,
      workflow_files: workflowFiles,
      analysis_timestamp: new Date().toISOString()
    };
    this.log(`System: ${totalFiles} files, ${Math.round(agentsSize / 1024)}KB total`, 'info');
  }
  /**
   * Recursively sum file sizes (bytes) under a directory; 0 when missing.
   * NOTE(review): follows whatever statSync reports — symlink cycles, if
   * any, would recurse indefinitely; assumed absent in this tree.
   */
  getDirectorySize(dirPath) {
    let totalSize = 0;
    if (!fs.existsSync(dirPath)) {
      return 0;
    }
    const items = fs.readdirSync(dirPath);
    for (const item of items) {
      const itemPath = path.join(dirPath, item);
      const stats = fs.statSync(itemPath);
      if (stats.isDirectory()) {
        totalSize += this.getDirectorySize(itemPath);
      } else {
        totalSize += stats.size;
      }
    }
    return totalSize;
  }
  /**
   * Recursively count regular files under a directory; 0 when missing.
   */
  countFiles(dirPath) {
    let fileCount = 0;
    if (!fs.existsSync(dirPath)) {
      return 0;
    }
    const items = fs.readdirSync(dirPath);
    for (const item of items) {
      const itemPath = path.join(dirPath, item);
      const stats = fs.statSync(itemPath);
      if (stats.isDirectory()) {
        fileCount += this.countFiles(itemPath);
      } else {
        fileCount++;
      }
    }
    return fileCount;
  }
  /**
   * Derive prioritized recommendations from the collected metrics (low
   * average scores, oversized files, missing front matter, large .agents
   * tree) and store them in `summary.recommendations`.
   */
  generateRecommendations() {
    const recommendations = [];
    const { skill_metrics, workflow_metrics, system_metrics } = this.metrics;
    // Analyze skill performance
    const skillScores = Object.values(skill_metrics).map(m => m.performance_score);
    const avgSkillScore = skillScores.length > 0 ? skillScores.reduce((a, b) => a + b, 0) / skillScores.length : 0;
    if (avgSkillScore < 70) {
      recommendations.push({
        type: 'performance',
        priority: 'high',
        message: 'Average skill performance is below optimal. Consider optimizing skill documentation.',
        details: `Average score: ${Math.round(avgSkillScore)}`
      });
    }
    // Check for oversized files
    const largeSkills = Object.values(skill_metrics).filter(m => m.file_size_kb > 50);
    if (largeSkills.length > 0) {
      recommendations.push({
        type: 'size',
        priority: 'medium',
        message: `${largeSkills.length} skills have large file sizes (>50KB). Consider breaking down complex skills.`,
        details: largeSkills.map(s => `${s.skill_name} (${s.file_size_kb}KB)`).join(', ')
      });
    }
    // Check for missing front matter
    const skillsWithoutFrontMatter = Object.values(skill_metrics).filter(m => !m.has_front_matter);
    if (skillsWithoutFrontMatter.length > 0) {
      recommendations.push({
        type: 'structure',
        priority: 'high',
        message: `${skillsWithoutFrontMatter.length} skills missing front matter. Add proper YAML front matter.`,
        details: skillsWithoutFrontMatter.map(s => s.skill_name).join(', ')
      });
    }
    // Analyze workflow performance
    const workflowScores = Object.values(workflow_metrics).map(m => m.performance_score);
    const avgWorkflowScore = workflowScores.length > 0 ? workflowScores.reduce((a, b) => a + b, 0) / workflowScores.length : 0;
    if (avgWorkflowScore < 70) {
      recommendations.push({
        type: 'performance',
        priority: 'medium',
        message: 'Average workflow performance could be improved. Add more detailed steps and examples.',
        details: `Average score: ${Math.round(avgWorkflowScore)}`
      });
    }
    // System recommendations
    if (system_metrics.agents_directory_size_kb > 1000) {
      recommendations.push({
        type: 'maintenance',
        priority: 'low',
        message: '.agents directory is growing large. Consider archiving old logs and reports.',
        details: `Current size: ${system_metrics.agents_directory_size_kb}KB`
      });
    }
    this.metrics.summary.recommendations = recommendations;
    // Log recommendations
    if (recommendations.length > 0) {
      this.log('Performance Recommendations:', 'info');
      recommendations.forEach((rec, index) => {
        const priority = rec.priority === 'high' ? 'HIGH' : rec.priority === 'medium' ? 'MED' : 'LOW';
        this.log(` ${index + 1}. [${priority}] ${rec.message}`, 'warn');
      });
    } else {
      this.log('No performance issues detected - system is optimized!', 'good');
    }
  }
  /**
   * Compute the weighted overall score (skills 70%, workflows 30% — both
   * default to 100 when nothing was analyzed), average file sizes, and
   * analyzed counts into `metrics.summary`.
   */
  calculateOverallPerformance() {
    const { skill_metrics, workflow_metrics } = this.metrics;
    const skillScores = Object.values(skill_metrics).map(m => m.performance_score);
    const workflowScores = Object.values(workflow_metrics).map(m => m.performance_score);
    const avgSkillScore = skillScores.length > 0 ? skillScores.reduce((a, b) => a + b, 0) / skillScores.length : 100;
    const avgWorkflowScore = workflowScores.length > 0 ? workflowScores.reduce((a, b) => a + b, 0) / workflowScores.length : 100;
    // Weight skills more heavily than workflows
    const overallScore = (avgSkillScore * 0.7) + (avgWorkflowScore * 0.3);
    this.metrics.summary.performance_score = Math.round(overallScore);
    this.metrics.summary.average_skill_size = skillScores.length > 0
      ? Math.round(Object.values(skill_metrics).reduce((sum, m) => sum + m.file_size_kb, 0) / skillScores.length * 100) / 100
      : 0;
    this.metrics.summary.average_workflow_size = workflowScores.length > 0
      ? Math.round(Object.values(workflow_metrics).reduce((sum, m) => sum + m.file_size_kb, 0) / workflowScores.length * 100) / 100
      : 0;
    this.metrics.summary.total_skills_analyzed = skillScores.length;
    this.metrics.summary.total_workflows_analyzed = workflowScores.length;
  }
  /**
   * Finalize the run duration, write the metrics (plus environment info) to
   * PERFORMANCE_REPORT_PATH as JSON, and return the report object.
   */
  generateReport() {
    this.metrics.duration = performance.now() - this.startTime;
    const report = {
      ...this.metrics,
      generated_at: new Date().toISOString(),
      environment: {
        node_version: process.version,
        platform: process.platform,
        memory_usage: process.memoryUsage()
      }
    };
    fs.writeFileSync(PERFORMANCE_REPORT_PATH, JSON.stringify(report, null, 2));
    this.log(`Performance report saved to: ${PERFORMANCE_REPORT_PATH}`, 'info');
    return report;
  }
  /**
   * Run the full analysis pipeline: every skill, every workflow, system
   * metrics, overall score, recommendations, report, console summary.
   * @returns {Promise<object>} the generated report
   */
  async runPerformanceAnalysis() {
    this.log('Starting performance analysis...', 'info');
    this.log(`Base directory: ${BASE_DIR}`, 'info');
    // Analyze skills
    this.log('Analyzing skill performance...', 'info');
    if (fs.existsSync(SKILLS_DIR)) {
      const skillDirs = fs.readdirSync(SKILLS_DIR).filter(item => {
        const itemPath = path.join(SKILLS_DIR, item);
        return fs.statSync(itemPath).isDirectory();
      });
      for (const skillDir of skillDirs) {
        const skillPath = path.join(SKILLS_DIR, skillDir);
        this.analyzeSkillPerformance(skillPath, skillDir);
      }
    }
    // Analyze workflows
    this.log('Analyzing workflow performance...', 'info');
    const workflowsDir = path.join(BASE_DIR, '.windsurf', 'workflows');
    if (fs.existsSync(workflowsDir)) {
      const workflowFiles = fs.readdirSync(workflowsDir).filter(file => file.endsWith('.md'));
      for (const workflowFile of workflowFiles) {
        const workflowPath = path.join(workflowsDir, workflowFile);
        const workflowName = workflowFile.replace('.md', '');
        this.analyzeWorkflowPerformance(workflowPath, workflowName);
      }
    }
    // System metrics
    this.analyzeSystemMetrics();
    // Calculate overall performance
    this.calculateOverallPerformance();
    // Generate recommendations
    this.generateRecommendations();
    // Generate report
    const report = this.generateReport();
    // Summary
    this.log('=== Performance Analysis Summary ===', 'info');
    this.log(`Overall performance score: ${this.metrics.summary.performance_score}/100`, 'info');
    this.log(`Skills analyzed: ${this.metrics.summary.total_skills_analyzed}`, 'info');
    this.log(`Workflows analyzed: ${this.metrics.summary.total_workflows_analyzed}`, 'info');
    this.log(`Average skill size: ${this.metrics.summary.average_skill_size}KB`, 'info');
    this.log(`Average workflow size: ${this.metrics.summary.average_workflow_size}KB`, 'info');
    this.log(`Analysis duration: ${Math.round(this.metrics.duration)}ms`, 'info');
    this.log(`Recommendations: ${this.metrics.summary.recommendations.length}`, 'info');
    return report;
  }
}
// CLI interface
/**
 * CLI entry point: run the performance analysis and exit non-zero when the
 * overall score falls below 60.
 */
async function main() {
  const monitor = new PerformanceMonitor();
  try {
    const { summary } = await monitor.runPerformanceAnalysis();
    process.exit(summary.performance_score < 60 ? 1 : 0);
  } catch (error) {
    console.error('Performance analysis failed:', error);
    process.exit(1);
  }
}
// Export for use in other modules
module.exports = { PerformanceMonitor };
// Run if called directly
if (require.main === module) {
  main();
}
+203
View File
@@ -0,0 +1,203 @@
# audit-skills.ps1 - Verify skill completeness and health
# Part of LCBP3-DMS Phase 2 improvements
param(
    [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot))
)
# Colors for output.
# BUGFIX: these values are consumed as `Write-Host -ForegroundColor $Colors.X`
# throughout the script, and -ForegroundColor requires a [System.ConsoleColor]
# name. The previous ANSI escape strings ("`e[0;31m" etc.) are not valid
# ConsoleColor values, so every colored Write-Host call threw a
# parameter-binding error.
$Colors = @{
    Red     = "Red"
    Green   = "Green"
    Yellow  = "Yellow"
    Blue    = "Blue"
    NoColor = "Gray"
}
$AgentsDir = Join-Path $BaseDir ".agents"
$SkillsDir = Join-Path $AgentsDir "skills"
Write-Host "=== Skills Health Audit ===" -ForegroundColor Cyan
Write-Host "Base directory: $BaseDir"
Write-Host ""
# Function to check if skill has required files
function Test-SkillHealth {
    # Audits one skill directory: SKILL.md presence, optional templates
    # directory, required front-matter fields, and Role/Task sections.
    # Writes per-item status lines and returns the number of issues found.
    param(
        [string]$SkillDir
    )
    $skillName = Split-Path $SkillDir -Leaf
    $issues = 0
    # Check for SKILL.md
    $skillFile = Join-Path $SkillDir "SKILL.md"
    if (Test-Path $skillFile) {
        Write-Host " OK: $skillName/SKILL.md" -ForegroundColor $Colors.Green
    } else {
        Write-Host " MISSING: $skillName/SKILL.md" -ForegroundColor $Colors.Red
        $issues++
    }
    # Check for templates directory (optional)
    $templatesDir = Join-Path $SkillDir "templates"
    if (Test-Path $templatesDir) {
        $templateCount = (Get-ChildItem -Path $templatesDir -Filter "*.md" -File | Measure-Object).Count
        if ($templateCount -gt 0) {
            Write-Host " OK: $skillName/templates ($templateCount files)" -ForegroundColor $Colors.Green
        } else {
            Write-Host " EMPTY: $skillName/templates (no files)" -ForegroundColor $Colors.Yellow
        }
    }
    # Check SKILL.md content if exists
    if (Test-Path $skillFile) {
        $content = Get-Content $skillFile -Raw
        # Check for required front matter fields.
        # BUGFIX: with -Raw the content is a single string, so "^field:" only
        # matched at the very start of the file and every field was falsely
        # reported missing; "(?m)" makes "^" match at each line start.
        $requiredFields = @("name", "description", "version")
        foreach ($field in $requiredFields) {
            if ($content -match "(?m)^${field}:") {
                Write-Host " FIELD: $field" -ForegroundColor $Colors.Green
            } else {
                Write-Host " MISSING FIELD: $field" -ForegroundColor $Colors.Red
                $issues++
            }
        }
        # Check for Role section ("\s*$" tolerates the trailing CR on CRLF files)
        if ($content -match "(?m)^## Role\s*$") {
            Write-Host " SECTION: Role" -ForegroundColor $Colors.Green
        } else {
            Write-Host " MISSING SECTION: Role" -ForegroundColor $Colors.Yellow
            $issues++
        }
        # Check for Task section
        if ($content -match "(?m)^## Task\s*$") {
            Write-Host " SECTION: Task" -ForegroundColor $Colors.Green
        } else {
            Write-Host " MISSING SECTION: Task" -ForegroundColor $Colors.Yellow
            $issues++
        }
    }
    return $issues
}
# Function to get skill version from SKILL.md
function Get-SkillVersion {
    <#
    .SYNOPSIS
        Extracts the front matter "version:" value from a SKILL.md file.
    .PARAMETER SkillFile
        Path of the SKILL.md file to read.
    .OUTPUTS
        [string] The version value, "error" when the file cannot be read,
        or "no_file" when the path does not exist or no version line is found.
    #>
    param(
        [string]$SkillFile
    )
    if (Test-Path $SkillFile) {
        try {
            # -ErrorAction Stop turns read failures into terminating errors so
            # the catch block is reachable (Get-Content errors are
            # non-terminating by default, leaving the catch dead code).
            $content = Get-Content $SkillFile -Raw -ErrorAction Stop
            # (?m) makes ^ match at every line start; without it the pattern
            # only matched when the file literally began with "version:",
            # which never happens behind a "---" front matter delimiter.
            if ($content -match '(?m)^version:\s*(.+)') {
                # Trim also strips a trailing \r captured on CRLF files.
                return $matches[1].Trim()
            }
        } catch {
            return "error"
        }
    }
    return "no_file"
}
# Check skills directory
# Bail out early when the skills tree is missing entirely.
if (-not (Test-Path $SkillsDir)) {
    Write-Host "ERROR: Skills directory not found" -ForegroundColor $Colors.Red
    exit 1
}
Write-Host "Scanning skills directory: $SkillsDir"
Write-Host ""
# Get all skill directories
$skillDirs = Get-ChildItem -Path $SkillsDir -Directory | Sort-Object -Property Name
Write-Host "Found $($skillDirs.Count) skill directories"
Write-Host ""
# Audit each skill, accumulating per-skill results and a running issue total.
$totalIssues = 0
$skillSummary = @()
foreach ($dir in $skillDirs) {
    $name = $dir.Name
    Write-Host "Auditing: $name"
    Write-Host "------------------------"
    $issueCount = Test-SkillHealth -SkillDir $dir.FullName
    $version = Get-SkillVersion -SkillFile (Join-Path -Path $dir.FullName -ChildPath "SKILL.md")
    $skillSummary += @{
        Name    = $name
        Issues  = $issueCount
        Version = $version
    }
    $totalIssues += $issueCount
    Write-Host ""
}
# Summary report: one status line per audited skill.
Write-Host "=== Skills Audit Summary ===" -ForegroundColor Cyan
Write-Host ""
Write-Host "Skill Status:"
Write-Host "-----------"
foreach ($entry in $skillSummary) {
    $label = "$($entry.Name) (v$($entry.Version))"
    if ($entry.Issues -eq 0) {
        Write-Host " HEALTHY: $label" -ForegroundColor $Colors.Green
    } else {
        Write-Host " ISSUES: $label - $($entry.Issues) issues" -ForegroundColor $Colors.Red
    }
}
Write-Host ""
# Check skills.md version consistency
$skillsVersionFile = Join-Path $SkillsDir "VERSION"
if (Test-Path $skillsVersionFile) {
    $content = Get-Content $skillsVersionFile -Raw
    # (?m): match "version:" at any line start, not only at the first byte of
    # the file, so a VERSION file with a leading comment or header still parses.
    if ($content -match "(?m)^version:\s*(.+)") {
        $globalVersion = $matches[1].Trim()
        Write-Host "Global skills version: v$globalVersion"
        Write-Host ""
        # Check for version mismatches
        Write-Host "Version Consistency Check:"
        Write-Host "------------------------"
        $versionMismatches = 0
        foreach ($summary in $skillSummary) {
            # Skip the sentinel values produced by Get-SkillVersion ("no_file",
            # "error") so unreadable skills are not reported as mismatches.
            # ("unknown" kept for backward compatibility with older sentinels.)
            if ($summary.Version -notin @("unknown", "no_file", "error") -and $summary.Version -ne $globalVersion) {
                Write-Host " MISMATCH: $($summary.Name) is v$($summary.Version), global is v$globalVersion" -ForegroundColor $Colors.Yellow
                $versionMismatches++
            }
        }
        if ($versionMismatches -eq 0) {
            Write-Host " All skills match global version" -ForegroundColor $Colors.Green
        }
    }
}
Write-Host ""
# Overall health: non-zero issue total fails the audit with remediation hints.
if ($totalIssues -gt 0) {
    Write-Host "=== ISSUES FOUND: $totalIssues total issues ===" -ForegroundColor $Colors.Red
    Write-Host ""
    Write-Host "Recommendations:"
    Write-Host "1. Fix missing SKILL.md files"
    Write-Host "2. Add required front matter fields"
    Write-Host "3. Ensure Role and Task sections exist"
    Write-Host "4. Align skill versions with global version"
    exit 1
}
Write-Host "=== SUCCESS: All skills healthy ===" -ForegroundColor $Colors.Green
Write-Host "Total skills: $($skillDirs.Count)"
exit 0
@@ -0,0 +1,112 @@
# validate-versions.ps1 - Check version consistency across .agents files
# Part of LCBP3-DMS Phase 2 improvements
param(
    [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
    [string]$ExpectedVersion = "1.8.6"
)
# Colors for output.
# Write-Host -ForegroundColor expects [System.ConsoleColor] names; the previous
# ANSI escape strings ("`e[0;31m" etc.) failed parameter binding, printing a
# binding error instead of colored text on every colored Write-Host call.
$Colors = @{
    Red     = "Red"
    Green   = "Green"
    Yellow  = "Yellow"
    NoColor = "Gray"   # no ConsoleColor "reset" equivalent; unused by this script
}
$AgentsDir = Join-Path $BaseDir ".agents"
Write-Host "=== .agents Version Validation ===" -ForegroundColor Cyan
Write-Host "Base directory: $BaseDir"
Write-Host "Expected version: $ExpectedVersion"
Write-Host ""
# Function to extract version from file
function Get-VersionFromFile {
    <#
    .SYNOPSIS
        Returns the first capture group of $Pattern matched against a file.
    .PARAMETER FilePath
        File to read.
    .PARAMETER Pattern
        Regex containing one capture group for the version string.
    .OUTPUTS
        [string] The captured value, "NOT_FOUND" when the pattern does not
        match, "ERROR" when the file cannot be read, or "FILE_NOT_FOUND"
        when the path does not exist.
    #>
    param(
        [string]$FilePath,
        [string]$Pattern
    )
    if (-not (Test-Path $FilePath)) {
        return "FILE_NOT_FOUND"
    }
    try {
        # -ErrorAction Stop makes read failures terminating so the catch below
        # is reachable (Get-Content errors are non-terminating by default,
        # which left the catch as dead code).
        $content = Get-Content $FilePath -Raw -ErrorAction Stop
        if ($content -match $Pattern) {
            return $matches[1]
        }
        return "NOT_FOUND"
    } catch {
        return "ERROR"
    }
}
# Files to check: path -> regex with one capture group for the version.
$FilesToCheck = @{
    (Join-Path $AgentsDir "README.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)"
    (Join-Path $AgentsDir "skills\VERSION") = "version: ([0-9]+\.[0-9]+\.[0-9]+)"
    (Join-Path $AgentsDir "rules\00-project-context.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)"
    (Join-Path $AgentsDir "skills\skills.md") = "V([0-9]+\.[0-9]+\.[0-9]+)"
}
# Track issues
$Issues = 0
Write-Host "Checking version consistency..."
Write-Host ""
foreach ($file in $FilesToCheck.Keys) {
    $pattern = $FilesToCheck[$file]
    $relativePath = $file.Replace($BaseDir + "\", "")
    $version = Get-VersionFromFile -FilePath $file -Pattern $pattern
    # "ERROR" (unreadable file) previously fell through to the mismatch branch
    # and was reported as "Found vERROR"; treat all three sentinels as errors.
    if ($version -in @("NOT_FOUND", "FILE_NOT_FOUND", "ERROR")) {
        Write-Host " ERROR: $relativePath - Version not found" -ForegroundColor $Colors.Red
        $Issues++
    } elseif ($version -ne $ExpectedVersion) {
        Write-Host " ERROR: $relativePath - Found v$version, expected v$ExpectedVersion" -ForegroundColor $Colors.Red
        $Issues++
    } else {
        Write-Host " OK: $relativePath - v$version" -ForegroundColor $Colors.Green
    }
}
Write-Host ""
# Report the skills VERSION file, when present.
Write-Host "Checking skill file versions..."
$SkillsVersionFile = Join-Path -Path $AgentsDir -ChildPath "skills\VERSION"
if (Test-Path $SkillsVersionFile) {
    $skillsVersion = Get-VersionFromFile -FilePath $SkillsVersionFile -Pattern "version: ([0-9]+\.[0-9]+\.[0-9]+)"
    Write-Host "Skills version file: v$skillsVersion"
}
# Count workflow files under .windsurf\workflows (presence check only).
$WorkflowsDir = Join-Path -Path $BaseDir -ChildPath ".windsurf\workflows"
if (-not (Test-Path $WorkflowsDir)) {
    Write-Host " WARNING: Workflows directory not found at $WorkflowsDir" -ForegroundColor $Colors.Yellow
} else {
    Write-Host "Checking workflow files..."
    $mdFiles = @(Get-ChildItem -Path $WorkflowsDir -Filter "*.md" -File)
    Write-Host " OK: Found $($mdFiles.Count) workflow files" -ForegroundColor $Colors.Green
}
Write-Host ""
# Summary: any tracked issue fails the run with remediation hints.
if ($Issues -gt 0) {
    Write-Host "=== FAILED: $Issues version issues found ===" -ForegroundColor $Colors.Red
    Write-Host ""
    Write-Host "To fix version issues:"
    Write-Host "1. Update files to use v$ExpectedVersion"
    Write-Host "2. Ensure LCBP3 project version matches"
    Write-Host "3. Run this script again to verify"
    exit 1
}
Write-Host "=== SUCCESS: All versions consistent ===" -ForegroundColor $Colors.Green
exit 0