From a57fef4d4479de2c702c10dc7b13176e196283f5 Mon Sep 17 00:00:00 2001 From: admin Date: Mon, 27 Apr 2026 08:12:28 +0700 Subject: [PATCH] 690427:0812 Update Infras #01 --- .agents/scripts/bash/audit-skills.sh | 69 +- .agents/scripts/bash/validate-versions.sh | 14 +- .agents/scripts/powershell/audit-skills.ps1 | 71 +- .../scripts/powershell/validate-versions.ps1 | 22 +- .agents/skills/README.md | 109 + .agents/skills/VERSION | 13 +- .agents/skills/_LCBP3-CONTEXT.md | 91 + .../skills/nestjs-best-practices/AGENTS.md | 1059 ++- .../AGENTS.md.v1.1.0.bak | 5863 +++++++++++++++++ .agents/skills/nestjs-best-practices/SKILL.md | 72 +- .../nestjs-best-practices/metadata.json | 24 + .../rules/db-hybrid-identifier.md | 140 +- .../rules/db-no-typeorm-migrations.md | 100 + .../rules/lcbp3-ai-boundary.md | 157 + .../rules/lcbp3-workflow-engine.md | 181 + .../rules/security-file-two-phase-upload.md | 137 + .../scripts/build-agents.ts | 5 +- .agents/skills/next-best-practices/SKILL.md | 64 +- .agents/skills/next-best-practices/i18n.md | 79 + .../next-best-practices/two-phase-upload.md | 100 + .../next-best-practices/uuid-handling.md | 85 +- .agents/skills/skills.md | 31 +- .agents/skills/speckit-analyze/SKILL.md | 18 +- .agents/skills/speckit-checker/SKILL.md | 14 +- .agents/skills/speckit-checklist/SKILL.md | 14 +- .agents/skills/speckit-clarify/SKILL.md | 14 +- .agents/skills/speckit-constitution/SKILL.md | 30 +- .agents/skills/speckit-diff/SKILL.md | 14 +- .agents/skills/speckit-implement/SKILL.md | 16 +- .agents/skills/speckit-migrate/SKILL.md | 14 +- .agents/skills/speckit-plan/SKILL.md | 18 +- .../speckit-plan/templates/plan-template.md | 2 +- .agents/skills/speckit-quizme/SKILL.md | 14 +- .agents/skills/speckit-reviewer/SKILL.md | 14 +- .../skills/speckit-security-audit/SKILL.md | 24 +- .agents/skills/speckit-specify/SKILL.md | 18 +- .agents/skills/speckit-status/SKILL.md | 14 +- .agents/skills/speckit-tasks/SKILL.md | 14 +- .agents/skills/speckit-taskstoissues/SKILL.md | 14 
+- .agents/skills/speckit-tester/SKILL.md | 14 +- .agents/skills/speckit-validate/SKILL.md | 14 +- .windsurf/workflows/00-speckit.all.md | 20 +- .../workflows/01-speckit.constitution.md | 2 +- .windsurf/workflows/02-speckit.specify.md | 2 +- .windsurf/workflows/03-speckit.clarify.md | 2 +- .windsurf/workflows/04-speckit.plan.md | 2 +- .windsurf/workflows/05-speckit.tasks.md | 2 +- .windsurf/workflows/06-speckit.analyze.md | 2 +- .windsurf/workflows/07-speckit.implement.md | 2 +- .windsurf/workflows/08-speckit.checker.md | 2 +- .windsurf/workflows/09-speckit.tester.md | 2 +- .windsurf/workflows/10-speckit.reviewer.md | 2 +- .windsurf/workflows/11-speckit.validate.md | 2 +- .../workflows/12-speckit.security-audit.md | 22 + .windsurf/workflows/speckit.prepare.md | 10 +- .windsurf/workflows/util-speckit.checklist.md | 2 +- .windsurf/workflows/util-speckit.diff.md | 2 +- .windsurf/workflows/util-speckit.migrate.md | 2 +- .windsurf/workflows/util-speckit.quizme.md | 2 +- .windsurf/workflows/util-speckit.status.md | 2 +- .../workflows/util-speckit.taskstoissues.md | 23 + AGENTS.md | 31 +- CONTRIBUTING.md | 47 +- README.md | 35 +- specs/02-architecture/02-03-network-design.md | 295 +- specs/03-Data-and-Storage/deltas/.gitkeep | 0 specs/03-Data-and-Storage/deltas/README.md | 65 + .../switch-configuration-guide.md | 858 +++ 68 files changed, 9750 insertions(+), 468 deletions(-) create mode 100644 .agents/skills/README.md create mode 100644 .agents/skills/_LCBP3-CONTEXT.md create mode 100644 .agents/skills/nestjs-best-practices/AGENTS.md.v1.1.0.bak create mode 100644 .agents/skills/nestjs-best-practices/metadata.json create mode 100644 .agents/skills/nestjs-best-practices/rules/db-no-typeorm-migrations.md create mode 100644 .agents/skills/nestjs-best-practices/rules/lcbp3-ai-boundary.md create mode 100644 .agents/skills/nestjs-best-practices/rules/lcbp3-workflow-engine.md create mode 100644 .agents/skills/nestjs-best-practices/rules/security-file-two-phase-upload.md create 
mode 100644 .agents/skills/next-best-practices/i18n.md create mode 100644 .agents/skills/next-best-practices/two-phase-upload.md create mode 100644 .windsurf/workflows/12-speckit.security-audit.md create mode 100644 .windsurf/workflows/util-speckit.taskstoissues.md create mode 100644 specs/03-Data-and-Storage/deltas/.gitkeep create mode 100644 specs/03-Data-and-Storage/deltas/README.md create mode 100644 specs/04-Infrastructure-OPS/switch-configuration-guide.md diff --git a/.agents/scripts/bash/audit-skills.sh b/.agents/scripts/bash/audit-skills.sh index 187c51e..e4d2479 100644 --- a/.agents/scripts/bash/audit-skills.sh +++ b/.agents/scripts/bash/audit-skills.sh @@ -3,7 +3,8 @@ # audit-skills.sh - Verify skill completeness and health # Part of LCBP3-DMS Phase 2 improvements -set -euo pipefail +set -uo pipefail +# Note: no -e — we let per-skill checks accumulate issues without terminating # Colors for output RED='\033[0;31m' @@ -13,7 +14,7 @@ BLUE='\033[0;34m' NC='\033[0m' # No Color # Base directory -BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" +BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." 
&& pwd)" AGENTS_DIR="$BASE_DIR/.agents" SKILLS_DIR="$AGENTS_DIR/skills" @@ -25,9 +26,9 @@ echo check_skill_health() { local skill_dir="$1" local skill_name="$(basename "$skill_dir")" - + local issues=0 - + # Check for SKILL.md if [[ -f "$skill_dir/SKILL.md" ]]; then echo -e "${GREEN} OK${NC}: $skill_name/SKILL.md" @@ -35,7 +36,7 @@ check_skill_health() { echo -e "${RED} MISSING${NC}: $skill_name/SKILL.md" ((issues++)) fi - + # Check for templates directory (optional) if [[ -d "$skill_dir/templates" ]]; then template_count=$(find "$skill_dir/templates" -name "*.md" -type f | wc -l) @@ -45,7 +46,7 @@ check_skill_health() { echo -e "${YELLOW} EMPTY${NC}: $skill_name/templates (no files)" fi fi - + # Check SKILL.md content if exists local skill_file="$skill_dir/SKILL.md" if [[ -f "$skill_file" ]]; then @@ -56,27 +57,21 @@ check_skill_health() { echo -e " ${GREEN} FIELD${NC}: $field" else echo -e " ${RED} MISSING FIELD${NC}: $field" - ((issues++)) + ((issues++)) || true fi done - - # Check for Role section - if grep -q "^## Role$" "$skill_file"; then - echo -e " ${GREEN} SECTION${NC}: Role" - else - echo -e " ${YELLOW} MISSING SECTION${NC}: Role" - ((issues++)) - fi - - # Check for Task section - if grep -q "^## Task$" "$skill_file"; then - echo -e " ${GREEN} SECTION${NC}: Task" - else - echo -e " ${YELLOW} MISSING SECTION${NC}: Task" - ((issues++)) + + # Check for LCBP3 context reference (speckit-* skills only) + if [[ "$skill_name" == speckit-* ]]; then + if grep -q '_LCBP3-CONTEXT\.md' "$skill_file"; then + echo -e " ${GREEN} CONTEXT${NC}: LCBP3 appendix referenced" + else + echo -e " ${YELLOW} MISSING${NC}: LCBP3 context reference" + ((issues++)) || true + fi fi fi - + return $issues } @@ -84,7 +79,15 @@ check_skill_health() { get_skill_version() { local skill_file="$1" if [[ -f "$skill_file" ]]; then - grep "^version:" "$skill_file" | head -1 | sed 's/version: *//' || echo "unknown" + # Match 'version: X.Y.Z' (or quoted) at a LINE START only; ignore nested ` 
version:` fields. + # Output: bare X.Y.Z with no quotes/whitespace. + local raw + raw=$(grep -E "^version:[[:space:]]*['\"]?[0-9]+\.[0-9]+\.[0-9]+" "$skill_file" | head -1 || true) + if [[ -n "$raw" ]]; then + printf '%s' "$raw" | sed -E "s/^version:[[:space:]]*['\"]?([0-9]+\.[0-9]+\.[0-9]+).*/\1/" + else + echo "unknown" + fi else echo "no_file" fi @@ -114,15 +117,19 @@ SKILL_SUMMARY=() for skill_dir in "${SKILL_DIRS[@]}"; do skill_name="$(basename "$skill_dir")" + # Skip non-skill entries (e.g. _LCBP3-CONTEXT.md would not match here; safe) + [[ "$skill_name" == _* ]] && continue echo "Auditing: $skill_name" echo "------------------------" - + + set +e check_skill_health "$skill_dir" issues=$? - + set -u + skill_version=$(get_skill_version "$skill_dir/SKILL.md") SKILL_SUMMARY+=("$skill_name:$issues:$skill_version") - + TOTAL_ISSUES=$((TOTAL_ISSUES + issues)) echo done @@ -147,15 +154,15 @@ echo # Check skills.md version consistency SKILLS_VERSION_FILE="$SKILLS_DIR/VERSION" if [[ -f "$SKILLS_VERSION_FILE" ]]; then - global_version=$(grep "^version:" "$SKILLS_VERSION_FILE" | sed 's/version: *//') + global_version=$(grep "^version:" "$SKILLS_VERSION_FILE" | sed 's/version: *//' | tr -d '\r\n ') echo "Global skills version: v$global_version" echo - + # Check for version mismatches echo "Version Consistency Check:" echo "------------------------" VERSION_MISMATCHES=0 - + for summary in "${SKILL_SUMMARY[@]}"; do IFS=':' read -r name issues version <<< "$summary" if [[ "$version" != "unknown" && "$version" != "no_file" && "$version" != "$global_version" ]]; then @@ -163,7 +170,7 @@ if [[ -f "$SKILLS_VERSION_FILE" ]]; then ((VERSION_MISMATCHES++)) fi done - + if [[ $VERSION_MISMATCHES -eq 0 ]]; then echo -e "${GREEN} All skills match global version${NC}" fi diff --git a/.agents/scripts/bash/validate-versions.sh b/.agents/scripts/bash/validate-versions.sh index 9dc316a..1ceb790 100644 --- a/.agents/scripts/bash/validate-versions.sh +++ 
b/.agents/scripts/bash/validate-versions.sh @@ -12,11 +12,11 @@ YELLOW='\033[1;33m' NC='\033[0m' # No Color # Base directory -BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" +BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)" AGENTS_DIR="$BASE_DIR/.agents" # Expected version (should match LCBP3 version) -EXPECTED_VERSION="1.8.6" +EXPECTED_VERSION="1.8.9" echo "=== .agents Version Validation ===" echo "Base directory: $BASE_DIR" @@ -27,7 +27,7 @@ echo extract_version() { local file="$1" local pattern="$2" - + if [[ -f "$file" ]]; then grep -o "$pattern" "$file" | head -1 | sed 's/.*\([0-9]\+\.[0-9]\+\.[0-9]\+\).*/\1/' || echo "NOT_FOUND" else @@ -37,10 +37,8 @@ extract_version() { # Files to check declare -A FILES_TO_CHECK=( - ["$AGENTS_DIR/README.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)" ["$AGENTS_DIR/skills/VERSION"]="version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)" - ["$AGENTS_DIR/rules/00-project-context.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)" - ["$AGENTS_DIR/skills/skills.md"]="V\([0-9]\+\.[0-9]\+\.[0-9]\+\)" + ["$AGENTS_DIR/skills/skills.md"]="[Vv]\([0-9]\+\.[0-9]\+\.[0-9]\+\)" ) # Track issues @@ -52,9 +50,9 @@ echo for file in "${!FILES_TO_CHECK[@]}"; do pattern="${FILES_TO_CHECK[$file]}" relative_path="${file#$BASE_DIR/}" - + version=$(extract_version "$file" "$pattern") - + if [[ "$version" == "NOT_FOUND" ]] || [[ "$version" == "FILE_NOT_FOUND" ]]; then echo -e "${RED} ERROR${NC}: $relative_path - Version not found" ((ISSUES++)) diff --git a/.agents/scripts/powershell/audit-skills.ps1 b/.agents/scripts/powershell/audit-skills.ps1 index 27003f7..057e4f1 100644 --- a/.agents/scripts/powershell/audit-skills.ps1 +++ b/.agents/scripts/powershell/audit-skills.ps1 @@ -2,16 +2,16 @@ # Part of LCBP3-DMS Phase 2 improvements param( - [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)) + [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent (Split-Path -Parent $PSScriptRoot))) ) -# Colors for output +# Map to 
ConsoleColor enum (Write-Host expects enum, not ANSI strings) $Colors = @{ - Red = "`e[0;31m" - Green = "`e[0;32m" - Yellow = "`e[1;33m" - Blue = "`e[0;34m" - NoColor = "`e[0m" + Red = 'Red' + Green = 'Green' + Yellow = 'Yellow' + Blue = 'Blue' + NoColor = 'Gray' } $AgentsDir = Join-Path $BaseDir ".agents" @@ -26,10 +26,10 @@ function Test-SkillHealth { param( [string]$SkillDir ) - + $skillName = Split-Path $SkillDir -Leaf $issues = 0 - + # Check for SKILL.md $skillFile = Join-Path $SkillDir "SKILL.md" if (Test-Path $skillFile) { @@ -38,7 +38,7 @@ function Test-SkillHealth { Write-Host " MISSING: $skillName/SKILL.md" -ForegroundColor $Colors.Red $issues++ } - + # Check for templates directory (optional) $templatesDir = Join-Path $SkillDir "templates" if (Test-Path $templatesDir) { @@ -49,39 +49,34 @@ function Test-SkillHealth { Write-Host " EMPTY: $skillName/templates (no files)" -ForegroundColor $Colors.Yellow } } - + # Check SKILL.md content if exists if (Test-Path $skillFile) { $content = Get-Content $skillFile -Raw - + # Check for required front matter fields - $requiredFields = @("name", "description", "version") + $requiredFields = @('name', 'description', 'version') foreach ($field in $requiredFields) { - if ($content -match "^$field:") { + $pattern = "(?m)^${field}:" + if ($content -match $pattern) { Write-Host " FIELD: $field" -ForegroundColor $Colors.Green } else { Write-Host " MISSING FIELD: $field" -ForegroundColor $Colors.Red $issues++ } } - - # Check for Role section - if ($content -match "^## Role$") { - Write-Host " SECTION: Role" -ForegroundColor $Colors.Green - } else { - Write-Host " MISSING SECTION: Role" -ForegroundColor $Colors.Yellow - $issues++ - } - - # Check for Task section - if ($content -match "^## Task$") { - Write-Host " SECTION: Task" -ForegroundColor $Colors.Green - } else { - Write-Host " MISSING SECTION: Task" -ForegroundColor $Colors.Yellow - $issues++ + + # Check for LCBP3 context reference (speckit-* skills) + if ($skillName 
-like 'speckit-*') { + if ($content -match '_LCBP3-CONTEXT\.md') { + Write-Host " CONTEXT: LCBP3 appendix referenced" -ForegroundColor $Colors.Green + } else { + Write-Host " MISSING: LCBP3 context reference" -ForegroundColor $Colors.Yellow + $issues++ + } } } - + return $issues } @@ -90,11 +85,11 @@ function Get-SkillVersion { param( [string]$SkillFile ) - + if (Test-Path $SkillFile) { try { $content = Get-Content $SkillFile -Raw - if ($content -match "^version:\s*(.+)") { + if ($content -match "(?m)^version:\s*['""]?([0-9]+\.[0-9]+\.[0-9]+)['""]?") { return $matches[1].Trim() } } catch { @@ -127,16 +122,16 @@ foreach ($skillDir in $skillDirs) { $skillName = $skillDir.Name Write-Host "Auditing: $skillName" Write-Host "------------------------" - + $issues = Test-SkillHealth -SkillDir $skillDir.FullName - + $skillVersion = Get-SkillVersion -SkillFile (Join-Path $skillDir.FullName "SKILL.md") $skillSummary += @{ Name = $skillName Issues = $issues Version = $skillVersion } - + $totalIssues += $issues Write-Host "" } @@ -165,19 +160,19 @@ if (Test-Path $skillsVersionFile) { $globalVersion = $matches[1].Trim() Write-Host "Global skills version: v$globalVersion" Write-Host "" - + # Check for version mismatches Write-Host "Version Consistency Check:" Write-Host "------------------------" $versionMismatches = 0 - + foreach ($summary in $skillSummary) { if ($summary.Version -ne "unknown" -and $summary.Version -ne "no_file" -and $summary.Version -ne $globalVersion) { Write-Host " MISMATCH: $($summary.Name) is v$($summary.Version), global is v$globalVersion" -ForegroundColor $Colors.Yellow $versionMismatches++ } } - + if ($versionMismatches -eq 0) { Write-Host " All skills match global version" -ForegroundColor $Colors.Green } diff --git a/.agents/scripts/powershell/validate-versions.ps1 b/.agents/scripts/powershell/validate-versions.ps1 index f20fdef..72ec70c 100644 --- a/.agents/scripts/powershell/validate-versions.ps1 +++ b/.agents/scripts/powershell/validate-versions.ps1 
@@ -2,16 +2,16 @@ # Part of LCBP3-DMS Phase 2 improvements param( - [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)), - [string]$ExpectedVersion = "1.8.6" + [string]$BaseDir = (Split-Path -Parent (Split-Path -Parent (Split-Path -Parent $PSScriptRoot))), + [string]$ExpectedVersion = "1.8.9" ) -# Colors for output +# Map to ConsoleColor enum (Write-Host expects enum, not ANSI) $Colors = @{ - Red = "`e[0;31m" - Green = "`e[0;32m" - Yellow = "`e[1;33m" - NoColor = "`e[0m" + Red = 'Red' + Green = 'Green' + Yellow = 'Yellow' + NoColor = 'Gray' } $AgentsDir = Join-Path $BaseDir ".agents" @@ -27,7 +27,7 @@ function Get-VersionFromFile { [string]$FilePath, [string]$Pattern ) - + if (Test-Path $FilePath) { try { $content = Get-Content $FilePath -Raw @@ -46,9 +46,7 @@ function Get-VersionFromFile { # Files to check $FilesToCheck = @{ - (Join-Path $AgentsDir "README.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)" (Join-Path $AgentsDir "skills\VERSION") = "version: ([0-9]+\.[0-9]+\.[0-9]+)" - (Join-Path $AgentsDir "rules\00-project-context.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)" (Join-Path $AgentsDir "skills\skills.md") = "V([0-9]+\.[0-9]+\.[0-9]+)" } @@ -61,9 +59,9 @@ Write-Host "" foreach ($file in $FilesToCheck.Keys) { $pattern = $FilesToCheck[$file] $relativePath = $file.Replace($BaseDir + "\", "") - + $version = Get-VersionFromFile -FilePath $file -Pattern $pattern - + if ($version -eq "NOT_FOUND" -or $version -eq "FILE_NOT_FOUND") { Write-Host " ERROR: $relativePath - Version not found" -ForegroundColor $Colors.Red $Issues++ diff --git a/.agents/skills/README.md b/.agents/skills/README.md new file mode 100644 index 0000000..e5162b8 --- /dev/null +++ b/.agents/skills/README.md @@ -0,0 +1,109 @@ +# `.agents/skills/` — LCBP3 Agent Skill Pack + +**Version:** 1.8.9 | **Last Updated:** 2026-04-22 | **Total Skills:** 20 + +Agent skills for AI-assisted development in **Windsurf IDE** (and compatible agents: Codex CLI, opencode, Amp, Antigravity, 
AGENTS.md-aware tools). + +--- + +## 📂 Layout + +``` +.agents/skills/ +├── VERSION # Single source of truth for skill-pack version +├── skills.md # Overview + dependency matrix + health monitoring +├── _LCBP3-CONTEXT.md # Shared LCBP3 context injected into every speckit-* skill +├── README.md # (this file) +├── nestjs-best-practices/ # Backend rules (40 rules across 10 categories) +├── next-best-practices/ # Frontend rules (Next.js 15+) +└── speckit-*/ # 18 workflow skills (spec → plan → tasks → implement → …) +``` + +Each skill directory contains: + +- `SKILL.md` — frontmatter (`name`, `description`, `version: 1.8.9`, `scope`, `depends-on`, `handoffs`) + instructions +- `templates/` _(optional)_ — artifact templates (spec/plan/tasks/checklist) +- `rules/` _(nestjs only)_ — individual rule files grouped by prefix (`arch-`, `security-`, `db-`, etc.) + +--- + +## 🚀 How Windsurf Invokes These Skills + +Windsurf exposes two entry points: + +1. **Skill tool** — Windsurf discovers skills by scanning `.agents/skills/*/SKILL.md` frontmatter. Skills marked `user-invocable: false` are used silently by Cascade. +2. **Slash commands** — `.windsurf/workflows/*.md` wraps each skill as a slash command (e.g. `/04-speckit.plan`). The workflow file is short; the heavy lifting is delegated to the skill via `skill` tool. + +Both paths end up executing the same `SKILL.md` instructions. 
+ +--- + +## 🧭 Typical Flow + +``` +/01-speckit.constitution → AGENTS.md / product vision +/02-speckit.specify → specs/feat-XXX/spec.md +/03-speckit.clarify → updates spec.md (up to 5 targeted questions) +/04-speckit.plan → specs/feat-XXX/plan.md + data-model.md + contracts/ +/05-speckit.tasks → specs/feat-XXX/tasks.md +/06-speckit.analyze → cross-artifact consistency report (read-only) +/07-speckit.implement → executes tasks with Ironclad Protocols (Blast Radius + Strangler + TDD) +/08-speckit.checker → pnpm lint / typecheck / markdown-lint +/09-speckit.tester → pnpm test + coverage gates (Backend 70%+, Business Logic 80%+) +/10-speckit.reviewer → code review with Tier 1/2/3 classification +/11-speckit.validate → UAT / acceptance-criteria.md +``` + +Use `/00-speckit.all` to run specify → clarify → plan → tasks → analyze in one go. + +--- + +## 🛠️ Helper Scripts + +From repo root: + +| Script | Purpose | +| --- | --- | +| `./.agents/scripts/bash/check-prerequisites.sh --json` | Emit `FEATURE_DIR` + `AVAILABLE_DOCS` for a feature branch | +| `./.agents/scripts/bash/setup-plan.sh --json` | Emit `FEATURE_SPEC`, `IMPL_PLAN`, `SPECS_DIR`, `BRANCH` | +| `./.agents/scripts/bash/update-agent-context.sh windsurf` | Append tech entries to `AGENTS.md` | +| `./.agents/scripts/bash/audit-skills.sh` | Validate all `SKILL.md` frontmatter + presence | +| `./.agents/scripts/bash/validate-versions.sh` | Version consistency check | +| `./.agents/scripts/bash/sync-workflows.sh` | Verify every skill has a `.windsurf/workflows/*.md` wrapper | + +All scripts mirror to `.agents/scripts/powershell/*.ps1` for Windows. + +--- + +## ⚠️ Tier 1 Non-Negotiables (auto-enforced) + +- ADR-019 — `publicId` exposed directly; no `parseInt` / `Number` / `+` on UUID; no `id ?? 
''` fallback +- ADR-009 — edit SQL schema directly, no TypeORM migrations +- ADR-016 — JWT + CASL on every mutation; `Idempotency-Key` required; ClamAV two-phase upload +- ADR-018 — AI via DMS API only (Ollama on Admin Desktop; no direct DB/storage) +- ADR-007 — layered error classification (Validation / Business / System) +- Zero `any`, zero `console.log` (use `Logger`) + +See [`_LCBP3-CONTEXT.md`](./_LCBP3-CONTEXT.md) for the complete list. + +--- + +## 🤝 Extending + +To add a new skill: + +1. Create `NAME/SKILL.md` with frontmatter: `name`, `description`, `version: 1.8.9`, `scope`, `depends-on`. +2. Append an LCBP3 context reference pointing to `_LCBP3-CONTEXT.md`. +3. Wrap with `.windsurf/workflows/NAME.md` so it becomes a slash command. +4. Update [`skills.md`](./skills.md) dependency matrix. +5. Run `./.agents/scripts/bash/audit-skills.sh` → must pass. + +--- + +## 📚 References + +- **Canonical rules:** `AGENTS.md` (repo root) +- **Product vision:** `specs/00-Overview/00-03-product-vision.md` +- **ADRs:** `specs/06-Decision-Records/` +- **Engineering guidelines:** `specs/05-Engineering-Guidelines/` +- **Contributing:** `CONTRIBUTING.md` diff --git a/.agents/skills/VERSION b/.agents/skills/VERSION index cfedcd2..ddcbf58 100644 --- a/.agents/skills/VERSION +++ b/.agents/skills/VERSION @@ -1,10 +1,19 @@ # Speckit Skills Version -version: 1.8.6 -release_date: 2026-04-14 +version: 1.8.9 +release_date: 2026-04-22 ## Changelog +### 1.8.9 (2026-04-22) +- Full LCBP3-native rebuild of `.agents/skills/` +- Fixed ADR-019 drift (removed `@Expose({ name: 'id' })` and `id ?? 
''` fallback patterns) +- Replaced all dead references (`GEMINI.md` → `AGENTS.md`, v1.7.0 → v1.8.0 schema, `.specify/memory/` → `AGENTS.md`) +- Added real helper scripts under `.agents/scripts/bash/` and `.agents/scripts/powershell/` +- Added ADR-007/008/020/021 coverage +- New rules: workflow-engine, file-two-phase-upload, ai-boundary, i18n, file-upload, workflow-banner +- Standardized frontmatter across all 20 skills (`version: 1.8.9`) + ### 1.8.6 (2026-04-14) - Version alignment with LCBP3-DMS v1.8.6 - Complete skill implementations for all 20 skills diff --git a/.agents/skills/_LCBP3-CONTEXT.md b/.agents/skills/_LCBP3-CONTEXT.md new file mode 100644 index 0000000..a00c60a --- /dev/null +++ b/.agents/skills/_LCBP3-CONTEXT.md @@ -0,0 +1,91 @@ +# 🧭 LCBP3-DMS Context Appendix (Shared) + +> This file is included/referenced by every Speckit skill as the authoritative project context. +> Skills **must** load it (or the files it links to) before generating any artifact. + +**Project:** NAP-DMS (LCBP3) — Laem Chabang Port Phase 3 Document Management System +**Stack:** NestJS 11 + Next.js 16 + TypeScript + MariaDB 11.8 + Redis + BullMQ + Elasticsearch + Ollama (on-prem AI) +**Version:** 1.8.9 (2026-04-18) + +--- + +## 📌 Canonical Rule Sources (read in this order) + +1. **`AGENTS.md`** (repo root) — primary rule file for AI agents; supersedes legacy `GEMINI.md`. +2. **`specs/06-Decision-Records/`** — architectural decisions (22 ADRs); ADR priority > Engineering Guidelines. +3. **`specs/05-Engineering-Guidelines/`** — backend/frontend/testing/i18n/git patterns. +4. **`specs/00-Overview/00-02-glossary.md`** — domain terminology (Correspondence / RFA / Transmittal / Circulation). +5. **`specs/00-Overview/00-03-product-vision.md`** — project constitution (Vision, Strategic Pillars, Guardrails). +6. **`CONTRIBUTING.md`** — spec writing standards, PR template, review levels. +7. **`README.md`** — technology stack + getting started. 
+ +--- + +## 🔴 Tier 1 Non-Negotiables + +- **ADR-019 UUID:** `publicId: string` exposed directly — **no** `@Expose({ name: 'id' })` rename; **no** `parseInt`/`Number`/`+` on UUID; **no** `id ?? ''` fallback in frontend. +- **ADR-009:** No TypeORM migrations — edit `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` or add a `deltas/*.sql` file. +- **ADR-016 Security:** JWT + CASL 4-Level RBAC; `@UseGuards(JwtAuthGuard, CaslAbilityGuard)` on every mutation controller; `ThrottlerGuard` on auth; bcrypt 12 rounds; `Idempotency-Key` required on POST/PUT/PATCH. +- **ADR-002 Document Numbering:** Redis Redlock + TypeORM `@VersionColumn` (double-lock). Never use application-side counter alone. +- **ADR-008 Notifications:** BullMQ queue — never inline email/notification in a request thread. +- **ADR-018 AI Boundary:** Ollama on Admin Desktop only; AI → DMS API → DB (never direct DB/storage). Human-in-the-loop validation required. +- **ADR-007 Error Handling:** Layered (Validation / Business / System); `BusinessException` hierarchy; user-friendly `userMessage` + `recoveryAction`; technical stack only in logs. +- **TypeScript Strict:** Zero `any`, zero `console.log` (use NestJS `Logger`). +- **i18n:** No hardcoded Thai/English strings in components — use i18n keys (see `05-08-i18n-guidelines.md`). +- **File Upload:** Two-phase (Temp → ClamAV → Permanent), whitelist `PDF/DWG/DOCX/XLSX/ZIP`, max 50MB, `StorageService` only. 
+ +--- + +## 🏷️ Domain Glossary (reject generic terms) + +| ✅ Use | ❌ Don't Use | +| --- | --- | +| Correspondence | Letter, Communication, Document | +| RFA | Approval Request, Submit for Approval | +| Transmittal | Delivery Note, Cover Letter | +| Circulation | Distribution, Routing | +| Shop Drawing | Construction Drawing | +| Contract Drawing | Design Drawing, Blueprint | +| Workflow Engine | Approval Flow, Process Engine | +| Document Numbering | Document ID, Auto Number | + +--- + +## 📁 Key Files for Generating / Validating Artifacts + +| When you need... | Read | +| --- | --- | +| A new feature spec | `.agents/skills/speckit-specify/templates/spec-template.md` + `specs/01-Requirements/01-06-edge-cases-and-rules.md` | +| A plan | `.agents/skills/speckit-plan/templates/plan-template.md` + relevant ADRs | +| Task breakdown | `.agents/skills/speckit-tasks/templates/tasks-template.md` + existing patterns in `specs/08-Tasks/` | +| Acceptance criteria / UAT | `specs/01-Requirements/01-05-acceptance-criteria.md` | +| Schema / table definition | `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` + `03-01-data-dictionary.md` | +| RBAC / permissions | `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql` + `01-02-01-rbac-matrix.md` | +| Release / hotfix | `specs/04-Infrastructure-OPS/04-08-release-management-policy.md` | + +--- + +## 🛠️ Helper Scripts (real paths in this repo) + +- `./.agents/scripts/bash/check-prerequisites.sh` / `powershell/*.ps1` +- `./.agents/scripts/bash/setup-plan.sh` +- `./.agents/scripts/bash/update-agent-context.sh windsurf` +- `./.agents/scripts/bash/audit-skills.sh` +- `./.agents/scripts/bash/validate-versions.sh` +- `./.agents/scripts/bash/sync-workflows.sh` + +--- + +## ✅ Commit Checklist (applied automatically by speckit-implement) + +- [ ] UUID pattern verified (no `parseInt` / `Number` / `+` on UUID, no `id ?? 
''` fallback) +- [ ] No `any`, no `console.log` in committed code +- [ ] Business comments in Thai, code identifiers in English +- [ ] Schema changes via SQL directly (not migration) +- [ ] Test coverage meets targets (Backend 70%+, Business Logic 80%+) +- [ ] Relevant ADRs referenced (007/008/009/016/018/019/020/021) +- [ ] Domain glossary terms used correctly +- [ ] Error handling: `Logger` + `HttpException` / `BusinessException` +- [ ] i18n keys used (no hardcode text) +- [ ] Cache invalidation when data mutated +- [ ] OWASP Top 10 review passed diff --git a/.agents/skills/nestjs-best-practices/AGENTS.md b/.agents/skills/nestjs-best-practices/AGENTS.md index 49234af..491452c 100644 --- a/.agents/skills/nestjs-best-practices/AGENTS.md +++ b/.agents/skills/nestjs-best-practices/AGENTS.md @@ -1,8 +1,8 @@ # NestJS Best Practices -**Version 1.1.0** -NestJS Best Practices -January 2026 +**Version 1.8.9** +**NAP-DMS / LCBP3** — Laem Chabang Port Phase 3 Document Management System +2026-04-22 > **Note:** > This document is mainly for agents and LLMs to follow when maintaining, @@ -14,7 +14,9 @@ January 2026 ## Abstract -Comprehensive best practices and architecture guide for NestJS applications, designed for AI agents and LLMs. Contains 40 rules across 10 categories, prioritized by impact from critical (architecture, dependency injection) to incremental (DevOps patterns). Each rule includes detailed explanations, real-world examples comparing incorrect vs. correct implementations, and specific impact metrics to guide automated refactoring and code generation. +Comprehensive NestJS best-practices guide compiled for the LCBP3-DMS backend. Contains 40+ rules across 11 categories (10 general + 1 project-specific), prioritized by impact. 
Forked from Kadajett/nestjs-best-practices (v1.1.0) and aligned to LCBP3 ADRs: ADR-001 (workflow engine), ADR-002 (document numbering), ADR-007 (error handling), ADR-008 (notifications/BullMQ), ADR-009 (no TypeORM migrations), ADR-016 (security), ADR-018/020 (AI boundary), ADR-019 (hybrid UUID identifier — March 2026 pattern), and ADR-021 (workflow context). + +This document is the single, consolidated reference used by Cascade and other AI coding agents when writing, reviewing, or refactoring backend code in this repository. All LCBP3-specific overrides live in section 11. --- @@ -40,10 +42,11 @@ Comprehensive best practices and architecture guide for NestJS applications, des - 3.3 [Use Exception Filters for Error Handling](#33-use-exception-filters-for-error-handling) 4. [Security](#4-security) — **HIGH** - 4.1 [Implement Secure JWT Authentication](#41-implement-secure-jwt-authentication) - - 4.2 [Implement Rate Limiting](#42-implement-rate-limiting) - - 4.3 [Sanitize Output to Prevent XSS](#43-sanitize-output-to-prevent-xss) - - 4.4 [Use Guards for Authentication and Authorization](#44-use-guards-for-authentication-and-authorization) - - 4.5 [Validate All Input with DTOs and Pipes](#45-validate-all-input-with-dtos-and-pipes) + - 4.2 [Two-Phase File Upload + ClamAV (ADR-016)](#42-two-phase-file-upload-clamav-adr-016-) + - 4.3 [Implement Rate Limiting](#43-implement-rate-limiting) + - 4.4 [Sanitize Output to Prevent XSS](#44-sanitize-output-to-prevent-xss) + - 4.5 [Use Guards for Authentication and Authorization](#45-use-guards-for-authentication-and-authorization) + - 4.6 [Validate All Input with DTOs and Pipes](#46-validate-all-input-with-dtos-and-pipes) 5. 
[Performance](#5-performance) — **HIGH** - 5.1 [Use Async Lifecycle Hooks Correctly](#51-use-async-lifecycle-hooks-correctly) - 5.2 [Use Lazy Loading for Large Modules](#52-use-lazy-loading-for-large-modules) @@ -55,8 +58,10 @@ Comprehensive best practices and architecture guide for NestJS applications, des - 6.3 [Use Testing Module for Unit Tests](#63-use-testing-module-for-unit-tests) 7. [Database & ORM](#7-database-orm) — **MEDIUM-HIGH** - 7.1 [Avoid N+1 Query Problems](#71-avoid-n-1-query-problems) - - 7.2 [Use Database Migrations](#72-use-database-migrations) - - 7.3 [Use Transactions for Multi-Step Operations](#73-use-transactions-for-multi-step-operations) + - 7.2 [Hybrid Identifier Strategy (ADR-019)](#72-hybrid-identifier-strategy-adr-019-) + - 7.3 [No TypeORM Migrations (ADR-009)](#73-no-typeorm-migrations-adr-009-) + - 7.4 [No TypeORM Migrations (ADR-009)](#74-no-typeorm-migrations-adr-009-) + - 7.5 [Use Transactions for Multi-Step Operations](#75-use-transactions-for-multi-step-operations) 8. [API Design](#8-api-design) — **MEDIUM** - 8.1 [Use DTOs and Serialization for API Responses](#81-use-dtos-and-serialization-for-api-responses) - 8.2 [Use Interceptors for Cross-Cutting Concerns](#82-use-interceptors-for-cross-cutting-concerns) @@ -67,10 +72,12 @@ Comprehensive best practices and architecture guide for NestJS applications, des - 9.2 [Use Message and Event Patterns Correctly](#92-use-message-and-event-patterns-correctly) - 9.3 [Use Message Queues for Background Jobs](#93-use-message-queues-for-background-jobs) 10. 
[DevOps & Deployment](#10-devops-deployment) — **LOW-MEDIUM** - -- 10.1 [Implement Graceful Shutdown](#101-implement-graceful-shutdown) -- 10.2 [Use ConfigModule for Environment Configuration](#102-use-configmodule-for-environment-configuration) -- 10.3 [Use Structured Logging](#103-use-structured-logging) + - 10.1 [Implement Graceful Shutdown](#101-implement-graceful-shutdown) + - 10.2 [Use ConfigModule for Environment Configuration](#102-use-configmodule-for-environment-configuration) + - 10.3 [Use Structured Logging](#103-use-structured-logging) +11. [LCBP3 Project-Specific](#11-lcbp3-project-specific) — **CRITICAL** + - 11.1 [AI Integration Boundary (ADR-018 / ADR-020)](#111-ai-integration-boundary-adr-018-adr-020-) + - 11.2 [Workflow Engine + Document Numbering + Workflow Context (ADR-001 / 002 / 021)](#112-workflow-engine-document-numbering-workflow-context-adr-001-002-021-) --- @@ -80,7 +87,7 @@ Comprehensive best practices and architecture guide for NestJS applications, des ### 1.1 Avoid Circular Dependencies -**Impact: CRITICAL** — "#1 cause of runtime crashes" +**Impact: CRITICAL** — '#1 cause of runtime crashes' Circular dependencies occur when Module A imports Module B, and Module B imports Module A (directly or transitively). NestJS can sometimes resolve these through forward references, but they indicate architectural problems and should be avoided. This is the #1 cause of runtime crashes in NestJS applications. @@ -158,7 +165,7 @@ Reference: [NestJS Circular Dependency](https://docs.nestjs.com/fundamentals/cir ### 1.2 Organize by Feature Modules -**Impact: CRITICAL** — "3-5x faster onboarding and development" +**Impact: CRITICAL** — '3-5x faster onboarding and development' Organize your application into feature modules that encapsulate related functionality. Each feature module should be self-contained with its own controllers, services, entities, and DTOs. Avoid organizing by technical layer (all controllers together, all services together). 
This enables 3-5x faster onboarding and feature development. @@ -377,7 +384,7 @@ Reference: [NestJS Modules](https://docs.nestjs.com/modules#shared-modules) ### 1.4 Single Responsibility for Services -**Impact: CRITICAL** — "40%+ improvement in testability" +**Impact: CRITICAL** — '40%+ improvement in testability' Each service should have a single, well-defined responsibility. Avoid "god services" that handle multiple unrelated concerns. If a service name includes "And" or handles more than one domain concept, it likely violates single responsibility. This reduces complexity and improves testability by 40%+. @@ -1943,7 +1950,142 @@ Reference: [NestJS Authentication](https://docs.nestjs.com/security/authenticati --- -### 4.2 Implement Rate Limiting +### 4.2 Two-Phase File Upload + ClamAV (ADR-016) + +**Impact: CRITICAL** — Upload → Temp → ClamAV scan → Commit → Permanent. Whitelist + 50MB cap. StorageService only. + +**Never write uploaded files directly to permanent storage.** All uploads must go through: + +``` +Client → Upload endpoint → Temp storage → ClamAV scan → Commit endpoint → Permanent storage +``` + +--- + +## Constraints (non-negotiable) + +| Rule | Value | +| --- | --- | +| Allowed MIME types | `application/pdf`, `image/vnd.dwg`, `application/vnd.openxmlformats-officedocument.wordprocessingml.document`, `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`, `application/zip` | +| Allowed extensions | `.pdf`, `.dwg`, `.docx`, `.xlsx`, `.zip` | +| Max size | 50 MB | +| Temp TTL | 24 h (purged by cron) | +| Virus scan | ClamAV (blocking) | +| Mover | `StorageService` only — never `fs.rename` directly from controller | + +--- + +## Phase 1: Upload to Temp + +```typescript +@Post('upload') +@UseGuards(JwtAuthGuard, ThrottlerGuard) +@UseInterceptors(FileInterceptor('file', { + limits: { fileSize: 50 * 1024 * 1024 }, // 50 MB +})) +async uploadTemp( + @UploadedFile() file: Express.Multer.File, + @CurrentUser() user: User, +): Promise<{ tempId: 
string; expiresAt: string }> { + // 1. Validate MIME + extension (defense in depth) + this.fileValidator.assertAllowed(file); + + // 2. Scan with ClamAV + const scanResult = await this.clamavService.scan(file.buffer); + if (!scanResult.clean) { + throw new BusinessException( + `ClamAV rejected: ${scanResult.signature}`, + 'ไฟล์ไม่ปลอดภัย ระบบตรวจพบความเสี่ยง', + 'กรุณาตรวจสอบไฟล์และลองใหม่อีกครั้ง', + 'FILE_INFECTED', + ); + } + + // 3. Save to temp (encrypted at rest) + const tempId = await this.storageService.saveToTemp(file, user.id); + + return { + tempId, + expiresAt: addHours(new Date(), 24).toISOString(), + }; +} +``` + +--- + +## Phase 2: Commit in Transaction + +The business operation (e.g., creating a Correspondence) promotes temp files to permanent **in the same DB transaction**. + +```typescript +async createCorrespondence(dto: CreateCorrespondenceDto, user: User) { + return this.dataSource.transaction(async (manager) => { + // 1. Create domain entity + const entity = await manager.save(Correspondence, { + ...dto, + createdById: user.id, + }); + + // 2. Commit temp files → permanent (ACID together with entity) + await this.storageService.commitFiles( + dto.tempFileIds, + { entityId: entity.id, entityType: 'correspondence' }, + manager, + ); + + return entity; + }); +} +``` + +If the transaction rolls back, temp files remain and expire in 24h — no orphaned permanent files. 
+
+---
+
+## StorageService Contract
+
+```typescript
+export interface StorageService {
+  saveToTemp(file: Express.Multer.File, ownerId: number): Promise<string>;
+  commitFiles(
+    tempIds: string[],
+    target: { entityId: number; entityType: string },
+    manager: EntityManager,
+  ): Promise<void>;
+  purgeExpiredTemp(): Promise<void>; // called by cron
+  getPermanentPath(fileId: number): Promise<string>;
+}
+```
+
+---
+
+## ❌ Forbidden
+
+```typescript
+// ❌ Direct write to permanent
+fs.writeFileSync(`/var/storage/${file.originalname}`, file.buffer);
+
+// ❌ Skip ClamAV
+await this.storageService.savePermanent(file);
+
+// ❌ Non-whitelist MIME
+@UseInterceptors(FileInterceptor('file')) // no size or type limit
+
+// ❌ Commit outside transaction
+const entity = await this.repo.save(...);
+await this.storageService.commitFiles(tempIds, ...); // race: entity exists, files may fail
+```
+
+---
+
+## Reference
+
+- [ADR-016 Security & Authentication](../../../../specs/06-Decision-Records/ADR-016-security-authentication.md)
+- [Edge Cases](../../../../specs/01-Requirements/01-06-edge-cases-and-rules.md) — file upload scenarios
+
+---
+
+### 4.3 Implement Rate Limiting
 
 **Impact: HIGH** — Protects against abuse and ensures fair resource usage
 
@@ -2066,7 +2208,7 @@ Reference: [NestJS Throttler](https://docs.nestjs.com/security/rate-limiting)
 
 ---
 
-### 4.3 Sanitize Output to Prevent XSS
+### 4.4 Sanitize Output to Prevent XSS
 
 **Impact: HIGH** — XSS vulnerabilities can compromise user sessions and data
 
@@ -2203,7 +2345,7 @@ Reference: [OWASP XSS Prevention](https://cheatsheetseries.owasp.org/cheatsheets
 
 ---
 
-### 4.4 Use Guards for Authentication and Authorization
+### 4.5 Use Guards for Authentication and Authorization
 
 **Impact: HIGH** — Enforces access control before handlers execute
 
@@ -2330,7 +2472,7 @@ Reference: [NestJS Guards](https://docs.nestjs.com/guards)
 
 ---
 
-### 4.5 Validate All Input with DTOs and Pipes
+### 4.6 Validate All Input with DTOs and Pipes
 
 **Impact: HIGH** — First line of defense
against attacks @@ -3593,134 +3735,458 @@ Reference: [TypeORM Relations](https://typeorm.io/relations) --- -### 7.2 Use Database Migrations +### 7.2 Hybrid Identifier Strategy (ADR-019) -**Impact: HIGH** — Enables safe, repeatable database schema changes +**Impact: CRITICAL** — Use INT PK internally + UUID for public API per project ADR-019 -Never use `synchronize: true` in production. Use migrations for all schema changes. Migrations provide version control for your database, enable safe rollbacks, and ensure consistency across all environments. +**This project follows ADR-019: INT Primary Key (internal) + UUIDv7 (public API)** -**Incorrect (using synchronize or manual SQL):** +Unlike standard practices that use UUID as the primary key, this project uses a **hybrid approach** optimized for MariaDB performance and API consistency. + +> **Updated pattern (March 2026):** Entities extend `UuidBaseEntity`. The `publicId` column is exposed **directly** in API responses — ห้ามใช้ `@Expose({ name: 'id' })` เพื่อ rename. + +### The Strategy + +| Layer | Field | Type | Usage | +| --------------- | ---------- | ----------------------------------- | ------------------------------------------------- | +| **Database PK** | `id` | `INT AUTO_INCREMENT` | Internal foreign keys only (marked `@Exclude()`) | +| **Public API** | `publicId` | `MariaDB UUID` (native, BINARY(16)) | External references, URLs — exposed as-is | +| **DTO Input** | `xxxUuid` | `string` (UUIDv7) | Accept UUID in create/update DTOs | +| **DTO Output** | `publicId` | `string` (UUIDv7) | API returns `publicId` field directly (no rename) | + +### Why Hybrid IDs? 
+ +- **Performance**: INT PK is faster for joins and indexing than UUID +- **Security**: Internal IDs never exposed in API (enumerable IDs are a risk) +- **Compatibility**: UUID works well with distributed systems and external integrations +- **MariaDB Native**: Uses MariaDB's native UUID type (stored as BINARY(16), auto-converts to string) + +### Entity Definition (Current Pattern) ```typescript -// Use synchronize in production -TypeOrmModule.forRoot({ - type: 'postgres', - synchronize: true, // DANGEROUS in production! - // Can drop columns, tables, or data -}); +import { Entity, Column } from 'typeorm'; +import { UuidBaseEntity } from '@/common/entities/uuid-base.entity'; -// Manual SQL in production -@Injectable() -export class DatabaseService { - async addColumn(): Promise { - await this.dataSource.query('ALTER TABLE users ADD COLUMN age INT'); - // No version control, no rollback, inconsistent across envs - } -} +@Entity('contracts') +export class Contract extends UuidBaseEntity { + // publicId (string UUIDv7) + id (INT, @Exclude) สืบทอดจาก UuidBaseEntity + // API response → { publicId: "019505a1-7c3e-7000-8000-abc123...", contractCode: ..., ... 
} -// Modify entities without migration -@Entity() -export class User { @Column() - email: string; + contractCode: string; - @Column() // Added without migration - newField: string; // Will crash in production if synchronize is false + @Column() + contractName: string; + + @Column({ name: 'project_id' }) + projectId: number; // INT FK — internal, not exposed if marked @Exclude in UuidBaseEntity } ``` -**Correct (use migrations for all schema changes):** +**`UuidBaseEntity` (shared base):** ```typescript -// Configure TypeORM for migrations -// data-source.ts -export const dataSource = new DataSource({ - type: 'postgres', - host: process.env.DB_HOST, - port: parseInt(process.env.DB_PORT), - username: process.env.DB_USERNAME, - password: process.env.DB_PASSWORD, - database: process.env.DB_NAME, - entities: ['dist/**/*.entity.js'], - migrations: ['dist/migrations/*.js'], - synchronize: false, // Always false in production - migrationsRun: true, // Run migrations on startup -}); +import { PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { Exclude } from 'class-transformer'; -// app.module.ts -TypeOrmModule.forRootAsync({ - inject: [ConfigService], - useFactory: (config: ConfigService) => ({ - type: 'postgres', - host: config.get('DB_HOST'), - synchronize: config.get('NODE_ENV') === 'development', // Only in dev - migrations: ['dist/migrations/*.js'], - migrationsRun: true, - }), -}); +export abstract class UuidBaseEntity { + @PrimaryGeneratedColumn() + @Exclude() // ❗ CRITICAL: INT id must never leak to API + id: number; -// migrations/1705312800000-AddUserAge.ts -import { MigrationInterface, QueryRunner } from 'typeorm'; + @Column({ type: 'uuid', unique: true, generated: 'uuid' }) + publicId: string; // UUIDv7, exposed as-is -export class AddUserAge1705312800000 implements MigrationInterface { - name = 'AddUserAge1705312800000'; + @CreateDateColumn() + createdAt: Date; - public async up(queryRunner: QueryRunner): Promise { - 
// Add column with default to handle existing rows - await queryRunner.query(` - ALTER TABLE "users" ADD "age" integer DEFAULT 0 - `); + @UpdateDateColumn() + updatedAt: Date; +} +``` - // Add index for frequently queried columns - await queryRunner.query(` - CREATE INDEX "IDX_users_age" ON "users" ("age") - `); - } +### DTO Pattern (Accept UUID, Resolve to INT Internally) - public async down(queryRunner: QueryRunner): Promise { - // Always implement down for rollback - await queryRunner.query(`DROP INDEX "IDX_users_age"`); - await queryRunner.query(`ALTER TABLE "users" DROP COLUMN "age"`); - } +```typescript +// dto/create-contract.dto.ts +import { IsUUID, IsNotEmpty } from 'class-validator'; + +export class CreateContractDto { + @IsNotEmpty() + @IsUUID('7') // UUIDv7 (MariaDB native) + projectUuid: string; // Accept UUID from client + + @IsNotEmpty() + contractCode: string; + + @IsNotEmpty() + contractName: string; } -// Safe column rename (two-step) -export class RenameNameToFullName1705312900000 implements MigrationInterface { - public async up(queryRunner: QueryRunner): Promise { - // Step 1: Add new column - await queryRunner.query(` - ALTER TABLE "users" ADD "full_name" varchar(255) - `); +// ❌ NO Response DTO with @Expose rename needed. +// Entity class_transformer via TransformInterceptor will serialize publicId directly. 
+``` - // Step 2: Copy data - await queryRunner.query(` - UPDATE "users" SET "full_name" = "name" - `); +### Service/Controller Pattern - // Step 3: Add NOT NULL constraint - await queryRunner.query(` - ALTER TABLE "users" ALTER COLUMN "full_name" SET NOT NULL - `); +```typescript +@Controller('contracts') +@UseGuards(JwtAuthGuard, CaslAbilityGuard) +export class ContractsController { + constructor( + private contractsService: ContractsService, + private uuidResolver: UuidResolver + ) {} - // Step 4: Drop old column (after verifying app works) - await queryRunner.query(` - ALTER TABLE "users" DROP COLUMN "name" - `); + @Post() + async create(@Body() dto: CreateContractDto) { + // Resolve UUID → INT PK for FK relationship + const projectId = await this.uuidResolver.resolveProject(dto.projectUuid); + + const contract = await this.contractsService.create({ + ...dto, + projectId, + }); + + // Response: TransformInterceptor + @Exclude on id → publicId exposed directly + return contract; } - public async down(queryRunner: QueryRunner): Promise { - await queryRunner.query(`ALTER TABLE "users" ADD "name" varchar(255)`); - await queryRunner.query(`UPDATE "users" SET "name" = "full_name"`); - await queryRunner.query(`ALTER TABLE "users" DROP COLUMN "full_name"`); + @Get(':publicId') + async findOne(@Param('publicId', ParseUuidPipe) publicId: string) { + return this.contractsService.findOneByPublicId(publicId); } } ``` -Reference: [TypeORM Migrations](https://typeorm.io/migrations) +### UUID Resolver Helper + +```typescript +@Injectable() +export class UuidResolver { + constructor( + @InjectRepository(Project) + private projectRepo: Repository, + @InjectRepository(Contract) + private contractRepo: Repository + ) {} + + async resolveProject(publicId: string): Promise { + const project = await this.projectRepo.findOne({ + where: { publicId }, + select: ['id'], // Only INT PK for FK + }); + if (!project) throw new NotFoundException('Project not found'); + return project.id; + } 
+
+  async resolveContract(publicId: string): Promise<number> {
+    const contract = await this.contractRepo.findOne({
+      where: { publicId },
+      select: ['id'],
+    });
+    if (!contract) throw new NotFoundException('Contract not found');
+    return contract.id;
+  }
+}
+```
+
+### TransformInterceptor (Required — register ONCE)
+
+```typescript
+// Register via APP_INTERCEPTOR in CommonModule — ห้ามซ้ำใน main.ts
+@Injectable()
+export class TransformInterceptor implements NestInterceptor {
+  intercept(context: ExecutionContext, next: CallHandler): Observable<unknown> {
+    return next.handle().pipe(
+      map((data) => instanceToPlain(data)) // Applies @Exclude / @Expose
+    );
+  }
+}
+
+// common.module.ts
+@Module({
+  providers: [
+    {
+      provide: APP_INTERCEPTOR,
+      useClass: TransformInterceptor,
+    },
+  ],
+})
+export class CommonModule {}
+```
+
+> **Warning:** ห้ามเรียก `app.useGlobalInterceptors(new TransformInterceptor())` ใน `main.ts` ซ้ำ — จะทำให้ response double-wrap `{ data: { data: ... } }`.
+
+### Critical: NEVER ParseInt on UUID
+
+```typescript
+// ❌ WRONG - parseInt on UUID gives garbage value
+const id = parseInt(projectPublicId); // "0195a1b2-..." → 195 (wrong!)
+ +// ❌ WRONG - Number() on UUID +const id = Number(projectPublicId); // NaN + +// ❌ WRONG - Unary plus on UUID +const id = +projectPublicId; // NaN + +// ✅ CORRECT - Resolve via database lookup +const projectId = await uuidResolver.resolveProject(projectPublicId); + +// ✅ CORRECT - Use TypeORM find with publicId column +const project = await projectRepo.findOne({ where: { publicId: projectPublicId } }); +const id = project.id; // Get INT PK from entity +``` + +### Query with publicId (No Resolution Needed) + +```typescript +// Direct UUID lookup in TypeORM +const project = await this.projectRepo.findOne({ + where: { publicId: projectPublicId }, +}); + +// Relations use INT FK internally +const contracts = await this.contractRepo.find({ + where: { projectId: project.id }, // INT for FK query +}); +``` + +### Reference + +- [ADR-019 Hybrid Identifier Strategy](../../../../specs/06-Decision-Records/ADR-019-hybrid-identifier-strategy.md) +- [UUID Implementation Plan](../../../../specs/05-Engineering-Guidelines/05-07-hybrid-uuid-implementation-plan.md) +- [Data Dictionary](../../../../specs/03-Data-and-Storage/03-01-data-dictionary.md) + +> **Warning**: Using `parseInt()`, `Number()`, or unary `+` on UUID values violates ADR-019 and will cause data corruption. Always resolve UUIDs via database lookup. --- -### 7.3 Use Transactions for Multi-Step Operations +### 7.3 No TypeORM Migrations (ADR-009) + +**Impact: CRITICAL** — Edit SQL schema files directly; n8n handles data migration. Do not generate TypeORM migration files. 
+ +**This project does NOT use TypeORM migration files.** + +All schema changes must be made **directly** in the canonical SQL file: + +- `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` + +Delta scripts (for incremental rollout to existing environments) go under: + +- `specs/03-Data-and-Storage/deltas/YYYY-MM-DD-descriptive-name.sql` + +Data migration (e.g., backfilling a new column) is handled by **n8n workflows**, not TypeORM's `QueryRunner`. + +--- + +## Why No Migrations? + +1. **Single source of truth** — The full SQL schema is always readable as one file. No need to replay a migration chain to understand current state. +2. **Review friendly** — Schema diff = git diff on the SQL file. Reviewers see the complete picture. +3. **Ops alignment** — DBAs and operators work in SQL, not TypeScript. +4. **n8n for data** — Business-meaningful data transforms live in n8n where they can be versioned, retried, and orchestrated with monitoring. + +--- + +## ✅ Workflow for a Schema Change + +1. **Update Data Dictionary** first: + - `specs/03-Data-and-Storage/03-01-data-dictionary.md` — add field meaning + business rules. +2. **Update the canonical schema**: + - Edit `lcbp3-v1.8.0-schema-02-tables.sql` — add/alter column, constraint, index. +3. **Add a delta script** (if deploying to existing env): + - `specs/03-Data-and-Storage/deltas/2026-04-22-add-rfa-revision-column.sql` + + ```sql + -- Delta: Add revision column to rfa table + ALTER TABLE rfa + ADD COLUMN revision INT NOT NULL DEFAULT 1 AFTER status; + + CREATE INDEX idx_rfa_revision ON rfa(revision); + ``` +4. **Update the Entity** (`backend/src/.../entities/rfa.entity.ts`): + + ```typescript + @Column({ type: 'int', default: 1 }) + revision: number; + ``` +5. **If data backfill needed** → create n8n workflow, not TypeScript migration. 
+ +--- + +## ❌ Forbidden + +```bash +# ❌ DO NOT generate migrations +pnpm typeorm migration:generate ./src/migrations/AddRevision + +# ❌ DO NOT run migrations +pnpm typeorm migration:run +``` + +```typescript +// ❌ DO NOT write migration classes +export class AddRevision1730000000000 implements MigrationInterface { + async up(queryRunner: QueryRunner): Promise { /* ... */ } + async down(queryRunner: QueryRunner): Promise { /* ... */ } +} +``` + +--- + +## ✅ TypeORM Config (runtime only) + +```typescript +// ormconfig.ts +export default { + type: 'mariadb', + // ... + synchronize: false, // ❗ NEVER true (would auto-sync entity ↔ schema) + migrationsRun: false, // ❗ NEVER true + // ❌ Do NOT specify `migrations:` entries +}; +``` + +`synchronize: false` is mandatory because the canonical SQL file is authoritative — TypeORM should never mutate the schema. + +--- + +## Reference + +- [ADR-009 Database Migration Strategy](../../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md) +- [Data Dictionary](../../../../specs/03-Data-and-Storage/03-01-data-dictionary.md) +- [Schema Tables](../../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql) + +--- + +### 7.4 No TypeORM Migrations (ADR-009) + +**Impact: HIGH** — Use direct SQL schema files instead of TypeORM migrations per project ADR + +**This project follows ADR-009: Direct SQL Schema Management** + +Unlike standard NestJS/TypeORM practices, this project does **NOT** use TypeORM migrations. Instead, we manage database schema through direct SQL files. + +### Why No Migrations? 
+ +- **ADR-009 Decision**: Explicit schema control over auto-generated migrations +- **MariaDB-specific features**: Native UUID type, virtual columns, custom indexing +- **Team workflow**: Schema changes reviewed as SQL, not TypeORM migration classes +- **Audit trail**: Single source of truth in `specs/03-Data-and-Storage/` + +### Schema File Locations + +``` +specs/03-Data-and-Storage/ +├── lcbp3-v1.8.0-schema-01-drop.sql # Drop statements (dev only) +├── lcbp3-v1.8.0-schema-02-tables.sql # CREATE TABLE statements +├── lcbp3-v1.8.0-schema-03-views-indexes.sql # Views, indexes, constraints +└── deltas/ # Incremental changes + ├── 01-add-reference-date.sql + ├── 02-add-rbac-bulk-permission.sql + └── 03-fix-numbering-enums.sql +``` + +### Correct: Using SQL Schema Files + +```typescript +// TypeORM configuration - NO migrationsRun +TypeOrmModule.forRootAsync({ + inject: [ConfigService], + useFactory: (config: ConfigService) => ({ + type: 'mariadb', + host: config.get('DB_HOST'), + port: config.get('DB_PORT'), + username: config.get('DB_USERNAME'), + password: config.get('DB_PASSWORD'), + database: config.get('DB_NAME'), + entities: ['dist/**/*.entity.js'], + synchronize: false, // NEVER true, even in development + migrationsRun: false, // Disabled per ADR-009 + // Migrations are managed via SQL files, not TypeORM + }), +}); +``` + +### Schema Change Process (ADR-009) + +1. **Modify SQL file directly**: + + ```sql + -- specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql + ALTER TABLE correspondences + ADD COLUMN priority VARCHAR(20) DEFAULT 'normal'; + ``` + +2. **Create delta for existing databases**: + + ```sql + -- specs/03-Data-and-Storage/deltas/04-add-priority-column.sql + ALTER TABLE correspondences + ADD COLUMN priority VARCHAR(20) DEFAULT 'normal'; + ``` + +3. 
**Apply to database manually or via deployment script**: + ```bash + mysql -u root -p lcbp3 < specs/03-Data-and-Storage/deltas/04-add-priority-column.sql + ``` + +### Entity Definition (No Migration Needed) + +```typescript +@Entity('correspondences') +export class Correspondence { + @PrimaryGeneratedColumn() + id: number; // Internal INT PK + + @Column({ type: 'uuid' }) + uuid: string; // Public UUID + + @Column({ name: 'priority', default: 'normal' }) + priority: string; + + // No migration class needed - schema managed via SQL +} +``` + +### Anti-Pattern: TypeORM Migrations (Do NOT Use) + +```typescript +// ❌ WRONG - Do not create migration files +// migrations/1705312800000-AddUserAge.ts +export class AddUserAge1705312800000 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "users" ADD "age" integer`); + } +} + +// ❌ WRONG - Do not enable migrationsRun +TypeOrmModule.forRoot({ + migrationsRun: true, // Disabled per ADR-009 + migrations: ['dist/migrations/*.js'], +}); +``` + +### When You Need Schema Changes + +1. Check `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` +2. Add your DDL to the appropriate SQL file +3. Create delta file in `deltas/` directory +4. Apply SQL to your database +5. Update corresponding Entity class + +### Reference + +- [ADR-009 Database Strategy](../../../../specs/06-Decision-Records/ADR-009-db-strategy.md) +- [Schema SQL Files](../../../../specs/03-Data-and-Storage/) +- [Data Dictionary](../../../../specs/03-Data-and-Storage/03-01-data-dictionary.md) + +> **Warning**: Attempting to use TypeORM migrations in this project violates ADR-009 and will be rejected in code review. + +--- + +### 7.5 Use Transactions for Multi-Step Operations **Impact: HIGH** — Ensures data consistency in multi-step operations @@ -5850,14 +6316,363 @@ Reference: [NestJS Logger](https://docs.nestjs.com/techniques/logger) --- -## References +## 11. 
LCBP3 Project-Specific -- https://docs.nestjs.com -- https://github.com/nestjs/nest -- https://typeorm.io -- https://github.com/typestack/class-validator -- https://github.com/goldbergyoni/nodebestpractices +**Section Impact: CRITICAL** + +### 11.1 AI Integration Boundary (ADR-018 / ADR-020) + +**Impact: CRITICAL** — AI runs on Admin Desktop only; AI → DMS API → DB (never direct); human-in-the-loop validation mandatory; full audit trail. + +LCBP3 uses **on-premises AI only** (Ollama on Admin Desktop) with strict isolation from data layers. --- -_Generated by build-agents.ts on 2026-01-16_ +## The Boundary + +``` +┌────────────────────────────────────────────────────────────┐ +│ User Browser (Next.js) │ +└─────────────────────────┬──────────────────────────────────┘ + │ (authenticated HTTPS) +┌─────────────────────────▼──────────────────────────────────┐ +│ DMS API (NestJS) ◀── enforces CASL, validation, audit │ +│ ├─ AiGateway (proxies to Ollama) │ +│ └─ DB + Storage (Elasticsearch, MariaDB, File System) │ +└─────────────────────────┬──────────────────────────────────┘ + │ (HTTP → Admin Desktop, internal) +┌─────────────────────────▼──────────────────────────────────┐ +│ Admin Desktop (Desk-5439) │ +│ ├─ Ollama (Gemma 4) │ +│ ├─ PaddleOCR (Thai + English) │ +│ └─ n8n orchestration │ +└────────────────────────────────────────────────────────────┘ +``` + +**❗ Admin Desktop has NO network access to MariaDB, no SMB to storage, no shared secrets.** It receives base64-encoded file bytes over HTTPS and returns extracted text + suggestions. + +--- + +## Required Patterns + +### 1. AiGateway Module (backend) + +```typescript +@Module({ + controllers: [AiController], + providers: [AiService, AiGateway, AiAuditLogger], + exports: [AiService], +}) +export class AiModule {} + +@Injectable() +export class AiService { + async extractMetadata(fileId: number, user: User): Promise { + // 1. 
Authorize (CASL: user can read this file) + await this.ability.ensureCan(user, 'read', File, fileId); + + // 2. Load file (DMS API, inside the boundary) + const fileBytes = await this.storageService.read(fileId); + + // 3. Call Admin Desktop AI over HTTP + const raw = await this.aiGateway.extract(fileBytes); + + // 4. Validate AI output schema (Zod) + const parsed = ExtractedMetadataSchema.parse(raw); + + // 5. Audit log (who, what, when, model, confidence) + await this.auditLogger.log({ + userId: user.id, + action: 'ai.extract_metadata', + fileId, + model: raw.model, + confidence: parsed.confidence, + }); + + // 6. Return — frontend MUST render for human confirmation + return parsed; + } +} +``` + +### 2. Human-in-the-Loop + +AI output is **never persisted directly**. Users must confirm via `DocumentReviewForm`: + +```tsx + saveMetadata(reviewed)} // user edits applied +/> +``` + +The `user_confirmed_at` timestamp and diff (AI suggestion → final value) are stored in the audit log. + +### 3. Rate Limiting + +```typescript +@Post('ai/extract') +@UseGuards(JwtAuthGuard, CaslAbilityGuard, ThrottlerGuard) +@Throttle({ default: { limit: 10, ttl: 60_000 } }) // 10 req/min/user +async extract(@Body() dto: ExtractDto) { /* ... */ } +``` + +--- + +## ❌ Forbidden + +```typescript +// ❌ AI container connecting to DB +// docker-compose.yml inside ai-service: +// environment: +// DATABASE_URL: mysql://... ← NEVER + +// ❌ AI SDK calling cloud API +import OpenAI from 'openai'; // ❌ No cloud AI SDKs in production code +const client = new OpenAI({ apiKey: ... 
}); + +// ❌ Persisting AI output without human confirm +async extractAndSave(fileId: number) { + const metadata = await this.ai.extract(fileId); + await this.repo.save({ fileId, ...metadata }); // ❌ skips human review +} + +// ❌ Skipping audit log +const result = await this.aiGateway.extract(bytes); // no logging +return result; +``` + +--- + +## Audit Log Schema + +```sql +CREATE TABLE ai_audit_log ( + id INT AUTO_INCREMENT PRIMARY KEY, + public_id UUID UNIQUE NOT NULL, + user_id INT NOT NULL, + action VARCHAR(64) NOT NULL, -- 'ai.extract_metadata', 'ai.classify', etc. + file_id INT, + model VARCHAR(64), -- 'gemma-4:7b', 'paddleocr-v3' + confidence DECIMAL(4,3), + input_hash CHAR(64), -- SHA-256 of input for replay detection + output_summary JSON, + human_confirmed_at DATETIME, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + INDEX idx_user_created (user_id, created_at), + INDEX idx_file (file_id) +); +``` + +--- + +## Reference + +- [ADR-018 AI Boundary](../../../../specs/06-Decision-Records/ADR-018-ai-boundary.md) +- [ADR-020 AI Intelligence Integration](../../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md) +- [ADR-017 Ollama Data Migration](../../../../specs/06-Decision-Records/ADR-017-ollama-data-migration.md) + +--- + +### 11.2 Workflow Engine + Document Numbering + Workflow Context (ADR-001 / 002 / 021) + +**Impact: CRITICAL** — DSL-based state machine; double-lock numbering; integrated workflow context exposed to clients. + +LCBP3 uses a **unified workflow engine** (DSL-based state machine) across RFA, Transmittal, Correspondence, Circulation, and Shop Drawing. Every state transition goes through the same engine — no per-type routing tables. + +--- + +## ADR-001: Unified Workflow Engine + +### State Transition Pattern + +```typescript +@Injectable() +export class WorkflowEngine { + async transition( + instanceId: string, + action: WorkflowAction, + actor: User, + context?: WorkflowContext, + ): Promise { + // 1. 
Load current state from DB (never trust client-provided state) + const instance = await this.repo.findOneByPublicId(instanceId); + if (!instance) throw new NotFoundException(); + + // 2. Validate transition against DSL + const dsl = await this.dslService.load(instance.workflowTypeId); + const nextState = dsl.resolve(instance.currentState, action); + if (!nextState) { + throw new BusinessException( + `Action ${action} not allowed from state ${instance.currentState}`, + 'ไม่สามารถดำเนินการนี้ได้ในสถานะปัจจุบัน', + 'กรุณาตรวจสอบขั้นตอนการอนุมัติ', + 'WF_INVALID_TRANSITION', + ); + } + + // 3. Apply transition atomically (optimistic lock via @VersionColumn) + instance.currentState = nextState; + await this.repo.save(instance); // throws OptimisticLockVersionMismatchError on race + + // 4. Emit event for listeners (notifications via BullMQ — ADR-008) + this.eventBus.publish(new WorkflowTransitionedEvent(instance, action, actor)); + + return instance; + } +} +``` + +### ❌ Anti-Patterns + +- ❌ Hard-coded `switch (state)` in controllers/services +- ❌ Trusting `currentState` from request body +- ❌ Creating separate routing tables per document type + +--- + +## ADR-002: Document Numbering (Double-Lock) + +Concurrent requests for a new document number **must** use both: + +1. **Redis Redlock** — distributed lock across app instances +2. 
**TypeORM `@VersionColumn`** — optimistic lock on counter row + +### Counter Entity + +```typescript +@Entity('document_number_counters') +@Unique(['projectId', 'documentTypeId']) +export class DocumentNumberCounter extends UuidBaseEntity { + @Column({ name: 'project_id' }) + projectId: number; + + @Column({ name: 'document_type_id' }) + documentTypeId: number; + + @Column({ name: 'last_number', default: 0 }) + lastNumber: number; + + @VersionColumn() + version: number; // ❗ Optimistic lock — do not rename, do not remove +} +``` + +### Service Pattern + +```typescript +@Injectable() +export class DocumentNumberingService { + constructor( + @InjectRepository(DocumentNumberCounter) + private counterRepo: Repository<DocumentNumberCounter>, + private redlock: RedlockService, + private readonly logger: Logger, + ) {} + + async generateNext(ctx: NumberingContext): Promise<string> { + const lockKey = `doc_num:${ctx.projectId}:${ctx.documentTypeId}`; + + // Distributed lock — 3s TTL, up to 5 retries + const lock = await this.redlock.acquire([lockKey], 3000); + + try { + // Optimistic lock via @VersionColumn + const counter = await this.counterRepo.findOne({ + where: { projectId: ctx.projectId, documentTypeId: ctx.documentTypeId }, + }); + + if (!counter) { + throw new NotFoundException('Counter not initialized for this project/type'); + } + + counter.lastNumber += 1; + await this.counterRepo.save(counter); // may throw OptimisticLockVersionMismatchError + + return this.formatNumber(ctx, counter.lastNumber); + } catch (err) { + if (err instanceof OptimisticLockVersionMismatchError) { + this.logger.warn(`Numbering race detected for ${lockKey}, retrying`); + // Let caller retry via BullMQ retry policy + } + throw err; + } finally { + await lock.release(); + } + } + + private formatNumber(ctx: NumberingContext, seq: number): string { + // e.g. 
"LCBP3-RFA-0042" + return `${ctx.projectCode}-${ctx.typeCode}-${String(seq).padStart(4, '0')}`; + } +} +``` + +### ❌ Anti-Patterns + +- ❌ App-side counter only (`let counter = 0; counter++`) +- ❌ Using `findOne` + `update` without `@VersionColumn` +- ❌ Using only Redis lock without DB optimistic lock (race if Redis fails) + +--- + +## ADR-021: Integrated Workflow Context + +Every workflow-aware API response **must** expose: + +```typescript +export class WorkflowEnvelope { + data: T; + + workflow: { + instancePublicId: string; + currentState: string; // e.g. 'pending_review' + availableActions: string[]; // e.g. ['approve', 'reject', 'request-revision'] + canEdit: boolean; // computed from CASL + current state + lastTransitionAt: string; // ISO 8601 + }; + + stepAttachments?: Array<{ // files produced by the current/previous step + publicId: string; + fileName: string; + stepCode: string; + downloadUrl: string; + }>; +} +``` + +Frontend uses `workflow.availableActions` to render buttons — no client-side state machine logic. 
+ +--- + +## Reference + +- [ADR-001 Unified Workflow Engine](../../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md) +- [ADR-002 Document Numbering Strategy](../../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md) +- [ADR-021 Workflow Context](../../../../specs/06-Decision-Records/ADR-021-workflow-context.md) + +--- + +## References + +- [AGENTS.md (root)](../../../AGENTS.md) — canonical AI agent rules +- [CONTRIBUTING.md](../../../CONTRIBUTING.md) — spec authoring + PR process +- [ADR-001 Unified Workflow Engine](../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md) +- [ADR-002 Document Numbering Strategy](../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md) +- [ADR-007 Error Handling Strategy](../../../specs/06-Decision-Records/ADR-007-error-handling-strategy.md) +- [ADR-008 Email/Notification Strategy](../../../specs/06-Decision-Records/ADR-008-email-notification-strategy.md) +- [ADR-009 Database Migration Strategy](../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md) +- [ADR-016 Security & Authentication](../../../specs/06-Decision-Records/ADR-016-security-authentication.md) +- [ADR-018 AI Boundary](../../../specs/06-Decision-Records/ADR-018-ai-boundary.md) +- [ADR-019 Hybrid Identifier Strategy](../../../specs/06-Decision-Records/ADR-019-hybrid-identifier-strategy.md) +- [ADR-020 AI Intelligence Integration](../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md) +- [ADR-021 Workflow Context](../../../specs/06-Decision-Records/ADR-021-workflow-context.md) +- [Backend Engineering Guidelines](../../../specs/05-Engineering-Guidelines/05-02-backend-guidelines.md) +- [Schema — v1.8.0 Tables](../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql) +- [Data Dictionary](../../../specs/03-Data-and-Storage/03-01-data-dictionary.md) +- Upstream: [Kadajett/nestjs-best-practices](https://github.com/Kadajett/nestjs-best-practices) v1.1.0 + 
+--- + +*Generated by build-agents.ts on 2026-04-22* diff --git a/.agents/skills/nestjs-best-practices/AGENTS.md.v1.1.0.bak b/.agents/skills/nestjs-best-practices/AGENTS.md.v1.1.0.bak new file mode 100644 index 0000000..49234af --- /dev/null +++ b/.agents/skills/nestjs-best-practices/AGENTS.md.v1.1.0.bak @@ -0,0 +1,5863 @@ +# NestJS Best Practices + +**Version 1.1.0** +NestJS Best Practices +January 2026 + +> **Note:** +> This document is mainly for agents and LLMs to follow when maintaining, +> generating, or refactoring NestJS codebases. Humans may also find it +> useful, but guidance here is optimized for automation and consistency +> by AI-assisted workflows. + +--- + +## Abstract + +Comprehensive best practices and architecture guide for NestJS applications, designed for AI agents and LLMs. Contains 40 rules across 10 categories, prioritized by impact from critical (architecture, dependency injection) to incremental (DevOps patterns). Each rule includes detailed explanations, real-world examples comparing incorrect vs. correct implementations, and specific impact metrics to guide automated refactoring and code generation. + +--- + +## Table of Contents + +1. [Architecture](#1-architecture) — **CRITICAL** + - 1.1 [Avoid Circular Dependencies](#11-avoid-circular-dependencies) + - 1.2 [Organize by Feature Modules](#12-organize-by-feature-modules) + - 1.3 [Use Proper Module Sharing Patterns](#13-use-proper-module-sharing-patterns) + - 1.4 [Single Responsibility for Services](#14-single-responsibility-for-services) + - 1.5 [Use Event-Driven Architecture for Decoupling](#15-use-event-driven-architecture-for-decoupling) + - 1.6 [Use Repository Pattern for Data Access](#16-use-repository-pattern-for-data-access) +2. 
[Dependency Injection](#2-dependency-injection) — **CRITICAL** + - 2.1 [Avoid Service Locator Anti-Pattern](#21-avoid-service-locator-anti-pattern) + - 2.2 [Apply Interface Segregation Principle](#22-apply-interface-segregation-principle) + - 2.3 [Honor Liskov Substitution Principle](#23-honor-liskov-substitution-principle) + - 2.4 [Prefer Constructor Injection](#24-prefer-constructor-injection) + - 2.5 [Understand Provider Scopes](#25-understand-provider-scopes) + - 2.6 [Use Injection Tokens for Interfaces](#26-use-injection-tokens-for-interfaces) +3. [Error Handling](#3-error-handling) — **HIGH** + - 3.1 [Handle Async Errors Properly](#31-handle-async-errors-properly) + - 3.2 [Throw HTTP Exceptions from Services](#32-throw-http-exceptions-from-services) + - 3.3 [Use Exception Filters for Error Handling](#33-use-exception-filters-for-error-handling) +4. [Security](#4-security) — **HIGH** + - 4.1 [Implement Secure JWT Authentication](#41-implement-secure-jwt-authentication) + - 4.2 [Implement Rate Limiting](#42-implement-rate-limiting) + - 4.3 [Sanitize Output to Prevent XSS](#43-sanitize-output-to-prevent-xss) + - 4.4 [Use Guards for Authentication and Authorization](#44-use-guards-for-authentication-and-authorization) + - 4.5 [Validate All Input with DTOs and Pipes](#45-validate-all-input-with-dtos-and-pipes) +5. [Performance](#5-performance) — **HIGH** + - 5.1 [Use Async Lifecycle Hooks Correctly](#51-use-async-lifecycle-hooks-correctly) + - 5.2 [Use Lazy Loading for Large Modules](#52-use-lazy-loading-for-large-modules) + - 5.3 [Optimize Database Queries](#53-optimize-database-queries) + - 5.4 [Use Caching Strategically](#54-use-caching-strategically) +6. [Testing](#6-testing) — **MEDIUM-HIGH** + - 6.1 [Use Supertest for E2E Testing](#61-use-supertest-for-e2e-testing) + - 6.2 [Mock External Services in Tests](#62-mock-external-services-in-tests) + - 6.3 [Use Testing Module for Unit Tests](#63-use-testing-module-for-unit-tests) +7. 
[Database & ORM](#7-database-orm) — **MEDIUM-HIGH** + - 7.1 [Avoid N+1 Query Problems](#71-avoid-n-1-query-problems) + - 7.2 [Use Database Migrations](#72-use-database-migrations) + - 7.3 [Use Transactions for Multi-Step Operations](#73-use-transactions-for-multi-step-operations) +8. [API Design](#8-api-design) — **MEDIUM** + - 8.1 [Use DTOs and Serialization for API Responses](#81-use-dtos-and-serialization-for-api-responses) + - 8.2 [Use Interceptors for Cross-Cutting Concerns](#82-use-interceptors-for-cross-cutting-concerns) + - 8.3 [Use Pipes for Input Transformation](#83-use-pipes-for-input-transformation) + - 8.4 [Use API Versioning for Breaking Changes](#84-use-api-versioning-for-breaking-changes) +9. [Microservices](#9-microservices) — **MEDIUM** + - 9.1 [Implement Health Checks for Microservices](#91-implement-health-checks-for-microservices) + - 9.2 [Use Message and Event Patterns Correctly](#92-use-message-and-event-patterns-correctly) + - 9.3 [Use Message Queues for Background Jobs](#93-use-message-queues-for-background-jobs) +10. [DevOps & Deployment](#10-devops-deployment) — **LOW-MEDIUM** + +- 10.1 [Implement Graceful Shutdown](#101-implement-graceful-shutdown) +- 10.2 [Use ConfigModule for Environment Configuration](#102-use-configmodule-for-environment-configuration) +- 10.3 [Use Structured Logging](#103-use-structured-logging) + +--- + +## 1. Architecture + +**Section Impact: CRITICAL** + +### 1.1 Avoid Circular Dependencies + +**Impact: CRITICAL** — "#1 cause of runtime crashes" + +Circular dependencies occur when Module A imports Module B, and Module B imports Module A (directly or transitively). NestJS can sometimes resolve these through forward references, but they indicate architectural problems and should be avoided. This is the #1 cause of runtime crashes in NestJS applications. 
+ +**Incorrect (circular module imports):** + +```typescript +// users.module.ts +@Module({ + imports: [OrdersModule], // Orders needs Users, Users needs Orders = circular + providers: [UsersService], + exports: [UsersService], +}) +export class UsersModule {} + +// orders.module.ts +@Module({ + imports: [UsersModule], // Circular dependency! + providers: [OrdersService], + exports: [OrdersService], +}) +export class OrdersModule {} +``` + +**Correct (extract shared logic or use events):** + +```typescript +// Option 1: Extract shared logic to a third module +// shared.module.ts +@Module({ + providers: [SharedService], + exports: [SharedService], +}) +export class SharedModule {} + +// users.module.ts +@Module({ + imports: [SharedModule], + providers: [UsersService], +}) +export class UsersModule {} + +// orders.module.ts +@Module({ + imports: [SharedModule], + providers: [OrdersService], +}) +export class OrdersModule {} + +// Option 2: Use events for decoupled communication +// users.service.ts +@Injectable() +export class UsersService { + constructor(private eventEmitter: EventEmitter2) {} + + async createUser(data: CreateUserDto) { + const user = await this.userRepo.save(data); + this.eventEmitter.emit('user.created', user); + return user; + } +} + +// orders.service.ts +@Injectable() +export class OrdersService { + @OnEvent('user.created') + handleUserCreated(user: User) { + // React to user creation without direct dependency + } +} +``` + +Reference: [NestJS Circular Dependency](https://docs.nestjs.com/fundamentals/circular-dependency) + +--- + +### 1.2 Organize by Feature Modules + +**Impact: CRITICAL** — "3-5x faster onboarding and development" + +Organize your application into feature modules that encapsulate related functionality. Each feature module should be self-contained with its own controllers, services, entities, and DTOs. Avoid organizing by technical layer (all controllers together, all services together). 
This enables 3-5x faster onboarding and feature development. + +**Incorrect (technical layer organization):** + +```typescript +// Technical layer organization (anti-pattern) +src/ +├── controllers/ +│ ├── users.controller.ts +│ ├── orders.controller.ts +│ └── products.controller.ts +├── services/ +│ ├── users.service.ts +│ ├── orders.service.ts +│ └── products.service.ts +├── entities/ +│ ├── user.entity.ts +│ ├── order.entity.ts +│ └── product.entity.ts +└── app.module.ts // Imports everything directly +``` + +**Correct (feature module organization):** + +```typescript +// Feature module organization +src/ +├── users/ +│ ├── dto/ +│ │ ├── create-user.dto.ts +│ │ └── update-user.dto.ts +│ ├── entities/ +│ │ └── user.entity.ts +│ ├── users.controller.ts +│ ├── users.service.ts +│ ├── users.repository.ts +│ └── users.module.ts +├── orders/ +│ ├── dto/ +│ ├── entities/ +│ ├── orders.controller.ts +│ ├── orders.service.ts +│ └── orders.module.ts +├── shared/ +│ ├── guards/ +│ ├── interceptors/ +│ ├── filters/ +│ └── shared.module.ts +└── app.module.ts + +// users.module.ts +@Module({ + imports: [TypeOrmModule.forFeature([User])], + controllers: [UsersController], + providers: [UsersService, UsersRepository], + exports: [UsersService], // Only export what others need +}) +export class UsersModule {} + +// app.module.ts +@Module({ + imports: [ + ConfigModule.forRoot(), + TypeOrmModule.forRoot(), + UsersModule, + OrdersModule, + SharedModule, + ], +}) +export class AppModule {} +``` + +Reference: [NestJS Modules](https://docs.nestjs.com/modules) + +--- + +### 1.3 Use Proper Module Sharing Patterns + +**Impact: CRITICAL** — Prevents duplicate instances, memory leaks, and state inconsistency + +NestJS modules are singletons by default. When a service is properly exported from a module and that module is imported elsewhere, the same instance is shared. 
However, providing a service in multiple modules creates separate instances, leading to memory waste, state inconsistency, and confusing behavior. Always encapsulate services in dedicated modules, export them explicitly, and import the module where needed. + +**Incorrect (service provided in multiple modules):** + +```typescript +// StorageService provided directly in multiple modules - WRONG +// storage.service.ts +@Injectable() +export class StorageService { + private cache = new Map(); // Each instance has separate state! + + store(key: string, value: any) { + this.cache.set(key, value); + } +} + +// app.module.ts +@Module({ + providers: [StorageService], // Instance #1 + controllers: [AppController], +}) +export class AppModule {} + +// videos.module.ts +@Module({ + providers: [StorageService], // Instance #2 - different from AppModule! + controllers: [VideosController], +}) +export class VideosModule {} + +// Problems: +// 1. Two separate StorageService instances exist +// 2. cache.set() in VideosModule doesn't affect AppModule's cache +// 3. Memory wasted on duplicate instances +// 4. 
Debugging nightmares when state doesn't sync +``` + +**Correct (dedicated module with exports):** + +```typescript +// storage/storage.module.ts +@Module({ + providers: [StorageService], + exports: [StorageService], // Make available to importers +}) +export class StorageModule {} + +// videos/videos.module.ts +@Module({ + imports: [StorageModule], // Import the module, not the service + controllers: [VideosController], + providers: [VideosService], +}) +export class VideosModule {} + +// channels/channels.module.ts +@Module({ + imports: [StorageModule], // Same instance shared + controllers: [ChannelsController], + providers: [ChannelsService], +}) +export class ChannelsModule {} + +// app.module.ts +@Module({ + imports: [ + StorageModule, // Only if AppModule itself needs StorageService + VideosModule, + ChannelsModule, + ], +}) +export class AppModule {} + +// Now all modules share the SAME StorageService instance +``` + +**When to use @Global() (sparingly):** + +```typescript +// ONLY for truly cross-cutting concerns +@Global() +@Module({ + providers: [ConfigService, LoggerService], + exports: [ConfigService, LoggerService], +}) +export class CoreModule {} + +// Import once in AppModule +@Module({ + imports: [CoreModule], // Registered globally, available everywhere +}) +export class AppModule {} + +// Other modules don't need to import CoreModule +@Module({ + controllers: [UsersController], + providers: [UsersService], // Can inject ConfigService without importing +}) +export class UsersModule {} + +// WARNING: Don't make everything global! 
+// - Hides dependencies (can't see what a module needs from imports) +// - Makes testing harder +// - Reserve for: config, logging, database connections +``` + +**Module re-exporting pattern:** + +```typescript +// common.module.ts - shared utilities +@Module({ + providers: [DateService, ValidationService], + exports: [DateService, ValidationService], +}) +export class CommonModule {} + +// core.module.ts - re-exports common for convenience +@Module({ + imports: [CommonModule, DatabaseModule], + exports: [CommonModule, DatabaseModule], // Re-export for consumers +}) +export class CoreModule {} + +// feature.module.ts - imports CoreModule, gets both +@Module({ + imports: [CoreModule], // Gets CommonModule + DatabaseModule + controllers: [FeatureController], +}) +export class FeatureModule {} +``` + +Reference: [NestJS Modules](https://docs.nestjs.com/modules#shared-modules) + +--- + +### 1.4 Single Responsibility for Services + +**Impact: CRITICAL** — "40%+ improvement in testability" + +Each service should have a single, well-defined responsibility. Avoid "god services" that handle multiple unrelated concerns. If a service name includes "And" or handles more than one domain concept, it likely violates single responsibility. This reduces complexity and improves testability by 40%+. 
+ +**Incorrect (god service anti-pattern):** + +```typescript +// God service anti-pattern +@Injectable() +export class UserAndOrderService { + constructor( + private userRepo: UserRepository, + private orderRepo: OrderRepository, + private mailer: MailService, + private payment: PaymentService + ) {} + + async createUser(dto: CreateUserDto) { + const user = await this.userRepo.save(dto); + await this.mailer.sendWelcome(user); + return user; + } + + async createOrder(userId: string, dto: CreateOrderDto) { + const order = await this.orderRepo.save({ userId, ...dto }); + await this.payment.charge(order); + await this.mailer.sendOrderConfirmation(order); + return order; + } + + async calculateOrderStats(userId: string) { + // Stats logic mixed in + } + + async validatePayment(orderId: string) { + // Payment logic mixed in + } +} +``` + +**Correct (focused services with single responsibility):** + +```typescript +// Focused services with single responsibility +@Injectable() +export class UsersService { + constructor(private userRepo: UserRepository) {} + + async create(dto: CreateUserDto): Promise { + return this.userRepo.save(dto); + } + + async findById(id: string): Promise { + return this.userRepo.findOneOrFail({ where: { id } }); + } +} + +@Injectable() +export class OrdersService { + constructor(private orderRepo: OrderRepository) {} + + async create(userId: string, dto: CreateOrderDto): Promise { + return this.orderRepo.save({ userId, ...dto }); + } + + async findByUser(userId: string): Promise { + return this.orderRepo.find({ where: { userId } }); + } +} + +@Injectable() +export class OrderStatsService { + constructor(private orderRepo: OrderRepository) {} + + async calculateForUser(userId: string): Promise { + // Focused stats calculation + } +} + +// Orchestration in controller or dedicated orchestrator +@Controller('orders') +export class OrdersController { + constructor( + private orders: OrdersService, + private payment: PaymentService, + private 
notifications: NotificationService + ) {} + + @Post() + async create(@CurrentUser() user: User, @Body() dto: CreateOrderDto) { + const order = await this.orders.create(user.id, dto); + await this.payment.charge(order); + await this.notifications.sendOrderConfirmation(order); + return order; + } +} +``` + +Reference: [NestJS Providers](https://docs.nestjs.com/providers) + +--- + +### 1.5 Use Event-Driven Architecture for Decoupling + +**Impact: MEDIUM-HIGH** — Enables async processing and modularity + +Use `@nestjs/event-emitter` for intra-service events and message brokers for inter-service communication. Events allow modules to react to changes without direct dependencies, improving modularity and enabling async processing. + +**Incorrect (direct service coupling):** + +```typescript +// Direct service coupling +@Injectable() +export class OrdersService { + constructor( + private inventoryService: InventoryService, + private emailService: EmailService, + private analyticsService: AnalyticsService, + private notificationService: NotificationService, + private loyaltyService: LoyaltyService + ) {} + + async createOrder(dto: CreateOrderDto): Promise { + const order = await this.repo.save(dto); + + // Tight coupling - OrdersService knows about all consumers + await this.inventoryService.reserve(order.items); + await this.emailService.sendConfirmation(order); + await this.analyticsService.track('order_created', order); + await this.notificationService.push(order.userId, 'Order placed'); + await this.loyaltyService.addPoints(order.userId, order.total); + + // Adding new behavior requires modifying this service + return order; + } +} +``` + +**Correct (event-driven decoupling):** + +```typescript +// Use EventEmitter for decoupling +import { EventEmitter2 } from '@nestjs/event-emitter'; + +// Define event +export class OrderCreatedEvent { + constructor( + public readonly orderId: string, + public readonly userId: string, + public readonly items: OrderItem[], + public 
readonly total: number + ) {} +} + +// Service emits events +@Injectable() +export class OrdersService { + constructor( + private eventEmitter: EventEmitter2, + private repo: Repository + ) {} + + async createOrder(dto: CreateOrderDto): Promise { + const order = await this.repo.save(dto); + + // Emit event - no knowledge of consumers + this.eventEmitter.emit('order.created', new OrderCreatedEvent(order.id, order.userId, order.items, order.total)); + + return order; + } +} + +// Listeners in separate modules +@Injectable() +export class InventoryListener { + @OnEvent('order.created') + async handleOrderCreated(event: OrderCreatedEvent): Promise { + await this.inventoryService.reserve(event.items); + } +} + +@Injectable() +export class EmailListener { + @OnEvent('order.created') + async handleOrderCreated(event: OrderCreatedEvent): Promise { + await this.emailService.sendConfirmation(event.orderId); + } +} + +@Injectable() +export class AnalyticsListener { + @OnEvent('order.created') + async handleOrderCreated(event: OrderCreatedEvent): Promise { + await this.analyticsService.track('order_created', { + orderId: event.orderId, + total: event.total, + }); + } +} +``` + +Reference: [NestJS Events](https://docs.nestjs.com/techniques/events) + +--- + +### 1.6 Use Repository Pattern for Data Access + +**Impact: HIGH** — Decouples business logic from database + +Create custom repositories to encapsulate complex queries and database logic. This keeps services focused on business logic, makes testing easier with mock repositories, and allows changing database implementations without affecting business code. 
+ +**Incorrect (complex queries in services):** + +```typescript +// Complex queries in services +@Injectable() +export class UsersService { + constructor(@InjectRepository(User) private repo: Repository) {} + + async findActiveWithOrders(minOrders: number): Promise { + // Complex query logic mixed with business logic + return this.repo + .createQueryBuilder('user') + .leftJoinAndSelect('user.orders', 'order') + .where('user.isActive = :active', { active: true }) + .andWhere('user.deletedAt IS NULL') + .groupBy('user.id') + .having('COUNT(order.id) >= :min', { min: minOrders }) + .orderBy('user.createdAt', 'DESC') + .getMany(); + } + + // Service becomes bloated with query logic +} +``` + +**Correct (custom repository with encapsulated queries):** + +```typescript +// Custom repository with encapsulated queries +@Injectable() +export class UsersRepository { + constructor(@InjectRepository(User) private repo: Repository) {} + + async findById(id: string): Promise { + return this.repo.findOne({ where: { id } }); + } + + async findByEmail(email: string): Promise { + return this.repo.findOne({ where: { email } }); + } + + async findActiveWithMinOrders(minOrders: number): Promise { + return this.repo + .createQueryBuilder('user') + .leftJoinAndSelect('user.orders', 'order') + .where('user.isActive = :active', { active: true }) + .andWhere('user.deletedAt IS NULL') + .groupBy('user.id') + .having('COUNT(order.id) >= :min', { min: minOrders }) + .orderBy('user.createdAt', 'DESC') + .getMany(); + } + + async save(user: User): Promise { + return this.repo.save(user); + } +} + +// Clean service with business logic only +@Injectable() +export class UsersService { + constructor(private usersRepo: UsersRepository) {} + + async getActiveUsersWithOrders(): Promise { + return this.usersRepo.findActiveWithMinOrders(1); + } + + async create(dto: CreateUserDto): Promise { + const existing = await this.usersRepo.findByEmail(dto.email); + if (existing) { + throw new 
ConflictException('Email already registered'); + } + + const user = new User(); + user.email = dto.email; + user.name = dto.name; + return this.usersRepo.save(user); + } +} +``` + +Reference: [Repository Pattern](https://martinfowler.com/eaaCatalog/repository.html) + +--- + +## 2. Dependency Injection + +**Section Impact: CRITICAL** + +### 2.1 Avoid Service Locator Anti-Pattern + +**Impact: HIGH** — Hides dependencies and breaks testability + +Avoid using `ModuleRef.get()` or global containers to resolve dependencies at runtime. This hides dependencies, makes code harder to test, and breaks the benefits of dependency injection. Use constructor injection instead. + +**Incorrect (service locator anti-pattern):** + +```typescript +// Use ModuleRef to get dependencies dynamically +@Injectable() +export class OrdersService { + constructor(private moduleRef: ModuleRef) {} + + async createOrder(dto: CreateOrderDto): Promise { + // Dependencies are hidden - not visible in constructor + const usersService = this.moduleRef.get(UsersService); + const inventoryService = this.moduleRef.get(InventoryService); + const paymentService = this.moduleRef.get(PaymentService); + + const user = await usersService.findOne(dto.userId); + // ... 
rest of logic + } +} + +// Global singleton container +class ServiceContainer { + private static instance: ServiceContainer; + private services = new Map(); + + static getInstance(): ServiceContainer { + if (!this.instance) { + this.instance = new ServiceContainer(); + } + return this.instance; + } + + get(key: string): T { + return this.services.get(key); + } +} +``` + +**Correct (constructor injection with explicit dependencies):** + +```typescript +// Use constructor injection - dependencies are explicit +@Injectable() +export class OrdersService { + constructor( + private usersService: UsersService, + private inventoryService: InventoryService, + private paymentService: PaymentService + ) {} + + async createOrder(dto: CreateOrderDto): Promise { + const user = await this.usersService.findOne(dto.userId); + const inventory = await this.inventoryService.check(dto.items); + // Dependencies are clear and testable + } +} + +// Easy to test with mocks +describe('OrdersService', () => { + let service: OrdersService; + + beforeEach(async () => { + const module = await Test.createTestingModule({ + providers: [ + OrdersService, + { provide: UsersService, useValue: mockUsersService }, + { provide: InventoryService, useValue: mockInventoryService }, + { provide: PaymentService, useValue: mockPaymentService }, + ], + }).compile(); + + service = module.get(OrdersService); + }); +}); + +// VALID: Factory pattern for dynamic instantiation +@Injectable() +export class HandlerFactory { + constructor(private moduleRef: ModuleRef) {} + + getHandler(type: string): Handler { + switch (type) { + case 'email': + return this.moduleRef.get(EmailHandler); + case 'sms': + return this.moduleRef.get(SmsHandler); + default: + return this.moduleRef.get(DefaultHandler); + } + } +} +``` + +Reference: [NestJS Module Reference](https://docs.nestjs.com/fundamentals/module-ref) + +--- + +### 2.2 Apply Interface Segregation Principle + +**Impact: HIGH** — Reduces coupling and improves testability by 
30-50% + +Clients should not be forced to depend on interfaces they don't use. In NestJS, this means keeping interfaces small and focused on specific capabilities rather than creating "fat" interfaces that bundle unrelated methods. When a service only needs to send emails, it shouldn't depend on an interface that also includes SMS, push notifications, and logging. Split large interfaces into role-based ones. + +**Incorrect (fat interface forcing unused dependencies):** + +```typescript +// Fat interface - forces all consumers to depend on everything +interface NotificationService { + sendEmail(to: string, subject: string, body: string): Promise; + sendSms(phone: string, message: string): Promise; + sendPush(userId: string, notification: PushPayload): Promise; + sendSlack(channel: string, message: string): Promise; + logNotification(type: string, payload: any): Promise; + getDeliveryStatus(id: string): Promise; + retryFailed(id: string): Promise; + scheduleNotification(dto: ScheduleDto): Promise; +} + +// Consumer only needs email, but must mock everything for tests +@Injectable() +export class OrdersService { + constructor( + private notifications: NotificationService // Depends on 8 methods, uses 1 + ) {} + + async confirmOrder(order: Order): Promise { + await this.notifications.sendEmail( + order.customer.email, + 'Order Confirmed', + `Your order ${order.id} has been confirmed.` + ); + } +} + +// Testing is painful - must mock unused methods +const mockNotificationService = { + sendEmail: jest.fn(), + sendSms: jest.fn(), // Never used, but required + sendPush: jest.fn(), // Never used, but required + sendSlack: jest.fn(), // Never used, but required + logNotification: jest.fn(), // Never used, but required + getDeliveryStatus: jest.fn(), // Never used, but required + retryFailed: jest.fn(), // Never used, but required + scheduleNotification: jest.fn(), // Never used, but required +}; +``` + +**Correct (segregated interfaces by capability):** + +```typescript +// 
Segregated interfaces - each focused on one capability +interface EmailSender { + sendEmail(to: string, subject: string, body: string): Promise; +} + +interface SmsSender { + sendSms(phone: string, message: string): Promise; +} + +interface PushSender { + sendPush(userId: string, notification: PushPayload): Promise; +} + +interface NotificationLogger { + logNotification(type: string, payload: any): Promise; +} + +interface NotificationScheduler { + scheduleNotification(dto: ScheduleDto): Promise; +} + +// Implementation can implement multiple interfaces +@Injectable() +export class NotificationService implements EmailSender, SmsSender, PushSender { + async sendEmail(to: string, subject: string, body: string): Promise { + // Email implementation + } + + async sendSms(phone: string, message: string): Promise { + // SMS implementation + } + + async sendPush(userId: string, notification: PushPayload): Promise { + // Push implementation + } +} + +// Or separate implementations +@Injectable() +export class SendGridEmailService implements EmailSender { + async sendEmail(to: string, subject: string, body: string): Promise { + // SendGrid-specific implementation + } +} + +// Consumer depends only on what it needs +@Injectable() +export class OrdersService { + constructor( + @Inject(EMAIL_SENDER) private emailSender: EmailSender // Minimal dependency + ) {} + + async confirmOrder(order: Order): Promise { + await this.emailSender.sendEmail( + order.customer.email, + 'Order Confirmed', + `Your order ${order.id} has been confirmed.` + ); + } +} + +// Testing is simple - only mock what's used +const mockEmailSender: EmailSender = { + sendEmail: jest.fn(), +}; + +// Module registration with tokens +export const EMAIL_SENDER = Symbol('EMAIL_SENDER'); +export const SMS_SENDER = Symbol('SMS_SENDER'); + +@Module({ + providers: [ + { provide: EMAIL_SENDER, useClass: SendGridEmailService }, + { provide: SMS_SENDER, useClass: TwilioSmsService }, + ], + exports: [EMAIL_SENDER, 
SMS_SENDER], +}) +export class NotificationModule {} +``` + +**Combining interfaces when needed:** + +```typescript +// Sometimes a consumer legitimately needs multiple capabilities +interface EmailAndSmsSender extends EmailSender, SmsSender {} + +// Or use intersection types +type MultiChannelSender = EmailSender & SmsSender & PushSender; + +// Consumer that genuinely needs multiple channels +@Injectable() +export class AlertService { + constructor( + @Inject(MULTI_CHANNEL_SENDER) + private sender: EmailSender & SmsSender + ) {} + + async sendCriticalAlert(user: User, message: string): Promise { + await Promise.all([ + this.sender.sendEmail(user.email, 'Critical Alert', message), + this.sender.sendSms(user.phone, message), + ]); + } +} +``` + +Reference: [Interface Segregation Principle](https://en.wikipedia.org/wiki/Interface_segregation_principle) + +--- + +### 2.3 Honor Liskov Substitution Principle + +**Impact: HIGH** — Ensures implementations are truly interchangeable without breaking callers + +Subtypes must be substitutable for their base types without altering program correctness. In NestJS with dependency injection, this means any implementation of an interface or abstract class must honor the contract completely. A mock payment service used in tests must behave like a real payment service (return similar shapes, handle errors the same way). Violating LSP causes subtle bugs when swapping implementations. + +**Incorrect (implementation violates the contract):** + +```typescript +// Base interface with clear contract +interface PaymentGateway { + /** + * Charges the specified amount. 
+ * @returns PaymentResult on success + * @throws PaymentFailedException on payment failure + */ + charge(amount: number, currency: string): Promise; +} + +// Production implementation - follows the contract +@Injectable() +export class StripeService implements PaymentGateway { + async charge(amount: number, currency: string): Promise { + const response = await this.stripe.charges.create({ amount, currency }); + return { success: true, transactionId: response.id, amount }; + } +} + +// Mock that violates LSP - different behavior! +@Injectable() +export class MockPaymentService implements PaymentGateway { + async charge(amount: number, currency: string): Promise { + // VIOLATION 1: Throws for valid input (contract says return PaymentResult) + if (amount > 1000) { + throw new Error('Mock does not support large amounts'); + } + + // VIOLATION 2: Returns null instead of PaymentResult + if (currency !== 'USD') { + return null as any; // Real service would convert or reject properly + } + + // VIOLATION 3: Missing required field + return { success: true } as PaymentResult; // Missing transactionId! + } +} + +// Consumer trusts the contract +@Injectable() +export class OrdersService { + constructor(@Inject(PAYMENT_GATEWAY) private payment: PaymentGateway) {} + + async checkout(order: Order): Promise { + const result = await this.payment.charge(order.total, order.currency); + // These fail with MockPaymentService: + await this.saveTransaction(result.transactionId); // undefined! + await this.sendReceipt(result); // might be null! + } +} +``` + +**Correct (implementations honor the contract):** + +```typescript +// Well-defined interface with documented behavior +interface PaymentGateway { + /** + * Charges the specified amount. 
+ * @param amount - Amount in smallest currency unit (cents) + * @param currency - ISO 4217 currency code + * @returns PaymentResult with transactionId, success status, and amount + * @throws PaymentFailedException if charge is declined + * @throws InvalidCurrencyException if currency is not supported + */ + charge(amount: number, currency: string): Promise; + + /** + * Refunds a previous charge. + * @throws TransactionNotFoundException if transactionId is invalid + */ + refund(transactionId: string, amount?: number): Promise; +} + +// Production implementation +@Injectable() +export class StripeService implements PaymentGateway { + async charge(amount: number, currency: string): Promise { + try { + const response = await this.stripe.charges.create({ amount, currency }); + return { + success: true, + transactionId: response.id, + amount: response.amount, + }; + } catch (error) { + if (error.type === 'card_error') { + throw new PaymentFailedException(error.message); + } + throw error; + } + } + + async refund(transactionId: string, amount?: number): Promise { + // Implementation... 
+ } +} + +// Mock that honors LSP - same contract, same behavior shape +@Injectable() +export class MockPaymentService implements PaymentGateway { + private transactions = new Map(); + + async charge(amount: number, currency: string): Promise { + // Honor the contract: validate currency like real service would + if (!['USD', 'EUR', 'GBP'].includes(currency)) { + throw new InvalidCurrencyException(`Unsupported currency: ${currency}`); + } + + // Simulate decline for specific test scenarios + if (amount === 99999) { + throw new PaymentFailedException('Card declined (test scenario)'); + } + + // Return same shape as production + const result: PaymentResult = { + success: true, + transactionId: `mock_${Date.now()}_${Math.random().toString(36)}`, + amount, + }; + + this.transactions.set(result.transactionId, result); + return result; + } + + async refund(transactionId: string, amount?: number): Promise { + // Honor the contract: throw if transaction not found + if (!this.transactions.has(transactionId)) { + throw new TransactionNotFoundException(transactionId); + } + + return { + success: true, + refundId: `refund_${transactionId}`, + amount: amount ?? 
this.transactions.get(transactionId)!.amount, + }; + } +} + +// Consumer can swap implementations safely +@Injectable() +export class OrdersService { + constructor(@Inject(PAYMENT_GATEWAY) private payment: PaymentGateway) {} + + async checkout(order: Order): Promise { + try { + const result = await this.payment.charge(order.total, order.currency); + // Works with both StripeService and MockPaymentService + order.transactionId = result.transactionId; + order.status = 'paid'; + return order; + } catch (error) { + if (error instanceof PaymentFailedException) { + order.status = 'payment_failed'; + return order; + } + throw error; + } + } +} +``` + +**Testing LSP compliance:** + +```typescript +// Shared test suite that any implementation must pass +function testPaymentGatewayContract(createGateway: () => PaymentGateway) { + describe('PaymentGateway contract', () => { + let gateway: PaymentGateway; + + beforeEach(() => { + gateway = createGateway(); + }); + + it('returns PaymentResult with all required fields', async () => { + const result = await gateway.charge(1000, 'USD'); + expect(result).toHaveProperty('success'); + expect(result).toHaveProperty('transactionId'); + expect(result).toHaveProperty('amount'); + expect(typeof result.transactionId).toBe('string'); + }); + + it('throws InvalidCurrencyException for unsupported currency', async () => { + await expect(gateway.charge(1000, 'INVALID')).rejects.toThrow(InvalidCurrencyException); + }); + + it('throws TransactionNotFoundException for invalid refund', async () => { + await expect(gateway.refund('nonexistent')).rejects.toThrow(TransactionNotFoundException); + }); + }); +} + +// Run against all implementations +describe('StripeService', () => { + testPaymentGatewayContract(() => new StripeService(mockStripeClient)); +}); + +describe('MockPaymentService', () => { + testPaymentGatewayContract(() => new MockPaymentService()); +}); +``` + +Reference: [Liskov Substitution 
Principle](https://en.wikipedia.org/wiki/Liskov_substitution_principle) + +--- + +### 2.4 Prefer Constructor Injection + +**Impact: CRITICAL** — Required for proper DI and testing + +Always use constructor injection over property injection. Constructor injection makes dependencies explicit, enables TypeScript type checking, ensures dependencies are available when the class is instantiated, and improves testability. This is required for proper DI, testing, and TypeScript support. + +**Incorrect (property injection with hidden dependencies):** + +```typescript +// Property injection - avoid unless necessary +@Injectable() +export class UsersService { + @Inject() + private userRepo: UserRepository; // Hidden dependency + + @Inject('CONFIG') + private config: ConfigType; // Also hidden + + async findAll() { + return this.userRepo.find(); + } +} + +// Problems: +// 1. Dependencies not visible in constructor +// 2. Service can be instantiated without dependencies in tests +// 3. TypeScript can't enforce dependency types at instantiation +``` + +**Correct (constructor injection with explicit dependencies):** + +```typescript +// Constructor injection - explicit and testable +@Injectable() +export class UsersService { + constructor( + private readonly userRepo: UserRepository, + @Inject('CONFIG') private readonly config: ConfigType + ) {} + + async findAll(): Promise { + return this.userRepo.find(); + } +} + +// Testing is straightforward +describe('UsersService', () => { + let service: UsersService; + let mockRepo: jest.Mocked; + + beforeEach(() => { + mockRepo = { + find: jest.fn(), + save: jest.fn(), + } as any; + + service = new UsersService(mockRepo, { dbUrl: 'test' }); + }); + + it('should find all users', async () => { + mockRepo.find.mockResolvedValue([{ id: '1', name: 'Test' }]); + const result = await service.findAll(); + expect(result).toHaveLength(1); + }); +}); + +// Only use property injection for optional dependencies +@Injectable() +export class 
LoggingService { + @Optional() + @Inject('ANALYTICS') + private analytics?: AnalyticsService; + + log(message: string) { + console.log(message); + this.analytics?.track('log', message); // Optional enhancement + } +} +``` + +Reference: [NestJS Providers](https://docs.nestjs.com/providers) + +--- + +### 2.5 Understand Provider Scopes + +**Impact: CRITICAL** — Prevents data leaks and performance issues + +NestJS has three provider scopes: DEFAULT (singleton), REQUEST (per-request instance), and TRANSIENT (new instance for each injection). Most providers should be singletons. Request-scoped providers have performance implications as they bubble up through the dependency tree. Understanding scopes prevents memory leaks and incorrect data sharing. + +**Incorrect (wrong scope usage):** + +```typescript +// Request-scoped when not needed (performance hit) +@Injectable({ scope: Scope.REQUEST }) +export class UsersService { + // This creates a new instance for EVERY request + // All dependencies also become request-scoped + async findAll() { + return this.userRepo.find(); + } +} + +// Singleton with mutable request state +@Injectable() // Default: singleton +export class RequestContextService { + private userId: string; // DANGER: Shared across all requests! + + setUser(userId: string) { + this.userId = userId; // Overwrites for all concurrent requests + } + + getUser() { + return this.userId; // Returns wrong user! 
+ } +} +``` + +**Correct (appropriate scope for each use case):** + +```typescript +// Singleton for stateless services (default, most common) +@Injectable() +export class UsersService { + constructor(private readonly userRepo: UserRepository) {} + + async findById(id: string): Promise { + return this.userRepo.findOne({ where: { id } }); + } +} + +// Request-scoped ONLY when you need request context +@Injectable({ scope: Scope.REQUEST }) +export class RequestContextService { + private userId: string; + + setUser(userId: string) { + this.userId = userId; + } + + getUser(): string { + return this.userId; + } +} + +// Better: Use NestJS built-in request context +import { REQUEST } from '@nestjs/core'; +import { Request } from 'express'; + +@Injectable({ scope: Scope.REQUEST }) +export class AuditService { + constructor(@Inject(REQUEST) private request: Request) {} + + log(action: string) { + console.log(`User ${this.request.user?.id} performed ${action}`); + } +} + +// Best: Use ClsModule for async context (no scope bubble-up) +import { ClsService } from 'nestjs-cls'; + +@Injectable() // Stays singleton! +export class AuditService { + constructor(private cls: ClsService) {} + + log(action: string) { + const userId = this.cls.get('userId'); + console.log(`User ${userId} performed ${action}`); + } +} +``` + +Reference: [NestJS Injection Scopes](https://docs.nestjs.com/fundamentals/injection-scopes) + +--- + +### 2.6 Use Injection Tokens for Interfaces + +**Impact: HIGH** — Enables interface-based DI at runtime + +TypeScript interfaces are erased at compile time and can't be used as injection tokens. Use string tokens, symbols, or abstract classes when you want to inject implementations of interfaces. This enables swapping implementations for testing or different environments. 
+ +**Incorrect (interface can't be used as token):** + +```typescript +// Interface can't be used as injection token +interface PaymentGateway { + charge(amount: number): Promise<PaymentResult>; +} + +@Injectable() +export class StripeService implements PaymentGateway { + charge(amount: number) { + /* ... */ + } +} + +@Injectable() +export class OrdersService { + // This WON'T work - PaymentGateway doesn't exist at runtime + constructor(private payment: PaymentGateway) {} +} +``` + +**Correct (symbol tokens or abstract classes):** + +```typescript +// Option 1: String/Symbol tokens (most flexible) +export const PAYMENT_GATEWAY = Symbol('PAYMENT_GATEWAY'); + +export interface PaymentGateway { + charge(amount: number): Promise<PaymentResult>; +} + +@Injectable() +export class StripeService implements PaymentGateway { + async charge(amount: number): Promise<PaymentResult> { + // Stripe implementation + } +} + +@Injectable() +export class MockPaymentService implements PaymentGateway { + async charge(amount: number): Promise<PaymentResult> { + return { success: true, id: 'mock-id' }; + } +} + +// Module registration +@Module({ + providers: [ + { + provide: PAYMENT_GATEWAY, + useClass: process.env.NODE_ENV === 'test' ? 
MockPaymentService : StripeService, + }, + ], + exports: [PAYMENT_GATEWAY], +}) +export class PaymentModule {} + +// Injection +@Injectable() +export class OrdersService { + constructor(@Inject(PAYMENT_GATEWAY) private payment: PaymentGateway) {} + + async createOrder(dto: CreateOrderDto) { + await this.payment.charge(dto.amount); + } +} + +// Option 2: Abstract class (carries runtime type info) +export abstract class PaymentGateway { + abstract charge(amount: number): Promise; +} + +@Injectable() +export class StripeService extends PaymentGateway { + async charge(amount: number): Promise { + // Implementation + } +} + +// No @Inject needed with abstract class +@Injectable() +export class OrdersService { + constructor(private payment: PaymentGateway) {} +} +``` + +Reference: [NestJS Custom Providers](https://docs.nestjs.com/fundamentals/custom-providers) + +--- + +## 3. Error Handling + +**Section Impact: HIGH** + +### 3.1 Handle Async Errors Properly + +**Impact: HIGH** — Prevents process crashes from unhandled rejections + +NestJS automatically catches errors from async route handlers, but errors from background tasks, event handlers, and manually created promises can crash your application. Always handle async errors explicitly and use global handlers as a safety net. + +**Incorrect (fire-and-forget without error handling):** + +```typescript +// Fire-and-forget without error handling +@Injectable() +export class UsersService { + async createUser(dto: CreateUserDto): Promise { + const user = await this.repo.save(dto); + + // Fire and forget - if this fails, error is unhandled! + this.emailService.sendWelcome(user.email); + + return user; + } +} + +// Unhandled promise in event handler +@Injectable() +export class OrdersService { + @OnEvent('order.created') + handleOrderCreated(event: OrderCreatedEvent) { + // This returns a promise but it's not awaited! 
+ this.processOrder(event); + // Errors will crash the process + } + + private async processOrder(event: OrderCreatedEvent): Promise { + await this.inventoryService.reserve(event.items); + await this.notificationService.send(event.userId); + } +} + +// Missing try-catch in scheduled tasks +@Cron('0 0 * * *') +async dailyCleanup(): Promise { + await this.cleanupService.run(); + // If this throws, no error handling +} +``` + +**Correct (explicit async error handling):** + +```typescript +// Handle fire-and-forget with explicit catch +@Injectable() +export class UsersService { + private readonly logger = new Logger(UsersService.name); + + async createUser(dto: CreateUserDto): Promise { + const user = await this.repo.save(dto); + + // Explicitly catch and log errors + this.emailService.sendWelcome(user.email).catch((error) => { + this.logger.error('Failed to send welcome email', error.stack); + // Optionally queue for retry + }); + + return user; + } +} + +// Properly handle async event handlers +@Injectable() +export class OrdersService { + private readonly logger = new Logger(OrdersService.name); + + @OnEvent('order.created') + async handleOrderCreated(event: OrderCreatedEvent): Promise { + try { + await this.processOrder(event); + } catch (error) { + this.logger.error('Failed to process order', { event, error }); + // Don't rethrow - would crash the process + await this.deadLetterQueue.add('order.created', event); + } + } +} + +// Safe scheduled tasks +@Injectable() +export class CleanupService { + private readonly logger = new Logger(CleanupService.name); + + @Cron('0 0 * * *') + async dailyCleanup(): Promise { + try { + await this.cleanupService.run(); + this.logger.log('Daily cleanup completed'); + } catch (error) { + this.logger.error('Daily cleanup failed', error.stack); + // Alert or retry logic + } + } +} + +// Global unhandled rejection handler in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule); + const logger = new 
Logger('Bootstrap'); + + process.on('unhandledRejection', (reason, promise) => { + logger.error('Unhandled Rejection at:', promise, 'reason:', reason); + }); + + process.on('uncaughtException', (error) => { + logger.error('Uncaught Exception:', error); + process.exit(1); + }); + + await app.listen(3000); +} +``` + +Reference: [Node.js Unhandled Rejections](https://nodejs.org/api/process.html#event-unhandledrejection) + +--- + +### 3.2 Throw HTTP Exceptions from Services + +**Impact: HIGH** — Keeps controllers thin and simplifies error handling + +It's acceptable (and often preferable) to throw `HttpException` subclasses from services in HTTP applications. This keeps controllers thin and allows services to communicate appropriate error states. For truly layer-agnostic services, use domain exceptions that map to HTTP status codes. + +**Incorrect (return error objects instead of throwing):** + +```typescript +// Return error objects instead of throwing +@Injectable() +export class UsersService { + async findById(id: string): Promise<{ user?: User; error?: string }> { + const user = await this.repo.findOne({ where: { id } }); + if (!user) { + return { error: 'User not found' }; // Controller must check this + } + return { user }; + } +} + +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string) { + const result = await this.usersService.findById(id); + if (result.error) { + throw new NotFoundException(result.error); + } + return result.user; + } +} +``` + +**Correct (throw exceptions directly from service):** + +```typescript +// Throw exceptions directly from service +@Injectable() +export class UsersService { + constructor(private readonly repo: UserRepository) {} + + async findById(id: string): Promise { + const user = await this.repo.findOne({ where: { id } }); + if (!user) { + throw new NotFoundException(`User #${id} not found`); + } + return user; + } + + async create(dto: CreateUserDto): Promise { + const 
existing = await this.repo.findOne({ + where: { email: dto.email }, + }); + if (existing) { + throw new ConflictException('Email already registered'); + } + return this.repo.save(dto); + } + + async update(id: string, dto: UpdateUserDto): Promise { + const user = await this.findById(id); // Throws if not found + Object.assign(user, dto); + return this.repo.save(user); + } +} + +// Controller stays thin +@Controller('users') +export class UsersController { + @Get(':id') + findOne(@Param('id') id: string): Promise { + return this.usersService.findById(id); + } + + @Post() + create(@Body() dto: CreateUserDto): Promise { + return this.usersService.create(dto); + } +} + +// For layer-agnostic services, use domain exceptions +export class EntityNotFoundException extends Error { + constructor( + public readonly entity: string, + public readonly id: string + ) { + super(`${entity} with ID "${id}" not found`); + } +} + +// Map to HTTP in exception filter +@Catch(EntityNotFoundException) +export class EntityNotFoundFilter implements ExceptionFilter { + catch(exception: EntityNotFoundException, host: ArgumentsHost) { + const ctx = host.switchToHttp(); + const response = ctx.getResponse(); + + response.status(404).json({ + statusCode: 404, + message: exception.message, + entity: exception.entity, + id: exception.id, + }); + } +} +``` + +Reference: [NestJS Exception Filters](https://docs.nestjs.com/exception-filters) + +--- + +### 3.3 Use Exception Filters for Error Handling + +**Impact: HIGH** — Consistent, centralized error handling + +Never catch exceptions and manually format error responses in controllers. Use NestJS exception filters to handle errors consistently across your application. Create custom exception filters for specific error types and a global filter for unhandled exceptions. 
+ +**Incorrect (manual error handling in controllers):** + +```typescript +// Manual error handling in controllers +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string, @Res() res: Response) { + try { + const user = await this.usersService.findById(id); + if (!user) { + return res.status(404).json({ + statusCode: 404, + message: 'User not found', + }); + } + return res.json(user); + } catch (error) { + console.error(error); + return res.status(500).json({ + statusCode: 500, + message: 'Internal server error', + }); + } + } +} +``` + +**Correct (exception filters with consistent handling):** + +```typescript +// Use built-in and custom exceptions +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const user = await this.usersService.findById(id); + if (!user) { + throw new NotFoundException(`User #${id} not found`); + } + return user; + } +} + +// Custom domain exception +export class UserNotFoundException extends NotFoundException { + constructor(userId: string) { + super({ + statusCode: 404, + error: 'Not Found', + message: `User with ID "${userId}" not found`, + code: 'USER_NOT_FOUND', + }); + } +} + +// Custom exception filter for domain errors +@Catch(DomainException) +export class DomainExceptionFilter implements ExceptionFilter { + catch(exception: DomainException, host: ArgumentsHost) { + const ctx = host.switchToHttp(); + const response = ctx.getResponse(); + const request = ctx.getRequest(); + + const status = exception.getStatus?.() || 400; + + response.status(status).json({ + statusCode: status, + code: exception.code, + message: exception.message, + timestamp: new Date().toISOString(), + path: request.url, + }); + } +} + +// Global exception filter for unhandled errors +@Catch() +export class AllExceptionsFilter implements ExceptionFilter { + constructor(private readonly logger: Logger) {} + + catch(exception: unknown, host: 
ArgumentsHost) { + const ctx = host.switchToHttp(); + const response = ctx.getResponse(); + const request = ctx.getRequest(); + + const status = exception instanceof HttpException ? exception.getStatus() : HttpStatus.INTERNAL_SERVER_ERROR; + + const message = exception instanceof HttpException ? exception.message : 'Internal server error'; + + this.logger.error(`${request.method} ${request.url}`, exception instanceof Error ? exception.stack : exception); + + response.status(status).json({ + statusCode: status, + message, + timestamp: new Date().toISOString(), + path: request.url, + }); + } +} + +// Register globally in main.ts +app.useGlobalFilters(new AllExceptionsFilter(app.get(Logger)), new DomainExceptionFilter()); + +// Or via module +@Module({ + providers: [ + { + provide: APP_FILTER, + useClass: AllExceptionsFilter, + }, + ], +}) +export class AppModule {} +``` + +Reference: [NestJS Exception Filters](https://docs.nestjs.com/exception-filters) + +--- + +## 4. Security + +**Section Impact: HIGH** + +### 4.1 Implement Secure JWT Authentication + +**Impact: CRITICAL** — Essential for secure APIs + +Use `@nestjs/jwt` with `@nestjs/passport` for authentication. Store secrets securely, use appropriate token lifetimes, implement refresh tokens, and validate tokens properly. Never expose sensitive data in JWT payloads. + +**Incorrect (insecure JWT implementation):** + +```typescript +// Hardcode secrets +@Module({ + imports: [ + JwtModule.register({ + secret: 'my-secret-key', // Exposed in code + signOptions: { expiresIn: '7d' }, // Too long + }), + ], +}) +export class AuthModule {} + +// Store sensitive data in JWT +async login(user: User): Promise<{ accessToken: string }> { + const payload = { + sub: user.id, + email: user.email, + password: user.password, // NEVER include password! + ssn: user.ssn, // NEVER include sensitive data! 
+ isAdmin: user.isAdmin, // Can be tampered if not verified + }; + return { accessToken: this.jwtService.sign(payload) }; +} + +// Skip token validation +@Injectable() +export class JwtStrategy extends PassportStrategy(Strategy) { + constructor() { + super({ + jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(), + secretOrKey: 'my-secret', + }); + } + + async validate(payload: any): Promise { + return payload; // No validation of user existence + } +} +``` + +**Correct (secure JWT with refresh tokens):** + +```typescript +// Secure JWT configuration +@Module({ + imports: [ + JwtModule.registerAsync({ + imports: [ConfigModule], + inject: [ConfigService], + useFactory: (config: ConfigService) => ({ + secret: config.get('JWT_SECRET'), + signOptions: { + expiresIn: '15m', // Short-lived access tokens + issuer: config.get('JWT_ISSUER'), + audience: config.get('JWT_AUDIENCE'), + }, + }), + }), + PassportModule.register({ defaultStrategy: 'jwt' }), + ], +}) +export class AuthModule {} + +// Minimal JWT payload +@Injectable() +export class AuthService { + async login(user: User): Promise { + // Only include necessary, non-sensitive data + const payload: JwtPayload = { + sub: user.id, + email: user.email, + roles: user.roles, + iat: Math.floor(Date.now() / 1000), + }; + + const accessToken = this.jwtService.sign(payload); + const refreshToken = await this.createRefreshToken(user.id); + + return { accessToken, refreshToken, expiresIn: 900 }; + } + + private async createRefreshToken(userId: string): Promise { + const token = randomBytes(32).toString('hex'); + const hashedToken = await bcrypt.hash(token, 10); + + await this.refreshTokenRepo.save({ + userId, + token: hashedToken, + expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), // 7 days + }); + + return token; + } +} + +// Proper JWT strategy with validation +@Injectable() +export class JwtStrategy extends PassportStrategy(Strategy) { + constructor( + private config: ConfigService, + private usersService: 
UsersService + ) { + super({ + jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(), + secretOrKey: config.get('JWT_SECRET'), + ignoreExpiration: false, + issuer: config.get('JWT_ISSUER'), + audience: config.get('JWT_AUDIENCE'), + }); + } + + async validate(payload: JwtPayload): Promise { + // Verify user still exists and is active + const user = await this.usersService.findById(payload.sub); + + if (!user || !user.isActive) { + throw new UnauthorizedException('User not found or inactive'); + } + + // Verify token wasn't issued before password change + if (user.passwordChangedAt) { + const tokenIssuedAt = new Date(payload.iat * 1000); + if (tokenIssuedAt < user.passwordChangedAt) { + throw new UnauthorizedException('Token invalidated by password change'); + } + } + + return user; + } +} +``` + +Reference: [NestJS Authentication](https://docs.nestjs.com/security/authentication) + +--- + +### 4.2 Implement Rate Limiting + +**Impact: HIGH** — Protects against abuse and ensures fair resource usage + +Use `@nestjs/throttler` to limit request rates per client. Apply different limits for different endpoints - stricter for auth endpoints, more relaxed for read operations. Consider using Redis for distributed rate limiting in clustered deployments. 
+ +**Incorrect (no rate limiting on sensitive endpoints):** + +```typescript +// No rate limiting on sensitive endpoints +@Controller('auth') +export class AuthController { + @Post('login') + async login(@Body() dto: LoginDto): Promise { + // Attackers can brute-force credentials + return this.authService.login(dto); + } + + @Post('forgot-password') + async forgotPassword(@Body() dto: ForgotPasswordDto): Promise { + // Can be abused to spam users with emails + return this.authService.sendResetEmail(dto.email); + } +} + +// Same limits for all endpoints +@UseGuards(ThrottlerGuard) +@Controller('api') +export class ApiController { + @Get('public-data') + async getPublic() {} // Should allow more requests + + @Post('process-payment') + async payment() {} // Should be more restrictive +} +``` + +**Correct (configured throttler with endpoint-specific limits):** + +```typescript +// Configure throttler globally with multiple limits +import { ThrottlerModule, ThrottlerGuard } from '@nestjs/throttler'; + +@Module({ + imports: [ + ThrottlerModule.forRoot([ + { + name: 'short', + ttl: 1000, // 1 second + limit: 3, // 3 requests per second + }, + { + name: 'medium', + ttl: 10000, // 10 seconds + limit: 20, // 20 requests per 10 seconds + }, + { + name: 'long', + ttl: 60000, // 1 minute + limit: 100, // 100 requests per minute + }, + ]), + ], + providers: [ + { + provide: APP_GUARD, + useClass: ThrottlerGuard, + }, + ], +}) +export class AppModule {} + +// Override limits per endpoint +@Controller('auth') +export class AuthController { + @Post('login') + @Throttle({ short: { limit: 5, ttl: 60000 } }) // 5 attempts per minute + async login(@Body() dto: LoginDto): Promise { + return this.authService.login(dto); + } + + @Post('forgot-password') + @Throttle({ short: { limit: 3, ttl: 3600000 } }) // 3 per hour + async forgotPassword(@Body() dto: ForgotPasswordDto): Promise { + return this.authService.sendResetEmail(dto.email); + } +} + +// Skip throttling for certain routes 
+@Controller('health') +export class HealthController { + @Get() + @SkipThrottle() + check(): string { + return 'OK'; + } +} + +// Custom throttle per user type +@Injectable() +export class CustomThrottlerGuard extends ThrottlerGuard { + protected async getTracker(req: Request): Promise { + // Use user ID if authenticated, IP otherwise + return req.user?.id || req.ip; + } + + protected async getLimit(context: ExecutionContext): Promise { + const request = context.switchToHttp().getRequest(); + + // Higher limits for authenticated users + if (request.user) { + return request.user.isPremium ? 1000 : 200; + } + + return 50; // Anonymous users + } +} +``` + +Reference: [NestJS Throttler](https://docs.nestjs.com/security/rate-limiting) + +--- + +### 4.3 Sanitize Output to Prevent XSS + +**Impact: HIGH** — XSS vulnerabilities can compromise user sessions and data + +While NestJS APIs typically return JSON (which browsers don't execute), XSS risks exist when rendering HTML, storing user content, or when frontend frameworks improperly handle API responses. Sanitize user-generated content before storage and use proper Content-Type headers. 
+ +**Incorrect (storing raw HTML without sanitization):** + +```typescript +// Store raw HTML from users +@Injectable() +export class CommentsService { + async create(dto: CreateCommentDto): Promise<Comment> { + // User can inject: <script>alert('XSS')</script> + return this.repo.save({ + content: dto.content, // Raw, unsanitized + authorId: dto.authorId, + }); + } +} + +// Return HTML without sanitization +@Controller('pages') +export class PagesController { + @Get(':slug') + @Header('Content-Type', 'text/html') + async getPage(@Param('slug') slug: string): Promise<string> { + const page = await this.pagesService.findBySlug(slug); + // If page.content contains user input, XSS is possible + return `<html><body>${page.content}</body></html>`; + } +} + +// Reflect user input in errors +@Get(':id') +async findOne(@Param('id') id: string): Promise<User> { + const user = await this.repo.findOne({ where: { id } }); + if (!user) { + // XSS if id contains malicious content and error is rendered + throw new NotFoundException(`User ${id} not found`); + } + return user; +} +``` + +**Correct (sanitize content and use proper headers):** + +```typescript +// Sanitize HTML content before storage +import * as sanitizeHtml from 'sanitize-html'; + +@Injectable() +export class CommentsService { + private readonly sanitizeOptions: sanitizeHtml.IOptions = { + allowedTags: ['b', 'i', 'em', 'strong', 'a', 'p', 'br'], + allowedAttributes: { + a: ['href', 'title'], + }, + allowedSchemes: ['http', 'https', 'mailto'], + }; + + async create(dto: CreateCommentDto): Promise<Comment> { + return this.repo.save({ + content: sanitizeHtml(dto.content, this.sanitizeOptions), + authorId: dto.authorId, + }); + } +} + +// Use validation pipe to strip HTML +import { Transform } from 'class-transformer'; + +export class CreatePostDto { + @IsString() + @MaxLength(1000) + @Transform(({ value }) => sanitizeHtml(value, { allowedTags: [] })) + title: string; + + @IsString() + @Transform(({ value }) => + sanitizeHtml(value, { + allowedTags: ['p', 'br', 'b', 'i', 'a'], + allowedAttributes: { a: 
['href'] }, + }), + ) + content: string; +} + +// Set proper Content-Type headers +@Controller('api') +export class ApiController { + @Get('data') + @Header('Content-Type', 'application/json') + async getData(): Promise { + // JSON response - browser won't execute scripts + return this.service.getData(); + } +} + +// Sanitize error messages +@Get(':id') +async findOne(@Param('id', ParseUUIDPipe) id: string): Promise { + const user = await this.repo.findOne({ where: { id } }); + if (!user) { + // UUID validation ensures safe format + throw new NotFoundException('User not found'); + } + return user; +} + +// Use Helmet for CSP headers +import helmet from 'helmet'; + +async function bootstrap() { + const app = await NestFactory.create(AppModule); + + app.use( + helmet({ + contentSecurityPolicy: { + directives: { + defaultSrc: ["'self'"], + scriptSrc: ["'self'"], + styleSrc: ["'self'", "'unsafe-inline'"], + imgSrc: ["'self'", 'data:', 'https:'], + }, + }, + }), + ); + + await app.listen(3000); +} +``` + +Reference: [OWASP XSS Prevention](https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html) + +--- + +### 4.4 Use Guards for Authentication and Authorization + +**Impact: HIGH** — Enforces access control before handlers execute + +Guards determine whether a request should be handled based on authentication state, roles, permissions, or other conditions. They run after middleware but before pipes and interceptors, making them ideal for access control. Use guards instead of manual checks in controllers. 
+ +**Incorrect (manual auth checks in every handler):** + +```typescript +// Manual auth checks in every handler +@Controller('admin') +export class AdminController { + @Get('users') + async getUsers(@Request() req) { + if (!req.user) { + throw new UnauthorizedException(); + } + if (!req.user.roles.includes('admin')) { + throw new ForbiddenException(); + } + return this.adminService.getUsers(); + } + + @Delete('users/:id') + async deleteUser(@Request() req, @Param('id') id: string) { + if (!req.user) { + throw new UnauthorizedException(); + } + if (!req.user.roles.includes('admin')) { + throw new ForbiddenException(); + } + return this.adminService.deleteUser(id); + } +} +``` + +**Correct (guards with declarative decorators):** + +```typescript +// JWT Auth Guard +@Injectable() +export class JwtAuthGuard implements CanActivate { + constructor( + private jwtService: JwtService, + private reflector: Reflector + ) {} + + async canActivate(context: ExecutionContext): Promise<boolean> { + // Check for @Public() decorator + const isPublic = this.reflector.getAllAndOverride<boolean>('isPublic', [context.getHandler(), context.getClass()]); + if (isPublic) return true; + + const request = context.switchToHttp().getRequest(); + const token = this.extractToken(request); + + if (!token) { + throw new UnauthorizedException('No token provided'); + } + + try { + request.user = await this.jwtService.verifyAsync(token); + return true; + } catch { + throw new UnauthorizedException('Invalid token'); + } + } + + private extractToken(request: Request): string | undefined { + const [type, token] = request.headers.authorization?.split(' ') ?? []; + return type === 'Bearer' ? 
token : undefined; + } +} + +// Roles Guard +@Injectable() +export class RolesGuard implements CanActivate { + constructor(private reflector: Reflector) {} + + canActivate(context: ExecutionContext): boolean { + const requiredRoles = this.reflector.getAllAndOverride('roles', [context.getHandler(), context.getClass()]); + + if (!requiredRoles) return true; + + const { user } = context.switchToHttp().getRequest(); + return requiredRoles.some((role) => user.roles?.includes(role)); + } +} + +// Decorators +export const Public = () => SetMetadata('isPublic', true); +export const Roles = (...roles: Role[]) => SetMetadata('roles', roles); + +// Register guards globally +@Module({ + providers: [ + { provide: APP_GUARD, useClass: JwtAuthGuard }, + { provide: APP_GUARD, useClass: RolesGuard }, + ], +}) +export class AppModule {} + +// Clean controller +@Controller('admin') +@Roles(Role.Admin) // Applied to all routes +export class AdminController { + @Get('users') + getUsers(): Promise { + return this.adminService.getUsers(); + } + + @Delete('users/:id') + deleteUser(@Param('id') id: string): Promise { + return this.adminService.deleteUser(id); + } + + @Public() // Override: no auth required + @Get('health') + health() { + return { status: 'ok' }; + } +} +``` + +Reference: [NestJS Guards](https://docs.nestjs.com/guards) + +--- + +### 4.5 Validate All Input with DTOs and Pipes + +**Impact: HIGH** — First line of defense against attacks + +Always validate incoming data using class-validator decorators on DTOs and the global ValidationPipe. Never trust user input. Validate all request bodies, query parameters, and route parameters before processing. + +**Incorrect (trust raw input without validation):** + +```typescript +// Trust raw input without validation +@Controller('users') +export class UsersController { + @Post() + create(@Body() body: any) { + // body could contain anything - SQL injection, XSS, etc. 
+ return this.usersService.create(body); + } + + @Get() + findAll(@Query() query: any) { + // query.limit could be "'; DROP TABLE users; --" + return this.usersService.findAll(query.limit); + } +} + +// DTOs without validation decorators +export class CreateUserDto { + name: string; // No validation + email: string; // Could be "not-an-email" + age: number; // Could be "abc" or -999 +} +``` + +**Correct (validated DTOs with global ValidationPipe):** + +```typescript +// Enable ValidationPipe globally in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule); + + app.useGlobalPipes( + new ValidationPipe({ + whitelist: true, // Strip unknown properties + forbidNonWhitelisted: true, // Throw on unknown properties + transform: true, // Auto-transform to DTO types + transformOptions: { + enableImplicitConversion: true, + }, + }) + ); + + await app.listen(3000); +} + +// Create well-validated DTOs +import { + IsString, + IsEmail, + IsInt, + Min, + Max, + IsOptional, + MinLength, + MaxLength, + Matches, + IsNotEmpty, +} from 'class-validator'; +import { Transform, Type } from 'class-transformer'; + +export class CreateUserDto { + @IsString() + @IsNotEmpty() + @MinLength(2) + @MaxLength(100) + @Transform(({ value }) => value?.trim()) + name: string; + + @IsEmail() + @Transform(({ value }) => value?.toLowerCase().trim()) + email: string; + + @IsInt() + @Min(0) + @Max(150) + age: number; + + @IsString() + @MinLength(8) + @MaxLength(100) + @Matches(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)/, { + message: 'Password must contain uppercase, lowercase, and number', + }) + password: string; +} + +// Query DTO with defaults and transformation +export class FindUsersQueryDto { + @IsOptional() + @IsString() + @MaxLength(100) + search?: string; + + @IsOptional() + @Type(() => Number) + @IsInt() + @Min(1) + @Max(100) + limit: number = 20; + + @IsOptional() + @Type(() => Number) + @IsInt() + @Min(0) + offset: number = 0; +} + +// Param validation +export class 
UserIdParamDto { + @IsUUID('4') + id: string; +} + +@Controller('users') +export class UsersController { + @Post() + create(@Body() dto: CreateUserDto): Promise { + // dto is guaranteed to be valid + return this.usersService.create(dto); + } + + @Get() + findAll(@Query() query: FindUsersQueryDto): Promise { + // query.limit is a number, query.search is sanitized + return this.usersService.findAll(query); + } + + @Get(':id') + findOne(@Param() params: UserIdParamDto): Promise { + // params.id is a valid UUID + return this.usersService.findById(params.id); + } +} +``` + +Reference: [NestJS Validation](https://docs.nestjs.com/techniques/validation) + +--- + +## 5. Performance + +**Section Impact: HIGH** + +### 5.1 Use Async Lifecycle Hooks Correctly + +**Impact: HIGH** — Improper async handling blocks application startup + +NestJS lifecycle hooks (`onModuleInit`, `onApplicationBootstrap`, etc.) support async operations. However, misusing them can block application startup or cause race conditions. Understand the lifecycle order and use hooks appropriately. + +**Incorrect (fire-and-forget async without await):** + +```typescript +// Fire-and-forget async without await +@Injectable() +export class DatabaseService implements OnModuleInit { + onModuleInit() { + // This runs but doesn't block - app starts before DB is ready! 
+ this.connect(); + } + + private async connect() { + await this.pool.connect(); + console.log('Database connected'); + } +} + +// Heavy blocking operations in constructor +@Injectable() +export class ConfigService { + private config: Config; + + constructor() { + // BLOCKS entire module instantiation synchronously + this.config = fs.readFileSync('config.json'); + } +} +``` + +**Correct (return promises from async hooks):** + +```typescript +// Return promise from async hooks +@Injectable() +export class DatabaseService implements OnModuleInit { + private pool: Pool; + + async onModuleInit(): Promise<void> { + // NestJS waits for this to complete before continuing + await this.pool.connect(); + console.log('Database connected'); + } + + async onModuleDestroy(): Promise<void> { + // Clean up resources on shutdown + await this.pool.end(); + console.log('Database disconnected'); + } +} + +// Use onApplicationBootstrap for cross-module dependencies +@Injectable() +export class CacheWarmerService implements OnApplicationBootstrap { + constructor( + private cache: CacheService, + private products: ProductsService + ) {} + + async onApplicationBootstrap(): Promise<void> { + // All modules are initialized, safe to warm cache + const products = await this.products.findPopular(); + await this.cache.warmup(products); + } +} + +// Heavy init in async hooks, not constructor +@Injectable() +export class ConfigService implements OnModuleInit { + private config: Config; + + constructor() { + // Keep constructor synchronous and fast + } + + async onModuleInit(): Promise<void> { + // Async loading in lifecycle hook + this.config = await this.loadConfig(); + } + + private async loadConfig(): Promise<Config> { + const file = await fs.promises.readFile('config.json'); + return JSON.parse(file.toString()); + } + + get<T>(key: string): T { + return this.config[key]; + } +} + +// Enable shutdown hooks in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule); + app.enableShutdownHooks(); // 
Enable SIGTERM/SIGINT handling + await app.listen(3000); +} +``` + +Reference: [NestJS Lifecycle Events](https://docs.nestjs.com/fundamentals/lifecycle-events) + +--- + +### 5.2 Use Lazy Loading for Large Modules + +**Impact: MEDIUM** — Improves startup time for large applications + +NestJS supports lazy-loading modules, which defers initialization until first use. This is valuable for large applications where some features are rarely used, serverless deployments where cold start time matters, or when certain modules have heavy initialization costs. + +**Incorrect (loading everything eagerly):** + +```typescript +// Load everything eagerly in a large app +@Module({ + imports: [ + UsersModule, + OrdersModule, + PaymentsModule, + ReportsModule, // Heavy, rarely used + AnalyticsModule, // Heavy, rarely used + AdminModule, // Only admins use this + LegacyModule, // Migration module, rarely used + BulkImportModule, // Used once a month + ], +}) +export class AppModule {} + +// All modules initialize at startup, even if never used +// Slow cold starts in serverless +// Memory wasted on unused modules +``` + +**Correct (lazy load rarely-used modules):** + +```typescript +// Use LazyModuleLoader for optional modules +import { LazyModuleLoader } from '@nestjs/core'; + +@Injectable() +export class ReportsService { + constructor(private lazyModuleLoader: LazyModuleLoader) {} + + async generateReport(type: string): Promise { + // Load module only when needed + const { ReportsModule } = await import('./reports/reports.module'); + const moduleRef = await this.lazyModuleLoader.load(() => ReportsModule); + + const reportsService = moduleRef.get(ReportsGeneratorService); + return reportsService.generate(type); + } +} + +// Lazy load admin features with caching +@Injectable() +export class AdminService { + private adminModule: ModuleRef | null = null; + + constructor(private lazyModuleLoader: LazyModuleLoader) {} + + private async getAdminModule(): Promise { + if (!this.adminModule) 
{ + const { AdminModule } = await import('./admin/admin.module'); + this.adminModule = await this.lazyModuleLoader.load(() => AdminModule); + } + return this.adminModule; + } + + async runAdminTask(task: string): Promise { + const moduleRef = await this.getAdminModule(); + const taskRunner = moduleRef.get(AdminTaskRunner); + await taskRunner.run(task); + } +} + +// Reusable lazy loader service +@Injectable() +export class ModuleLoaderService { + private loadedModules = new Map(); + + constructor(private lazyModuleLoader: LazyModuleLoader) {} + + async load(key: string, importFn: () => Promise<{ default: Type } | Type>): Promise { + if (!this.loadedModules.has(key)) { + const module = await importFn(); + const moduleType = 'default' in module ? module.default : module; + const moduleRef = await this.lazyModuleLoader.load(() => moduleType); + this.loadedModules.set(key, moduleRef); + } + return this.loadedModules.get(key)!; + } +} + +// Preload modules in background after startup +@Injectable() +export class ModulePreloader implements OnApplicationBootstrap { + constructor(private lazyModuleLoader: LazyModuleLoader) {} + + async onApplicationBootstrap(): Promise { + setTimeout(async () => { + await this.preloadModule(() => import('./reports/reports.module')); + }, 5000); // 5 seconds after startup + } + + private async preloadModule(importFn: () => Promise): Promise { + try { + const module = await importFn(); + const moduleType = module.default || Object.values(module)[0]; + await this.lazyModuleLoader.load(() => moduleType); + } catch (error) { + console.warn('Failed to preload module', error); + } + } +} +``` + +Reference: [NestJS Lazy Loading Modules](https://docs.nestjs.com/fundamentals/lazy-loading-modules) + +--- + +### 5.3 Optimize Database Queries + +**Impact: HIGH** — Database queries are typically the largest source of latency + +Select only needed columns, use proper indexes, avoid over-fetching relations, and consider query performance when designing 
your data access. Most API slowness traces back to inefficient database queries. + +**Incorrect (over-fetching data and missing indexes):** + +```typescript +// Select everything when you need few fields +@Injectable() +export class UsersService { + async findAllEmails(): Promise { + const users = await this.repo.find(); + // Fetches ALL columns for ALL users + return users.map((u) => u.email); + } + + async getUserSummary(id: string): Promise { + const user = await this.repo.findOne({ + where: { id }, + relations: ['posts', 'posts.comments', 'posts.comments.author', 'followers'], + }); + // Over-fetches massive relation tree + return { name: user.name, postCount: user.posts.length }; + } +} + +// No indexes on frequently queried columns +@Entity() +export class Order { + @Column() + userId: string; // No index - full table scan on every lookup + + @Column() + status: string; // No index - slow status filtering +} +``` + +**Correct (select only needed data with proper indexes):** + +```typescript +// Select only needed columns +@Injectable() +export class UsersService { + async findAllEmails(): Promise { + const users = await this.repo.find({ + select: ['email'], // Only fetch email column + }); + return users.map((u) => u.email); + } + + // Use QueryBuilder for complex selections + async getUserSummary(id: string): Promise { + return this.repo + .createQueryBuilder('user') + .select('user.name', 'name') + .addSelect('COUNT(post.id)', 'postCount') + .leftJoin('user.posts', 'post') + .where('user.id = :id', { id }) + .groupBy('user.id') + .getRawOne(); + } + + // Fetch relations only when needed + async getFullProfile(id: string): Promise { + return this.repo.findOne({ + where: { id }, + relations: ['posts'], // Only immediate relation + select: { + id: true, + name: true, + email: true, + posts: { + id: true, + title: true, + }, + }, + }); + } +} + +// Add indexes on frequently queried columns +@Entity() +@Index(['userId']) +@Index(['status']) 
+@Index(['createdAt']) +@Index(['userId', 'status']) // Composite index for common query pattern +export class Order { + @PrimaryGeneratedColumn('uuid') + id: string; + + @Column() + userId: string; + + @Column() + status: string; + + @CreateDateColumn() + createdAt: Date; +} + +// Always paginate large datasets +@Injectable() +export class OrdersService { + async findAll(page = 1, limit = 20): Promise> { + const [items, total] = await this.repo.findAndCount({ + skip: (page - 1) * limit, + take: limit, + order: { createdAt: 'DESC' }, + }); + + return { + items, + meta: { + page, + limit, + total, + totalPages: Math.ceil(total / limit), + }, + }; + } +} +``` + +Reference: [TypeORM Query Builder](https://typeorm.io/select-query-builder) + +--- + +### 5.4 Use Caching Strategically + +**Impact: HIGH** — Dramatically reduces database load and response times + +Implement caching for expensive operations, frequently accessed data, and external API calls. Use NestJS CacheModule with appropriate TTLs and cache invalidation strategies. Don't cache everything - focus on high-impact areas. 
+ +**Incorrect (no caching or caching everything):** + +```typescript +// No caching for expensive, repeated queries +@Injectable() +export class ProductsService { + async getPopular(): Promise { + // Runs complex aggregation query EVERY request + return this.productsRepo + .createQueryBuilder('p') + .leftJoin('p.orders', 'o') + .select('p.*, COUNT(o.id) as orderCount') + .groupBy('p.id') + .orderBy('orderCount', 'DESC') + .limit(20) + .getMany(); + } +} + +// Cache everything without thought +@Injectable() +export class UsersService { + @CacheKey('users') + @CacheTTL(3600) + @UseInterceptors(CacheInterceptor) + async findAll(): Promise { + // Caching user list for 1 hour is wrong if data changes frequently + return this.usersRepo.find(); + } +} +``` + +**Correct (strategic caching with proper invalidation):** + +```typescript +// Setup caching module +@Module({ + imports: [ + CacheModule.registerAsync({ + imports: [ConfigModule], + inject: [ConfigService], + useFactory: (config: ConfigService) => ({ + stores: [new KeyvRedis(config.get('REDIS_URL'))], + ttl: 60 * 1000, // Default 60s + }), + }), + ], +}) +export class AppModule {} + +// Manual caching for granular control +@Injectable() +export class ProductsService { + constructor( + @Inject(CACHE_MANAGER) private cache: Cache, + private productsRepo: ProductRepository + ) {} + + async getPopular(): Promise { + const cacheKey = 'products:popular'; + + // Try cache first + const cached = await this.cache.get(cacheKey); + if (cached) return cached; + + // Cache miss - fetch and cache + const products = await this.fetchPopularProducts(); + await this.cache.set(cacheKey, products, 5 * 60 * 1000); // 5 min TTL + return products; + } + + // Invalidate cache on changes + async updateProduct(id: string, dto: UpdateProductDto): Promise { + const product = await this.productsRepo.save({ id, ...dto }); + await this.cache.del('products:popular'); // Invalidate + return product; + } +} + +// Decorator-based caching with 
auto-interceptor +@Controller('categories') +@UseInterceptors(CacheInterceptor) +export class CategoriesController { + @Get() + @CacheTTL(30 * 60 * 1000) // 30 minutes - categories rarely change + findAll(): Promise { + return this.categoriesService.findAll(); + } + + @Get(':id') + @CacheTTL(60 * 1000) // 1 minute + @CacheKey('category') + findOne(@Param('id') id: string): Promise { + return this.categoriesService.findOne(id); + } +} + +// Event-based cache invalidation +@Injectable() +export class CacheInvalidationService { + constructor(@Inject(CACHE_MANAGER) private cache: Cache) {} + + @OnEvent('product.created') + @OnEvent('product.updated') + @OnEvent('product.deleted') + async invalidateProductCaches(event: ProductEvent) { + await Promise.all([this.cache.del('products:popular'), this.cache.del(`product:${event.productId}`)]); + } +} +``` + +Reference: [NestJS Caching](https://docs.nestjs.com/techniques/caching) + +--- + +## 6. Testing + +**Section Impact: MEDIUM-HIGH** + +### 6.1 Use Supertest for E2E Testing + +**Impact: HIGH** — Validates the full request/response cycle + +End-to-end tests use Supertest to make real HTTP requests against your NestJS application. They test the full stack including middleware, guards, pipes, and interceptors. E2E tests catch integration issues that unit tests miss. 
+ +**Incorrect (no proper E2E setup or teardown):** + +```typescript +// Only unit test controllers +describe('UsersController', () => { + it('should return users', async () => { + const service = { findAll: jest.fn().mockResolvedValue([]) }; + const controller = new UsersController(service as any); + + const result = await controller.findAll(); + + expect(result).toEqual([]); + // Doesn't test: routes, guards, pipes, serialization + }); +}); + +// E2E tests without proper setup/teardown +describe('Users API', () => { + it('should create user', async () => { + const app = await NestFactory.create(AppModule); + // No proper initialization + // No cleanup after test + // Hits real database + }); +}); +``` + +**Correct (proper E2E setup with Supertest):** + +```typescript +// Proper E2E test setup +import { Test, TestingModule } from '@nestjs/testing'; +import { INestApplication, ValidationPipe } from '@nestjs/common'; +import * as request from 'supertest'; +import { AppModule } from '../src/app.module'; + +describe('UsersController (e2e)', () => { + let app: INestApplication; + + beforeAll(async () => { + const moduleFixture: TestingModule = await Test.createTestingModule({ + imports: [AppModule], + }).compile(); + + app = moduleFixture.createNestApplication(); + + // Apply same config as production + app.useGlobalPipes( + new ValidationPipe({ + whitelist: true, + transform: true, + forbidNonWhitelisted: true, + }) + ); + + await app.init(); + }); + + afterAll(async () => { + await app.close(); + }); + + describe('/users (POST)', () => { + it('should create a user', () => { + return request(app.getHttpServer()) + .post('/users') + .send({ name: 'John', email: 'john@test.com' }) + .expect(201) + .expect((res) => { + expect(res.body).toHaveProperty('id'); + expect(res.body.name).toBe('John'); + expect(res.body.email).toBe('john@test.com'); + }); + }); + + it('should return 400 for invalid email', () => { + return request(app.getHttpServer()) + .post('/users') + .send({ 
name: 'John', email: 'invalid-email' }) + .expect(400) + .expect((res) => { + expect(res.body.message).toContain('email'); + }); + }); + }); + + describe('/users/:id (GET)', () => { + it('should return 404 for non-existent user', () => { + return request(app.getHttpServer()).get('/users/non-existent-id').expect(404); + }); + }); +}); + +// Testing with authentication +describe('Protected Routes (e2e)', () => { + let app: INestApplication; + let authToken: string; + + beforeAll(async () => { + const moduleFixture = await Test.createTestingModule({ + imports: [AppModule], + }).compile(); + + app = moduleFixture.createNestApplication(); + app.useGlobalPipes(new ValidationPipe({ whitelist: true })); + await app.init(); + + // Get auth token + const loginResponse = await request(app.getHttpServer()) + .post('/auth/login') + .send({ email: 'test@test.com', password: 'password' }); + + authToken = loginResponse.body.accessToken; + }); + + it('should return 401 without token', () => { + return request(app.getHttpServer()).get('/users/me').expect(401); + }); + + it('should return user profile with valid token', () => { + return request(app.getHttpServer()) + .get('/users/me') + .set('Authorization', `Bearer ${authToken}`) + .expect(200) + .expect((res) => { + expect(res.body.email).toBe('test@test.com'); + }); + }); +}); + +// Database isolation for E2E tests +describe('Orders API (e2e)', () => { + let app: INestApplication; + let dataSource: DataSource; + + beforeAll(async () => { + const moduleFixture = await Test.createTestingModule({ + imports: [ + ConfigModule.forRoot({ + envFilePath: '.env.test', // Test database config + }), + AppModule, + ], + }).compile(); + + app = moduleFixture.createNestApplication(); + dataSource = moduleFixture.get(DataSource); + await app.init(); + }); + + beforeEach(async () => { + // Clean database between tests + await dataSource.synchronize(true); + }); + + afterAll(async () => { + await dataSource.destroy(); + await app.close(); + }); 
+}); +``` + +Reference: [NestJS E2E Testing](https://docs.nestjs.com/fundamentals/testing#end-to-end-testing) + +--- + +### 6.2 Mock External Services in Tests + +**Impact: HIGH** — Ensures fast, reliable, deterministic tests + +Never call real external services (APIs, databases, message queues) in unit tests. Mock them to ensure tests are fast, deterministic, and don't incur costs. Use realistic mock data and test edge cases like timeouts and errors. + +**Incorrect (calling real APIs and databases):** + +```typescript +// Call real APIs in tests +describe('PaymentService', () => { + it('should process payment', async () => { + const service = new PaymentService(new StripeClient(realApiKey)); + // Hits real Stripe API! + const result = await service.charge('tok_visa', 1000); + // Slow, costs money, flaky + }); +}); + +// Use real database +describe('UsersService', () => { + beforeEach(async () => { + await connection.query('DELETE FROM users'); // Modifies real DB + }); + + it('should create user', async () => { + const user = await service.create({ email: 'test@test.com' }); + // Side effects on shared database + }); +}); + +// Incomplete mocks +const mockHttpService = { + get: jest.fn().mockResolvedValue({ data: {} }), + // Missing error scenarios, missing other methods +}; +``` + +**Correct (mock all external dependencies):** + +```typescript +// Mock HTTP service properly +describe('WeatherService', () => { + let service: WeatherService; + let httpService: jest.Mocked; + + beforeEach(async () => { + const module = await Test.createTestingModule({ + providers: [ + WeatherService, + { + provide: HttpService, + useValue: { + get: jest.fn(), + post: jest.fn(), + }, + }, + ], + }).compile(); + + service = module.get(WeatherService); + httpService = module.get(HttpService); + }); + + it('should return weather data', async () => { + const mockResponse = { + data: { temperature: 72, humidity: 45 }, + status: 200, + statusText: 'OK', + headers: {}, + config: {}, + }; + 
+ httpService.get.mockReturnValue(of(mockResponse)); + + const result = await service.getWeather('NYC'); + + expect(result).toEqual({ temperature: 72, humidity: 45 }); + }); + + it('should handle API timeout', async () => { + httpService.get.mockReturnValue(throwError(() => new Error('ETIMEDOUT'))); + + await expect(service.getWeather('NYC')).rejects.toThrow('Weather service unavailable'); + }); + + it('should handle rate limiting', async () => { + httpService.get.mockReturnValue( + throwError(() => ({ + response: { status: 429, data: { message: 'Rate limited' } }, + })) + ); + + await expect(service.getWeather('NYC')).rejects.toThrow(TooManyRequestsException); + }); +}); + +// Mock repository instead of database +describe('UsersService', () => { + let service: UsersService; + let repo: jest.Mocked>; + + beforeEach(async () => { + const mockRepo = { + find: jest.fn(), + findOne: jest.fn(), + save: jest.fn(), + delete: jest.fn(), + createQueryBuilder: jest.fn(), + }; + + const module = await Test.createTestingModule({ + providers: [UsersService, { provide: getRepositoryToken(User), useValue: mockRepo }], + }).compile(); + + service = module.get(UsersService); + repo = module.get(getRepositoryToken(User)); + }); + + it('should find user by id', async () => { + const mockUser = { id: '1', name: 'John', email: 'john@test.com' }; + repo.findOne.mockResolvedValue(mockUser); + + const result = await service.findById('1'); + + expect(result).toEqual(mockUser); + expect(repo.findOne).toHaveBeenCalledWith({ where: { id: '1' } }); + }); +}); + +// Create mock factory for complex SDKs +function createMockStripe(): jest.Mocked { + return { + paymentIntents: { + create: jest.fn(), + retrieve: jest.fn(), + confirm: jest.fn(), + cancel: jest.fn(), + }, + customers: { + create: jest.fn(), + retrieve: jest.fn(), + }, + } as any; +} + +// Mock time for time-dependent tests +describe('TokenService', () => { + beforeEach(() => { + jest.useFakeTimers(); + jest.setSystemTime(new 
Date('2024-01-15')); + }); + + afterEach(() => { + jest.useRealTimers(); + }); + + it('should expire token after 1 hour', async () => { + const token = await service.createToken(); + + // Fast-forward time + jest.advanceTimersByTime(61 * 60 * 1000); + + expect(await service.isValid(token)).toBe(false); + }); +}); +``` + +Reference: [Jest Mocking](https://jestjs.io/docs/mock-functions) + +--- + +### 6.3 Use Testing Module for Unit Tests + +**Impact: HIGH** — Enables proper isolated testing with mocked dependencies + +Use `@nestjs/testing` module to create isolated test environments with mocked dependencies. This ensures your tests run fast, don't depend on external services, and properly test your business logic in isolation. + +**Incorrect (manual instantiation bypassing DI):** + +```typescript +// Instantiate services manually without DI +describe('UsersService', () => { + it('should create user', async () => { + // Manual instantiation bypasses DI + const repo = new UserRepository(); // Real repo! + const service = new UsersService(repo); + + const user = await service.create({ name: 'Test' }); + // This hits the real database! 
+ }); +}); + +// Test implementation details +describe('UsersController', () => { + it('should call service', async () => { + const service = { create: jest.fn() }; + const controller = new UsersController(service as any); + + await controller.create({ name: 'Test' }); + + expect(service.create).toHaveBeenCalled(); // Tests implementation, not behavior + }); +}); +``` + +**Correct (use Test.createTestingModule with mocked dependencies):** + +```typescript +// Use Test.createTestingModule for proper DI +import { Test, TestingModule } from '@nestjs/testing'; + +describe('UsersService', () => { + let service: UsersService; + let repo: jest.Mocked; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + UsersService, + { + provide: UserRepository, + useValue: { + save: jest.fn(), + findOne: jest.fn(), + find: jest.fn(), + }, + }, + ], + }).compile(); + + service = module.get(UsersService); + repo = module.get(UserRepository); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + describe('create', () => { + it('should save and return user', async () => { + const dto = { name: 'John', email: 'john@test.com' }; + const expectedUser = { id: '1', ...dto }; + + repo.save.mockResolvedValue(expectedUser); + + const result = await service.create(dto); + + expect(result).toEqual(expectedUser); + expect(repo.save).toHaveBeenCalledWith(dto); + }); + + it('should throw on duplicate email', async () => { + repo.findOne.mockResolvedValue({ id: '1', email: 'test@test.com' }); + + await expect(service.create({ name: 'Test', email: 'test@test.com' })).rejects.toThrow(ConflictException); + }); + }); + + describe('findById', () => { + it('should return user when found', async () => { + const user = { id: '1', name: 'John' }; + repo.findOne.mockResolvedValue(user); + + const result = await service.findById('1'); + + expect(result).toEqual(user); + }); + + it('should throw NotFoundException when not found', async () => { + 
repo.findOne.mockResolvedValue(null); + + await expect(service.findById('999')).rejects.toThrow(NotFoundException); + }); + }); +}); + +// Testing guards and interceptors +describe('RolesGuard', () => { + let guard: RolesGuard; + let reflector: Reflector; + + beforeEach(async () => { + const module = await Test.createTestingModule({ + providers: [RolesGuard, Reflector], + }).compile(); + + guard = module.get(RolesGuard); + reflector = module.get(Reflector); + }); + + it('should allow when no roles required', () => { + const context = createMockExecutionContext({ user: { roles: [] } }); + jest.spyOn(reflector, 'getAllAndOverride').mockReturnValue(undefined); + + expect(guard.canActivate(context)).toBe(true); + }); + + it('should allow admin for admin-only route', () => { + const context = createMockExecutionContext({ user: { roles: ['admin'] } }); + jest.spyOn(reflector, 'getAllAndOverride').mockReturnValue(['admin']); + + expect(guard.canActivate(context)).toBe(true); + }); +}); + +function createMockExecutionContext(request: Partial<Request>): ExecutionContext { + return { + switchToHttp: () => ({ + getRequest: () => request, + }), + getHandler: () => jest.fn(), + getClass: () => jest.fn(), + } as ExecutionContext; +} +``` + +Reference: [NestJS Testing](https://docs.nestjs.com/fundamentals/testing) + +--- + +## 7. Database & ORM + +**Section Impact: MEDIUM-HIGH** + +### 7.1 Avoid N+1 Query Problems + +**Impact: HIGH** — N+1 queries are one of the most common performance killers + +N+1 queries occur when you fetch a list of entities, then make an additional query for each entity to load related data. Use eager loading with `relations`, query builder joins, or DataLoader to batch queries efficiently. 
+ +**Incorrect (lazy loading in loops causes N+1):** + +```typescript +// Lazy loading in loops causes N+1 +@Injectable() +export class OrdersService { + async getOrdersWithItems(userId: string): Promise<Order[]> { + const orders = await this.orderRepo.find({ where: { userId } }); + // 1 query for orders + + for (const order of orders) { + // N additional queries - one per order! + order.items = await this.itemRepo.find({ where: { orderId: order.id } }); + } + + return orders; + } +} + +// Accessing lazy relations without loading +@Controller('users') +export class UsersController { + @Get() + async findAll(): Promise<User[]> { + const users = await this.userRepo.find(); + // If User.posts is lazy-loaded, serializing triggers N queries + return users; // Each user.posts access = 1 query + } +} +``` + +**Correct (use relations for eager loading):** + +```typescript +// Use relations option for eager loading +@Injectable() +export class OrdersService { + async getOrdersWithItems(userId: string): Promise<Order[]> { + // Single query with JOIN + return this.orderRepo.find({ + where: { userId }, + relations: ['items', 'items.product'], + }); + } +} + +// Use QueryBuilder for complex joins +@Injectable() +export class UsersService { + async getUsersWithPostCounts(): Promise<any[]> { + return this.userRepo + .createQueryBuilder('user') + .leftJoin('user.posts', 'post') + .select('user.id', 'id') + .addSelect('user.name', 'name') + .addSelect('COUNT(post.id)', 'postCount') + .groupBy('user.id') + .getRawMany(); + } + + async getActiveUsersWithPosts(): Promise<User[]> { + return this.userRepo + .createQueryBuilder('user') + .leftJoinAndSelect('user.posts', 'post') + .leftJoinAndSelect('post.comments', 'comment') + .where('user.isActive = :active', { active: true }) + .andWhere('post.status = :status', { status: 'published' }) + .getMany(); + } +} + +// Use find options for specific fields +async getOrderSummaries(userId: string): Promise<Order[]> { + return this.orderRepo.find({ + where: { userId }, + relations: ['items'], 
+ select: { + id: true, + total: true, + status: true, + items: { + id: true, + quantity: true, + price: true, + }, + }, + }); +} + +// Use DataLoader for GraphQL to batch and cache queries +import DataLoader from 'dataloader'; + +@Injectable({ scope: Scope.REQUEST }) +export class PostsLoader { + constructor(private postsService: PostsService) {} + + readonly batchPosts = new DataLoader(async (userIds) => { + // Single query for all users' posts + const posts = await this.postsService.findByUserIds([...userIds]); + + // Group by userId + const postsMap = new Map(); + for (const post of posts) { + const userPosts = postsMap.get(post.userId) || []; + userPosts.push(post); + postsMap.set(post.userId, userPosts); + } + + // Return in same order as input + return userIds.map((id) => postsMap.get(id) || []); + }); +} + +// In resolver +@ResolveField() +async posts(@Parent() user: User): Promise { + // DataLoader batches multiple calls into single query + return this.postsLoader.batchPosts.load(user.id); +} + +// Enable query logging in development to detect N+1 +TypeOrmModule.forRoot({ + logging: ['query', 'error'], + logger: 'advanced-console', +}); +``` + +Reference: [TypeORM Relations](https://typeorm.io/relations) + +--- + +### 7.2 Use Database Migrations + +**Impact: HIGH** — Enables safe, repeatable database schema changes + +Never use `synchronize: true` in production. Use migrations for all schema changes. Migrations provide version control for your database, enable safe rollbacks, and ensure consistency across all environments. + +**Incorrect (using synchronize or manual SQL):** + +```typescript +// Use synchronize in production +TypeOrmModule.forRoot({ + type: 'postgres', + synchronize: true, // DANGEROUS in production! 
+ // Can drop columns, tables, or data +}); + +// Manual SQL in production +@Injectable() +export class DatabaseService { + async addColumn(): Promise { + await this.dataSource.query('ALTER TABLE users ADD COLUMN age INT'); + // No version control, no rollback, inconsistent across envs + } +} + +// Modify entities without migration +@Entity() +export class User { + @Column() + email: string; + + @Column() // Added without migration + newField: string; // Will crash in production if synchronize is false +} +``` + +**Correct (use migrations for all schema changes):** + +```typescript +// Configure TypeORM for migrations +// data-source.ts +export const dataSource = new DataSource({ + type: 'postgres', + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT), + username: process.env.DB_USERNAME, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, + entities: ['dist/**/*.entity.js'], + migrations: ['dist/migrations/*.js'], + synchronize: false, // Always false in production + migrationsRun: true, // Run migrations on startup +}); + +// app.module.ts +TypeOrmModule.forRootAsync({ + inject: [ConfigService], + useFactory: (config: ConfigService) => ({ + type: 'postgres', + host: config.get('DB_HOST'), + synchronize: config.get('NODE_ENV') === 'development', // Only in dev + migrations: ['dist/migrations/*.js'], + migrationsRun: true, + }), +}); + +// migrations/1705312800000-AddUserAge.ts +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddUserAge1705312800000 implements MigrationInterface { + name = 'AddUserAge1705312800000'; + + public async up(queryRunner: QueryRunner): Promise { + // Add column with default to handle existing rows + await queryRunner.query(` + ALTER TABLE "users" ADD "age" integer DEFAULT 0 + `); + + // Add index for frequently queried columns + await queryRunner.query(` + CREATE INDEX "IDX_users_age" ON "users" ("age") + `); + } + + public async down(queryRunner: QueryRunner): Promise { + // 
Always implement down for rollback + await queryRunner.query(`DROP INDEX "IDX_users_age"`); + await queryRunner.query(`ALTER TABLE "users" DROP COLUMN "age"`); + } +} + +// Safe column rename (two-step) +export class RenameNameToFullName1705312900000 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + // Step 1: Add new column + await queryRunner.query(` + ALTER TABLE "users" ADD "full_name" varchar(255) + `); + + // Step 2: Copy data + await queryRunner.query(` + UPDATE "users" SET "full_name" = "name" + `); + + // Step 3: Add NOT NULL constraint + await queryRunner.query(` + ALTER TABLE "users" ALTER COLUMN "full_name" SET NOT NULL + `); + + // Step 4: Drop old column (after verifying app works) + await queryRunner.query(` + ALTER TABLE "users" DROP COLUMN "name" + `); + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "users" ADD "name" varchar(255)`); + await queryRunner.query(`UPDATE "users" SET "name" = "full_name"`); + await queryRunner.query(`ALTER TABLE "users" DROP COLUMN "full_name"`); + } +} +``` + +Reference: [TypeORM Migrations](https://typeorm.io/migrations) + +--- + +### 7.3 Use Transactions for Multi-Step Operations + +**Impact: HIGH** — Ensures data consistency in multi-step operations + +When multiple database operations must succeed or fail together, wrap them in a transaction. This prevents partial updates that leave your data in an inconsistent state. Use TypeORM's transaction APIs or the DataSource query runner for complex scenarios. 
+ +**Incorrect (multiple saves without transaction):** + +```typescript +// Multiple saves without transaction +@Injectable() +export class OrdersService { + async createOrder(userId: string, items: OrderItem[]): Promise { + // If any step fails, data is inconsistent + const order = await this.orderRepo.save({ userId, status: 'pending' }); + + for (const item of items) { + await this.orderItemRepo.save({ orderId: order.id, ...item }); + await this.inventoryRepo.decrement({ productId: item.productId }, 'stock', item.quantity); + } + + await this.paymentService.charge(order.id); + // If payment fails, order and inventory are already modified! + + return order; + } +} +``` + +**Correct (use DataSource.transaction for automatic rollback):** + +```typescript +// Use DataSource.transaction() for automatic rollback +@Injectable() +export class OrdersService { + constructor(private dataSource: DataSource) {} + + async createOrder(userId: string, items: OrderItem[]): Promise { + return this.dataSource.transaction(async (manager) => { + // All operations use the same transactional manager + const order = await manager.save(Order, { userId, status: 'pending' }); + + for (const item of items) { + await manager.save(OrderItem, { orderId: order.id, ...item }); + await manager.decrement(Inventory, { productId: item.productId }, 'stock', item.quantity); + } + + // If this throws, everything rolls back + await this.paymentService.chargeWithManager(manager, order.id); + + return order; + }); + } +} + +// QueryRunner for manual transaction control +@Injectable() +export class TransferService { + constructor(private dataSource: DataSource) {} + + async transfer(fromId: string, toId: string, amount: number): Promise { + const queryRunner = this.dataSource.createQueryRunner(); + await queryRunner.connect(); + await queryRunner.startTransaction(); + + try { + // Debit source account + await queryRunner.manager.decrement(Account, { id: fromId }, 'balance', amount); + + // Verify 
sufficient funds + const source = await queryRunner.manager.findOne(Account, { + where: { id: fromId }, + }); + if (source.balance < 0) { + throw new BadRequestException('Insufficient funds'); + } + + // Credit destination account + await queryRunner.manager.increment(Account, { id: toId }, 'balance', amount); + + // Log the transaction + await queryRunner.manager.save(TransactionLog, { + fromId, + toId, + amount, + timestamp: new Date(), + }); + + await queryRunner.commitTransaction(); + } catch (error) { + await queryRunner.rollbackTransaction(); + throw error; + } finally { + await queryRunner.release(); + } + } +} + +// Repository method with transaction support +@Injectable() +export class UsersRepository { + constructor( + @InjectRepository(User) private repo: Repository, + private dataSource: DataSource + ) {} + + async createWithProfile(userData: CreateUserDto, profileData: CreateProfileDto): Promise { + return this.dataSource.transaction(async (manager) => { + const user = await manager.save(User, userData); + await manager.save(Profile, { ...profileData, userId: user.id }); + return user; + }); + } +} +``` + +Reference: [TypeORM Transactions](https://typeorm.io/transactions) + +--- + +## 8. API Design + +**Section Impact: MEDIUM** + +### 8.1 Use DTOs and Serialization for API Responses + +**Impact: MEDIUM** — Response DTOs prevent accidental data exposure and ensure consistency + +Never return entity objects directly from controllers. Use response DTOs with class-transformer's `@Exclude()` and `@Expose()` decorators to control exactly what data is sent to clients. This prevents accidental exposure of sensitive fields and provides a stable API contract. 
+ +**Incorrect (returning entities directly or manual spreading):** + +```typescript +// Return entities directly +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + return this.usersService.findById(id); + // Returns: { id, email, passwordHash, ssn, internalNotes, ... } + // Exposes sensitive data! + } +} + +// Manual object spreading (error-prone) +@Get(':id') +async findOne(@Param('id') id: string) { + const user = await this.usersService.findById(id); + return { + id: user.id, + email: user.email, + name: user.name, + // Easy to forget to exclude sensitive fields + // Hard to maintain across endpoints + }; +} +``` + +**Correct (use class-transformer with @Exclude and response DTOs):** + +```typescript +// Enable class-transformer globally +async function bootstrap() { + const app = await NestFactory.create(AppModule); + app.useGlobalInterceptors(new ClassSerializerInterceptor(app.get(Reflector))); + await app.listen(3000); +} + +// Entity with serialization control +@Entity() +export class User { + @PrimaryGeneratedColumn('uuid') + id: string; + + @Column() + email: string; + + @Column() + name: string; + + @Column() + @Exclude() // Never include in responses + passwordHash: string; + + @Column({ nullable: true }) + @Exclude() + ssn: string; + + @Column({ default: false }) + @Exclude({ toPlainOnly: true }) // Exclude from response, allow in requests + isAdmin: boolean; + + @CreateDateColumn() + createdAt: Date; + + @Column() + @Exclude() + internalNotes: string; +} + +// Now returning entity is safe +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + return this.usersService.findById(id); + // Returns: { id, email, name, createdAt } + // Sensitive fields excluded automatically + } +} + +// For different response shapes, use explicit DTOs +export class UserResponseDto { + @Expose() + id: string; + + @Expose() + email: 
string; + + @Expose() + name: string; + + @Expose() + @Transform(({ obj }) => obj.posts?.length || 0) + postCount: number; + + constructor(partial: Partial) { + Object.assign(this, partial); + } +} + +export class UserDetailResponseDto extends UserResponseDto { + @Expose() + createdAt: Date; + + @Expose() + @Type(() => PostResponseDto) + posts: PostResponseDto[]; +} + +// Controller with explicit DTOs +@Controller('users') +export class UsersController { + @Get() + @SerializeOptions({ type: UserResponseDto }) + async findAll(): Promise { + const users = await this.usersService.findAll(); + return users.map((u) => plainToInstance(UserResponseDto, u)); + } + + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const user = await this.usersService.findByIdWithPosts(id); + return plainToInstance(UserDetailResponseDto, user, { + excludeExtraneousValues: true, + }); + } +} + +// Groups for conditional serialization +export class UserDto { + @Expose() + id: string; + + @Expose() + name: string; + + @Expose({ groups: ['admin'] }) + email: string; + + @Expose({ groups: ['admin'] }) + createdAt: Date; + + @Expose({ groups: ['admin', 'owner'] }) + settings: UserSettings; +} + +@Controller('users') +export class UsersController { + @Get() + @SerializeOptions({ groups: ['public'] }) + async findAllPublic(): Promise { + // Returns: { id, name } + } + + @Get('admin') + @UseGuards(AdminGuard) + @SerializeOptions({ groups: ['admin'] }) + async findAllAdmin(): Promise { + // Returns: { id, name, email, createdAt } + } + + @Get('me') + @SerializeOptions({ groups: ['owner'] }) + async getProfile(@CurrentUser() user: User): Promise { + // Returns: { id, name, settings } + } +} +``` + +Reference: [NestJS Serialization](https://docs.nestjs.com/techniques/serialization) + +--- + +### 8.2 Use Interceptors for Cross-Cutting Concerns + +**Impact: MEDIUM-HIGH** — Interceptors provide clean separation for cross-cutting logic + +Interceptors can transform responses, add logging, 
handle caching, and measure performance without polluting your business logic. They wrap the route handler execution, giving you access to both the request and response streams. + +**Incorrect (logging and transformation in every method):** + +```typescript +// Logging in every controller method +@Controller('users') +export class UsersController { + @Get() + async findAll(): Promise { + const start = Date.now(); + this.logger.log('findAll called'); + + const users = await this.usersService.findAll(); + + this.logger.log(`findAll completed in ${Date.now() - start}ms`); + return users; + } + + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const start = Date.now(); + this.logger.log(`findOne called with id: ${id}`); + + const user = await this.usersService.findOne(id); + + this.logger.log(`findOne completed in ${Date.now() - start}ms`); + return user; + } + // Repeated in every method! +} + +// Manual response wrapping +@Get() +async findAll(): Promise<{ data: User[]; meta: Meta }> { + const users = await this.usersService.findAll(); + return { + data: users, + meta: { timestamp: new Date(), count: users.length }, + }; +} +``` + +**Correct (use interceptors for cross-cutting concerns):** + +```typescript +// Logging interceptor +@Injectable() +export class LoggingInterceptor implements NestInterceptor { + private readonly logger = new Logger('HTTP'); + + intercept(context: ExecutionContext, next: CallHandler): Observable { + const request = context.switchToHttp().getRequest(); + const { method, url, body } = request; + const now = Date.now(); + + return next.handle().pipe( + tap({ + next: (data) => { + const response = context.switchToHttp().getResponse(); + this.logger.log( + `${method} ${url} ${response.statusCode} - ${Date.now() - now}ms`, + ); + }, + error: (error) => { + this.logger.error( + `${method} ${url} ${error.status || 500} - ${Date.now() - now}ms`, + error.stack, + ); + }, + }), + ); + } +} + +// Response transformation interceptor 
+@Injectable() +export class TransformInterceptor implements NestInterceptor> { + intercept(context: ExecutionContext, next: CallHandler): Observable> { + return next.handle().pipe( + map((data) => ({ + data, + meta: { + timestamp: new Date().toISOString(), + path: context.switchToHttp().getRequest().url, + }, + })), + ); + } +} + +// Timeout interceptor +@Injectable() +export class TimeoutInterceptor implements NestInterceptor { + intercept(context: ExecutionContext, next: CallHandler): Observable { + return next.handle().pipe( + timeout(5000), + catchError((err) => { + if (err instanceof TimeoutError) { + throw new RequestTimeoutException('Request timed out'); + } + throw err; + }), + ); + } +} + +// Apply globally or per-controller +@Module({ + providers: [ + { provide: APP_INTERCEPTOR, useClass: LoggingInterceptor }, + { provide: APP_INTERCEPTOR, useClass: TransformInterceptor }, + ], +}) +export class AppModule {} + +// Or per-controller +@Controller('users') +@UseInterceptors(LoggingInterceptor) +export class UsersController { + @Get() + async findAll(): Promise { + // Clean business logic only + return this.usersService.findAll(); + } +} + +// Custom cache interceptor with TTL +@Injectable() +export class HttpCacheInterceptor implements NestInterceptor { + constructor( + private cacheManager: Cache, + private reflector: Reflector, + ) {} + + async intercept(context: ExecutionContext, next: CallHandler): Promise> { + const request = context.switchToHttp().getRequest(); + + // Only cache GET requests + if (request.method !== 'GET') { + return next.handle(); + } + + const cacheKey = this.generateKey(request); + const ttl = this.reflector.get('cacheTTL', context.getHandler()) || 300; + + const cached = await this.cacheManager.get(cacheKey); + if (cached) { + return of(cached); + } + + return next.handle().pipe( + tap((response) => { + this.cacheManager.set(cacheKey, response, ttl); + }), + ); + } + + private generateKey(request: Request): string { + return 
`cache:${request.url}:${JSON.stringify(request.query)}`; + } +} + +// Usage with custom TTL +@Get() +@SetMetadata('cacheTTL', 600) +@UseInterceptors(HttpCacheInterceptor) +async findAll(): Promise { + return this.usersService.findAll(); +} + +// Error mapping interceptor +@Injectable() +export class ErrorMappingInterceptor implements NestInterceptor { + intercept(context: ExecutionContext, next: CallHandler): Observable { + return next.handle().pipe( + catchError((error) => { + if (error instanceof EntityNotFoundError) { + throw new NotFoundException(error.message); + } + if (error instanceof QueryFailedError) { + if (error.message.includes('duplicate')) { + throw new ConflictException('Resource already exists'); + } + } + throw error; + }), + ); + } +} +``` + +Reference: [NestJS Interceptors](https://docs.nestjs.com/interceptors) + +--- + +### 8.3 Use Pipes for Input Transformation + +**Impact: MEDIUM** — Pipes ensure clean, validated data reaches your handlers + +Use built-in pipes like `ParseIntPipe`, `ParseUUIDPipe`, and `DefaultValuePipe` for common transformations. Create custom pipes for business-specific transformations. Pipes separate validation/transformation logic from controllers. 
+ +**Incorrect (manual type parsing in handlers):** + +```typescript +// Manual type parsing in handlers +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + // Manual validation in every handler + const uuid = id.trim(); + if (!isUUID(uuid)) { + throw new BadRequestException('Invalid UUID'); + } + return this.usersService.findOne(uuid); + } + + @Get() + async findAll( + @Query('page') page: string, + @Query('limit') limit: string, + ): Promise { + // Manual parsing and defaults + const pageNum = parseInt(page) || 1; + const limitNum = parseInt(limit) || 10; + return this.usersService.findAll(pageNum, limitNum); + } +} + +// Type coercion without validation +@Get() +async search(@Query('price') price: string): Promise { + const priceNum = +price; // NaN if invalid, no error + return this.productsService.findByPrice(priceNum); +} +``` + +**Correct (use built-in and custom pipes):** + +```typescript +// Use built-in pipes for common transformations +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id', ParseUUIDPipe) id: string): Promise { + // id is guaranteed to be a valid UUID + return this.usersService.findOne(id); + } + + @Get() + async findAll( + @Query('page', new DefaultValuePipe(1), ParseIntPipe) page: number, + @Query('limit', new DefaultValuePipe(10), ParseIntPipe) limit: number, + ): Promise { + // Automatic defaults and type conversion + return this.usersService.findAll(page, limit); + } + + @Get('by-status/:status') + async findByStatus( + @Param('status', new ParseEnumPipe(UserStatus)) status: UserStatus, + ): Promise { + return this.usersService.findByStatus(status); + } +} + +// Custom pipe for business logic +@Injectable() +export class ParseDatePipe implements PipeTransform { + transform(value: string): Date { + const date = new Date(value); + if (isNaN(date.getTime())) { + throw new BadRequestException('Invalid date format'); + } + return 
date; + } +} + +@Get('reports') +async getReports( + @Query('from', ParseDatePipe) from: Date, + @Query('to', ParseDatePipe) to: Date, +): Promise { + return this.reportsService.findBetween(from, to); +} + +// Custom transformation pipes +@Injectable() +export class NormalizeEmailPipe implements PipeTransform { + transform(value: string): string { + if (!value) return value; + return value.trim().toLowerCase(); + } +} + +// Parse comma-separated values +@Injectable() +export class ParseArrayPipe implements PipeTransform { + transform(value: string): string[] { + if (!value) return []; + return value.split(',').map((v) => v.trim()).filter(Boolean); + } +} + +@Get('products') +async findProducts( + @Query('ids', ParseArrayPipe) ids: string[], + @Query('email', NormalizeEmailPipe) email: string, +): Promise { + // ids is already an array, email is normalized + return this.productsService.findByIds(ids); +} + +// Sanitize HTML input +@Injectable() +export class SanitizeHtmlPipe implements PipeTransform { + transform(value: string): string { + if (!value) return value; + return sanitizeHtml(value, { allowedTags: [] }); + } +} + +// Global validation pipe with transformation +app.useGlobalPipes( + new ValidationPipe({ + whitelist: true, // Strip non-DTO properties + transform: true, // Auto-transform to DTO types + transformOptions: { + enableImplicitConversion: true, // Convert query strings to numbers + }, + forbidNonWhitelisted: true, // Throw on extra properties + }), +); + +// DTO with transformation decorators +export class FindProductsDto { + @IsOptional() + @Type(() => Number) + @IsInt() + @Min(1) + page?: number = 1; + + @IsOptional() + @Type(() => Number) + @IsInt() + @Min(1) + @Max(100) + limit?: number = 10; + + @IsOptional() + @Transform(({ value }) => value?.toLowerCase()) + @IsString() + search?: string; + + @IsOptional() + @Transform(({ value }) => value?.split(',')) + @IsArray() + @IsString({ each: true }) + categories?: string[]; +} + +@Get() +async 
findAll(@Query() dto: FindProductsDto): Promise { + // dto is already transformed and validated + return this.productsService.findAll(dto); +} + +// Pipe error customization +@Injectable() +export class CustomParseIntPipe extends ParseIntPipe { + constructor() { + super({ + exceptionFactory: (error) => + new BadRequestException(`${error} must be a valid integer`), + }); + } +} + +// Or use options on built-in pipes +@Get(':id') +async findOne( + @Param( + 'id', + new ParseIntPipe({ + errorHttpStatusCode: HttpStatus.NOT_ACCEPTABLE, + exceptionFactory: () => new NotAcceptableException('ID must be numeric'), + }), + ) + id: number, +): Promise { + return this.itemsService.findOne(id); +} +``` + +Reference: [NestJS Pipes](https://docs.nestjs.com/pipes) + +--- + +### 8.4 Use API Versioning for Breaking Changes + +**Impact: MEDIUM** — Versioning allows you to evolve APIs without breaking existing clients + +Use NestJS built-in versioning when making breaking changes to your API. Choose a versioning strategy (URI, header, or media type) and apply it consistently. This allows old clients to continue working while new clients use updated endpoints. + +**Incorrect (breaking changes without versioning):** + +```typescript +// Breaking changes without versioning +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + // Original response: { id, name, email } + // Later changed to: { id, firstName, lastName, emailAddress } + // Old clients break! 
+ return this.usersService.findOne(id); + } +} + +// Manual versioning in routes +@Controller('v1/users') +export class UsersV1Controller {} + +@Controller('v2/users') +export class UsersV2Controller {} +// Inconsistent, error-prone, hard to maintain +``` + +**Correct (use NestJS built-in versioning):** + +```typescript +// Enable versioning in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule); + + // URI versioning: /v1/users, /v2/users + app.enableVersioning({ + type: VersioningType.URI, + defaultVersion: '1', + }); + + // Or header versioning: X-API-Version: 1 + app.enableVersioning({ + type: VersioningType.HEADER, + header: 'X-API-Version', + defaultVersion: '1', + }); + + // Or media type: Accept: application/json;v=1 + app.enableVersioning({ + type: VersioningType.MEDIA_TYPE, + key: 'v=', + defaultVersion: '1', + }); + + await app.listen(3000); +} + +// Version-specific controllers +@Controller('users') +@Version('1') +export class UsersV1Controller { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const user = await this.usersService.findOne(id); + // V1 response format + return { + id: user.id, + name: user.name, + email: user.email, + }; + } +} + +@Controller('users') +@Version('2') +export class UsersV2Controller { + @Get(':id') + async findOne(@Param('id') id: string): Promise { + const user = await this.usersService.findOne(id); + // V2 response format with breaking changes + return { + id: user.id, + firstName: user.firstName, + lastName: user.lastName, + emailAddress: user.email, + createdAt: user.createdAt, + }; + } +} + +// Per-route versioning - different versions for different routes +@Controller('users') +export class UsersController { + @Get() + @Version('1') + findAllV1(): Promise { + return this.usersService.findAllV1(); + } + + @Get() + @Version('2') + findAllV2(): Promise { + return this.usersService.findAllV2(); + } + + @Get(':id') + @Version(['1', '2']) // Same handler for multiple 
versions + findOne(@Param('id') id: string): Promise { + return this.usersService.findOne(id); + } + + @Post() + @Version(VERSION_NEUTRAL) // Available in all versions + create(@Body() dto: CreateUserDto): Promise { + return this.usersService.create(dto); + } +} + +// Shared service with version-specific logic +@Injectable() +export class UsersService { + async findOne(id: string, version: string): Promise { + const user = await this.repo.findOne({ where: { id } }); + + if (version === '1') { + return this.toV1Response(user); + } + return this.toV2Response(user); + } + + private toV1Response(user: User): UserV1Response { + return { + id: user.id, + name: `${user.firstName} ${user.lastName}`, + email: user.email, + }; + } + + private toV2Response(user: User): UserV2Response { + return { + id: user.id, + firstName: user.firstName, + lastName: user.lastName, + emailAddress: user.email, + createdAt: user.createdAt, + }; + } +} + +// Controller extracts version +@Controller('users') +export class UsersController { + @Get(':id') + async findOne(@Param('id') id: string, @Headers('X-API-Version') version: string = '1'): Promise { + return this.usersService.findOne(id, version); + } +} + +// Deprecation strategy - mark old versions as deprecated +@Controller('users') +@Version('1') +@UseInterceptors(DeprecationInterceptor) +export class UsersV1Controller { + // All V1 routes will include deprecation warning +} + +@Injectable() +export class DeprecationInterceptor implements NestInterceptor { + intercept(context: ExecutionContext, next: CallHandler): Observable { + const response = context.switchToHttp().getResponse(); + response.setHeader('Deprecation', 'true'); + response.setHeader('Sunset', 'Sat, 1 Jan 2025 00:00:00 GMT'); + response.setHeader('Link', '; rel="successor-version"'); + + return next.handle(); + } +} +``` + +Reference: [NestJS Versioning](https://docs.nestjs.com/techniques/versioning) + +--- + +## 9. 
Microservices + +**Section Impact: MEDIUM** + +### 9.1 Implement Health Checks for Microservices + +**Impact: MEDIUM-HIGH** — Health checks enable orchestrators to manage service lifecycle + +Implement liveness and readiness probes using `@nestjs/terminus`. Liveness checks determine if the service should be restarted. Readiness checks determine if the service can accept traffic. Proper health checks enable Kubernetes and load balancers to route traffic correctly. + +**Incorrect (simple ping that doesn't check dependencies):** + +```typescript +// Simple ping that doesn't check dependencies +@Controller('health') +export class HealthController { + @Get() + check(): string { + return 'OK'; // Service might be unhealthy but returns OK + } +} + +// Health check that blocks on slow dependencies +@Controller('health') +export class HealthController { + @Get() + async check(): Promise { + // If database is slow, health check times out + await this.userRepo.findOne({ where: { id: '1' } }); + await this.redis.ping(); + await this.externalApi.healthCheck(); + return 'OK'; + } +} +``` + +**Correct (use @nestjs/terminus for comprehensive health checks):** + +```typescript +// Use @nestjs/terminus for comprehensive health checks +import { + HealthCheckService, + HttpHealthIndicator, + TypeOrmHealthIndicator, + HealthCheck, + DiskHealthIndicator, + MemoryHealthIndicator, +} from '@nestjs/terminus'; + +@Controller('health') +export class HealthController { + constructor( + private health: HealthCheckService, + private http: HttpHealthIndicator, + private db: TypeOrmHealthIndicator, + private disk: DiskHealthIndicator, + private memory: MemoryHealthIndicator, + ) {} + + // Liveness probe - is the service alive? + @Get('live') + @HealthCheck() + liveness() { + return this.health.check([ + // Basic checks only + () => this.memory.checkHeap('memory_heap', 200 * 1024 * 1024), // 200MB + ]); + } + + // Readiness probe - can the service handle traffic? 
+ @Get('ready') + @HealthCheck() + readiness() { + return this.health.check([ + () => this.db.pingCheck('database'), + () => + this.http.pingCheck('redis', 'http://redis:6379', { timeout: 1000 }), + () => + this.disk.checkStorage('disk', { path: '/', thresholdPercent: 0.9 }), + ]); + } + + // Deep health check for debugging + @Get('deep') + @HealthCheck() + deepCheck() { + return this.health.check([ + () => this.db.pingCheck('database'), + () => this.memory.checkHeap('memory_heap', 200 * 1024 * 1024), + () => this.memory.checkRSS('memory_rss', 300 * 1024 * 1024), + () => + this.disk.checkStorage('disk', { path: '/', thresholdPercent: 0.9 }), + () => + this.http.pingCheck('external-api', 'https://api.example.com/health'), + ]); + } +} + +// Custom indicator for business-specific health +@Injectable() +export class QueueHealthIndicator extends HealthIndicator { + constructor(private queueService: QueueService) { + super(); + } + + async isHealthy(key: string): Promise { + const queueStats = await this.queueService.getStats(); + + const isHealthy = queueStats.failedCount < 100; + const result = this.getStatus(key, isHealthy, { + waiting: queueStats.waitingCount, + active: queueStats.activeCount, + failed: queueStats.failedCount, + }); + + if (!isHealthy) { + throw new HealthCheckError('Queue unhealthy', result); + } + + return result; + } +} + +// Redis health indicator +@Injectable() +export class RedisHealthIndicator extends HealthIndicator { + constructor(@InjectRedis() private redis: Redis) { + super(); + } + + async isHealthy(key: string): Promise { + try { + const pong = await this.redis.ping(); + return this.getStatus(key, pong === 'PONG'); + } catch (error) { + throw new HealthCheckError('Redis check failed', this.getStatus(key, false)); + } + } +} + +// Use custom indicators +@Get('ready') +@HealthCheck() +readiness() { + return this.health.check([ + () => this.db.pingCheck('database'), + () => this.redis.isHealthy('redis'), + () => 
this.queue.isHealthy('job-queue'), + ]); +} + +// Graceful shutdown handling +@Injectable() +export class GracefulShutdownService implements OnApplicationShutdown { + private isShuttingDown = false; + + isShutdown(): boolean { + return this.isShuttingDown; + } + + async onApplicationShutdown(signal: string): Promise { + this.isShuttingDown = true; + console.log(`Shutting down on ${signal}`); + + // Wait for in-flight requests + await new Promise((resolve) => setTimeout(resolve, 5000)); + } +} + +// Health check respects shutdown state +@Get('ready') +@HealthCheck() +readiness() { + if (this.shutdownService.isShutdown()) { + throw new ServiceUnavailableException('Shutting down'); + } + + return this.health.check([ + () => this.db.pingCheck('database'), + ]); +} +``` + +### Kubernetes Configuration + +```yaml +# Kubernetes deployment with probes +apiVersion: apps/v1 +kind: Deployment +metadata: + name: api-service +spec: + template: + spec: + containers: + - name: api + image: api-service:latest + ports: + - containerPort: 3000 + livenessProbe: + httpGet: + path: /health/live + port: 3000 + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + readinessProbe: + httpGet: + path: /health/ready + port: 3000 + initialDelaySeconds: 5 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + startupProbe: + httpGet: + path: /health/live + port: 3000 + initialDelaySeconds: 0 + periodSeconds: 5 + failureThreshold: 30 +``` + +Reference: [NestJS Terminus](https://docs.nestjs.com/recipes/terminus) + +--- + +### 9.2 Use Message and Event Patterns Correctly + +**Impact: MEDIUM** — Proper patterns ensure reliable microservice communication + +NestJS microservices support two communication patterns: request-response (MessagePattern) and event-based (EventPattern). Use MessagePattern when you need a response, and EventPattern for fire-and-forget notifications. Understanding the difference prevents communication bugs. 
+ +**Incorrect (using wrong pattern for use case):** + +```typescript +// Use @MessagePattern for fire-and-forget +@Controller() +export class NotificationsController { + @MessagePattern('user.created') + async handleUserCreated(data: UserCreatedEvent) { + // This WAITS for response, blocking the sender + await this.emailService.sendWelcome(data.email); + // If email fails, sender gets an error (coupling!) + } +} + +// Use @EventPattern expecting a response +@Controller() +export class OrdersController { + @EventPattern('inventory.check') + async checkInventory(data: CheckInventoryDto) { + const available = await this.inventory.check(data); + return available; // This return value is IGNORED with @EventPattern! + } +} + +// Tight coupling in client +@Injectable() +export class UsersService { + async createUser(dto: CreateUserDto): Promise { + const user = await this.repo.save(dto); + + // Blocks until notification service responds + await this.client.send('user.created', user).toPromise(); + // If notification service is down, user creation fails! 
+ + return user; + } +} +``` + +**Correct (use MessagePattern for request-response, EventPattern for fire-and-forget):** + +```typescript +// MessagePattern: Request-Response (when you NEED a response) +@Controller() +export class InventoryController { + @MessagePattern({ cmd: 'check_inventory' }) + async checkInventory(data: CheckInventoryDto): Promise { + const result = await this.inventoryService.check(data.productId, data.quantity); + return result; // Response sent back to caller + } +} + +// Client expects response +@Injectable() +export class OrdersService { + async createOrder(dto: CreateOrderDto): Promise { + // Check inventory - we NEED this response to proceed + const inventory = await firstValueFrom( + this.inventoryClient.send( + { cmd: 'check_inventory' }, + { productId: dto.productId, quantity: dto.quantity }, + ), + ); + + if (!inventory.available) { + throw new BadRequestException('Insufficient inventory'); + } + + return this.repo.save(dto); + } +} + +// EventPattern: Fire-and-Forget (for notifications, side effects) +@Controller() +export class NotificationsController { + @EventPattern('user.created') + async handleUserCreated(data: UserCreatedEvent): Promise { + // No return value needed - just process the event + await this.emailService.sendWelcome(data.email); + await this.analyticsService.track('user_signup', data); + // If this fails, it doesn't affect the sender + } +} + +// Client emits event without waiting +@Injectable() +export class UsersService { + async createUser(dto: CreateUserDto): Promise { + const user = await this.repo.save(dto); + + // Fire and forget - doesn't block, doesn't wait + this.eventClient.emit('user.created', { + userId: user.id, + email: user.email, + timestamp: new Date(), + }); + + return user; // User creation succeeds regardless of event handling + } +} + +// Hybrid pattern for critical events +@Injectable() +export class OrdersService { + async createOrder(dto: CreateOrderDto): Promise { + const order = await 
this.repo.save(dto); + + // Critical: inventory reservation (use MessagePattern) + const reserved = await firstValueFrom( + this.inventoryClient.send({ cmd: 'reserve_inventory' }, { + orderId: order.id, + items: dto.items, + }), + ); + + if (!reserved.success) { + await this.repo.delete(order.id); + throw new BadRequestException('Could not reserve inventory'); + } + + // Non-critical: notifications (use EventPattern) + this.eventClient.emit('order.created', { + orderId: order.id, + userId: dto.userId, + total: dto.total, + }); + + return order; + } +} + +// Error handling patterns +// MessagePattern errors propagate to caller +@MessagePattern({ cmd: 'get_user' }) +async getUser(userId: string): Promise { + const user = await this.repo.findOne({ where: { id: userId } }); + if (!user) { + throw new RpcException('User not found'); // Received by caller + } + return user; +} + +// EventPattern errors should be handled locally +@EventPattern('order.created') +async handleOrderCreated(data: OrderCreatedEvent): Promise { + try { + await this.processOrder(data); + } catch (error) { + // Log and potentially retry - don't throw + this.logger.error('Failed to process order event', error); + await this.deadLetterQueue.add(data); + } +} +``` + +Reference: [NestJS Microservices](https://docs.nestjs.com/microservices/basics) + +--- + +### 9.3 Use Message Queues for Background Jobs + +**Impact: MEDIUM-HIGH** — Queues enable reliable background processing + +Use `@nestjs/bullmq` for background job processing. Queues decouple long-running tasks from HTTP requests, enable retry logic, and distribute workload across workers. Use them for emails, file processing, notifications, and any task that shouldn't block user requests. 
+ +**Incorrect (long-running tasks in HTTP handlers):** + +```typescript +// Long-running tasks in HTTP handlers +@Controller('reports') +export class ReportsController { + @Post() + async generate(@Body() dto: GenerateReportDto): Promise { + // This blocks the request for potentially minutes + const data = await this.fetchLargeDataset(dto); + const report = await this.processData(data); // Slow! + await this.sendEmail(dto.email, report); // Can fail! + return report; // Client times out + } +} + +// Fire-and-forget without retry +@Injectable() +export class EmailService { + async sendWelcome(email: string): Promise { + // If this fails, email is never sent + await this.mailer.send({ to: email, template: 'welcome' }); + // No retry, no tracking, no visibility + } +} + +// Use setInterval for scheduled tasks +setInterval(async () => { + await cleanupOldRecords(); +}, 60000); // No error handling, memory leaks +``` + +**Correct (use BullMQ for background processing):** + +```typescript +// Configure BullMQ +import { BullModule } from '@nestjs/bullmq'; + +@Module({ + imports: [ + BullModule.forRoot({ + connection: { + host: 'localhost', + port: 6379, + }, + defaultJobOptions: { + removeOnComplete: 1000, + removeOnFail: 5000, + attempts: 3, + backoff: { + type: 'exponential', + delay: 1000, + }, + }, + }), + BullModule.registerQueue({ name: 'email' }, { name: 'reports' }, { name: 'notifications' }), + ], +}) +export class QueueModule {} + +// Producer: Add jobs to queue +@Injectable() +export class ReportsService { + constructor(@InjectQueue('reports') private reportsQueue: Queue) {} + + async requestReport(dto: GenerateReportDto): Promise<{ jobId: string }> { + // Return immediately, process in background + const job = await this.reportsQueue.add('generate', dto, { + priority: dto.urgent ? 1 : 10, + delay: dto.scheduledFor ? 
Date.parse(dto.scheduledFor) - Date.now() : 0, + }); + + return { jobId: job.id }; + } + + async getJobStatus(jobId: string): Promise { + const job = await this.reportsQueue.getJob(jobId); + return { + status: await job.getState(), + progress: job.progress, + result: job.returnvalue, + }; + } +} + +// Consumer: Process jobs +@Processor('reports') +export class ReportsProcessor { + private readonly logger = new Logger(ReportsProcessor.name); + + @Process('generate') + async generateReport(job: Job): Promise { + this.logger.log(`Processing report job ${job.id}`); + + // Update progress + await job.updateProgress(10); + + const data = await this.fetchData(job.data); + await job.updateProgress(50); + + const report = await this.processData(data); + await job.updateProgress(90); + + await this.saveReport(report); + await job.updateProgress(100); + + return report; + } + + @OnQueueActive() + onActive(job: Job) { + this.logger.log(`Processing job ${job.id}`); + } + + @OnQueueCompleted() + onCompleted(job: Job, result: any) { + this.logger.log(`Job ${job.id} completed`); + } + + @OnQueueFailed() + onFailed(job: Job, error: Error) { + this.logger.error(`Job ${job.id} failed: ${error.message}`); + } +} + +// Email queue with retry +@Processor('email') +export class EmailProcessor { + @Process('send') + async sendEmail(job: Job): Promise { + const { to, template, data } = job.data; + + try { + await this.mailer.send({ + to, + template, + context: data, + }); + } catch (error) { + // BullMQ will retry based on job options + throw error; + } + } +} + +// Usage +@Injectable() +export class NotificationService { + constructor(@InjectQueue('email') private emailQueue: Queue) {} + + async sendWelcome(user: User): Promise { + await this.emailQueue.add( + 'send', + { + to: user.email, + template: 'welcome', + data: { name: user.name }, + }, + { + attempts: 5, + backoff: { type: 'exponential', delay: 5000 }, + } + ); + } +} + +// Scheduled jobs +@Injectable() +export class 
ScheduledJobsService implements OnModuleInit { + constructor(@InjectQueue('maintenance') private queue: Queue) {} + + async onModuleInit(): Promise { + // Clean up old reports daily at midnight + await this.queue.add( + 'cleanup', + {}, + { + repeat: { pattern: '0 0 * * *' }, + jobId: 'daily-cleanup', // Prevent duplicates + } + ); + + // Send digest every hour + await this.queue.add( + 'digest', + {}, + { + repeat: { every: 60 * 60 * 1000 }, + jobId: 'hourly-digest', + } + ); + } +} + +@Processor('maintenance') +export class MaintenanceProcessor { + @Process('cleanup') + async cleanup(): Promise { + await this.cleanupOldReports(); + await this.cleanupExpiredSessions(); + } + + @Process('digest') + async sendDigest(): Promise { + const users = await this.getUsersForDigest(); + for (const user of users) { + await this.emailQueue.add('send', { to: user.email, template: 'digest' }); + } + } +} + +// Queue monitoring with Bull Board +import { BullBoardModule } from '@bull-board/nestjs'; +import { BullMQAdapter } from '@bull-board/api/bullMQAdapter'; + +@Module({ + imports: [ + BullBoardModule.forRoot({ + route: '/admin/queues', + adapter: ExpressAdapter, + }), + BullBoardModule.forFeature({ + name: 'email', + adapter: BullMQAdapter, + }), + BullBoardModule.forFeature({ + name: 'reports', + adapter: BullMQAdapter, + }), + ], +}) +export class AdminModule {} +``` + +Reference: [NestJS Queues](https://docs.nestjs.com/techniques/queues) + +--- + +## 10. DevOps & Deployment + +**Section Impact: LOW-MEDIUM** + +### 10.1 Implement Graceful Shutdown + +**Impact: MEDIUM-HIGH** — Proper shutdown handling ensures zero-downtime deployments + +Handle SIGTERM and SIGINT signals to gracefully shutdown your NestJS application. Stop accepting new requests, wait for in-flight requests to complete, close database connections, and clean up resources. This prevents data loss and connection errors during deployments.
+ +**Incorrect (ignoring shutdown signals):** + +```typescript +// Ignore shutdown signals +async function bootstrap() { + const app = await NestFactory.create(AppModule); + await app.listen(3000); + // App crashes immediately on SIGTERM + // In-flight requests fail + // Database connections are abruptly closed +} + +// Long-running tasks without cancellation +@Injectable() +export class ProcessingService { + async processLargeFile(file: File): Promise { + // No way to interrupt this during shutdown + for (let i = 0; i < file.chunks.length; i++) { + await this.processChunk(file.chunks[i]); + // May run for minutes, blocking shutdown + } + } +} +``` + +**Correct (enable shutdown hooks and handle cleanup):** + +```typescript +// Enable shutdown hooks in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule); + + // Enable shutdown hooks + app.enableShutdownHooks(); + + // Optional: Add timeout for forced shutdown + const server = await app.listen(3000); + server.setTimeout(30000); // 30 second timeout + + // Handle graceful shutdown + const signals = ['SIGTERM', 'SIGINT']; + signals.forEach((signal) => { + process.on(signal, async () => { + console.log(`Received ${signal}, starting graceful shutdown...`); + + // Stop accepting new connections + server.close(async () => { + console.log('HTTP server closed'); + await app.close(); + process.exit(0); + }); + + // Force exit after timeout + setTimeout(() => { + console.error('Forced shutdown after timeout'); + process.exit(1); + }, 30000); + }); + }); +} + +// Lifecycle hooks for cleanup +@Injectable() +export class DatabaseService implements OnApplicationShutdown { + private readonly connections: Connection[] = []; + + async onApplicationShutdown(signal?: string): Promise { + console.log(`Database service shutting down on ${signal}`); + + // Close all connections gracefully + await Promise.all(this.connections.map((conn) => conn.close())); + + console.log('All database connections 
closed'); + } +} + +// Queue processor with graceful shutdown +@Injectable() +export class QueueService implements OnApplicationShutdown, OnModuleDestroy { + private isShuttingDown = false; + + onModuleDestroy(): void { + this.isShuttingDown = true; + } + + async onApplicationShutdown(): Promise { + // Wait for current jobs to complete + await this.queue.close(); + } + + async processJob(job: Job): Promise { + if (this.isShuttingDown) { + throw new Error('Service is shutting down'); + } + await this.doWork(job); + } +} + +// WebSocket gateway cleanup +@WebSocketGateway() +export class EventsGateway implements OnApplicationShutdown { + @WebSocketServer() + server: Server; + + async onApplicationShutdown(): Promise { + // Notify all connected clients + this.server.emit('shutdown', { message: 'Server is shutting down' }); + + // Close all connections + this.server.disconnectSockets(); + } +} + +// Health check integration +@Injectable() +export class ShutdownService { + private isShuttingDown = false; + + startShutdown(): void { + this.isShuttingDown = true; + } + + isShutdown(): boolean { + return this.isShuttingDown; + } +} + +@Controller('health') +export class HealthController { + constructor(private shutdownService: ShutdownService) {} + + @Get('ready') + @HealthCheck() + readiness(): Promise { + // Return 503 during shutdown - k8s stops sending traffic + if (this.shutdownService.isShutdown()) { + throw new ServiceUnavailableException('Shutting down'); + } + + return this.health.check([() => this.db.pingCheck('database')]); + } +} + +// Integrate with shutdown +@Injectable() +export class AppShutdownService implements OnApplicationShutdown { + constructor(private shutdownService: ShutdownService) {} + + async onApplicationShutdown(): Promise { + // Mark as unhealthy first + this.shutdownService.startShutdown(); + + // Wait for k8s to update endpoints + await this.sleep(5000); + + // Then proceed with cleanup + } +} + +// Request tracking for in-flight requests 
+@Injectable() +export class RequestTracker implements NestMiddleware, OnApplicationShutdown { + private activeRequests = 0; + private isShuttingDown = false; + private shutdownPromise: Promise | null = null; + private resolveShutdown: (() => void) | null = null; + + use(req: Request, res: Response, next: NextFunction): void { + if (this.isShuttingDown) { + res.status(503).send('Service Unavailable'); + return; + } + + this.activeRequests++; + + res.on('finish', () => { + this.activeRequests--; + if (this.isShuttingDown && this.activeRequests === 0 && this.resolveShutdown) { + this.resolveShutdown(); + } + }); + + next(); + } + + async onApplicationShutdown(): Promise { + this.isShuttingDown = true; + + if (this.activeRequests > 0) { + console.log(`Waiting for ${this.activeRequests} requests to complete`); + this.shutdownPromise = new Promise((resolve) => { + this.resolveShutdown = resolve; + }); + + // Wait with timeout + await Promise.race([this.shutdownPromise, new Promise((resolve) => setTimeout(resolve, 30000))]); + } + + console.log('All requests completed'); + } +} +``` + +Reference: [NestJS Lifecycle Events](https://docs.nestjs.com/fundamentals/lifecycle-events) + +--- + +### 10.2 Use ConfigModule for Environment Configuration + +**Impact: LOW-MEDIUM** — Proper configuration prevents deployment failures + +Use `@nestjs/config` for environment-based configuration. Validate configuration at startup to fail fast on misconfigurations. Use namespaced configuration for organization and type safety. 
+ +**Incorrect (accessing process.env directly):** + +```typescript +// Access process.env directly +@Injectable() +export class DatabaseService { + constructor() { + // No validation, can fail at runtime + this.connection = new Pool({ + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT), // NaN if missing + password: process.env.DB_PASSWORD, // undefined if missing + }); + } +} + +// Scattered env access +@Injectable() +export class EmailService { + sendEmail() { + // Different services access env differently + const apiKey = process.env.SENDGRID_API_KEY || 'default'; + // Typos go unnoticed: process.env.SENDGRID_API_KY + } +} +``` + +**Correct (use @nestjs/config with validation):** + +```typescript +// Setup validated configuration +import { ConfigModule, ConfigService, registerAs } from '@nestjs/config'; +import * as Joi from 'joi'; + +// config/database.config.ts +export const databaseConfig = registerAs('database', () => ({ + host: process.env.DB_HOST, + port: parseInt(process.env.DB_PORT, 10), + username: process.env.DB_USERNAME, + password: process.env.DB_PASSWORD, + database: process.env.DB_NAME, +})); + +// config/app.config.ts +export const appConfig = registerAs('app', () => ({ + port: parseInt(process.env.PORT, 10) || 3000, + environment: process.env.NODE_ENV || 'development', + apiPrefix: process.env.API_PREFIX || 'api', +})); + +// config/validation.schema.ts +export const validationSchema = Joi.object({ + NODE_ENV: Joi.string().valid('development', 'production', 'test').default('development'), + PORT: Joi.number().default(3000), + DB_HOST: Joi.string().required(), + DB_PORT: Joi.number().default(5432), + DB_USERNAME: Joi.string().required(), + DB_PASSWORD: Joi.string().required(), + DB_NAME: Joi.string().required(), + JWT_SECRET: Joi.string().min(32).required(), + REDIS_URL: Joi.string().uri().required(), +}); + +// app.module.ts +@Module({ + imports: [ + ConfigModule.forRoot({ + isGlobal: true, // Available everywhere without 
importing + load: [databaseConfig, appConfig], + validationSchema, + validationOptions: { + abortEarly: true, // Stop on first error + allowUnknown: true, // Allow other env vars + }, + }), + TypeOrmModule.forRootAsync({ + inject: [ConfigService], + useFactory: (config: ConfigService) => ({ + type: 'postgres', + host: config.get('database.host'), + port: config.get('database.port'), + username: config.get('database.username'), + password: config.get('database.password'), + database: config.get('database.database'), + autoLoadEntities: true, + }), + }), + ], +}) +export class AppModule {} + +// Type-safe configuration access +export interface AppConfig { + port: number; + environment: 'development' | 'production' | 'test'; + apiPrefix: string; +} + +export interface DatabaseConfig { + host: string; + port: number; + username: string; + password: string; + database: string; +} + +// Type-safe access +@Injectable() +export class AppService { + constructor(private config: ConfigService) {} + + getPort(): number { + // Type-safe with generic + return this.config.get('app.port'); + } + + getDatabaseConfig(): DatabaseConfig { + return this.config.get('database'); + } +} + +// Inject namespaced config directly +@Injectable() +export class DatabaseService { + constructor( + @Inject(databaseConfig.KEY) + private dbConfig: ConfigType + ) { + // Full type inference! 
+ const host = this.dbConfig.host; // string + const port = this.dbConfig.port; // number + } +} + +// Environment files support +ConfigModule.forRoot({ + envFilePath: [`.env.${process.env.NODE_ENV}.local`, `.env.${process.env.NODE_ENV}`, '.env.local', '.env'], +}); + +// .env.development +// DB_HOST=localhost +// DB_PORT=5432 + +// .env.production +// DB_HOST=prod-db.example.com +// DB_PORT=5432 +``` + +Reference: [NestJS Configuration](https://docs.nestjs.com/techniques/configuration) + +--- + +### 10.3 Use Structured Logging + +**Impact: MEDIUM-HIGH** — Structured logging enables effective debugging and monitoring + +Use NestJS Logger with structured JSON output in production. Include contextual information (request ID, user ID, operation) to trace requests across services. Avoid console.log and implement proper log levels. + +**Incorrect (using console.log in production):** + +```typescript +// Use console.log in production +@Injectable() +export class UsersService { + async createUser(dto: CreateUserDto): Promise { + console.log('Creating user:', dto); + // Not structured, no levels, lost in production logs + + try { + const user = await this.repo.save(dto); + console.log('User created:', user.id); + return user; + } catch (error) { + console.log('Error:', error); // Using log for errors + throw error; + } + } +} + +// Log sensitive data +console.log('Login attempt:', { email, password }); // SECURITY RISK! + +// Inconsistent log format +logger.log('User ' + userId + ' created at ' + new Date()); +// Hard to parse, no structure +``` + +**Correct (use structured logging with context):** + +```typescript +// Configure logger in main.ts +async function bootstrap() { + const app = await NestFactory.create(AppModule, { + logger: + process.env.NODE_ENV === 'production' ? 
['error', 'warn', 'log'] : ['error', 'warn', 'log', 'debug', 'verbose'], + }); +} + +// Use NestJS Logger with context +@Injectable() +export class UsersService { + private readonly logger = new Logger(UsersService.name); + + async createUser(dto: CreateUserDto): Promise { + this.logger.log('Creating user', { email: dto.email }); + + try { + const user = await this.repo.save(dto); + this.logger.log('User created', { userId: user.id }); + return user; + } catch (error) { + this.logger.error('Failed to create user', error.stack, { + email: dto.email, + }); + throw error; + } + } +} + +// Custom logger for JSON output +@Injectable() +export class JsonLogger implements LoggerService { + log(message: string, context?: object): void { + console.log( + JSON.stringify({ + level: 'info', + timestamp: new Date().toISOString(), + message, + ...context, + }) + ); + } + + error(message: string, trace?: string, context?: object): void { + console.error( + JSON.stringify({ + level: 'error', + timestamp: new Date().toISOString(), + message, + trace, + ...context, + }) + ); + } + + warn(message: string, context?: object): void { + console.warn( + JSON.stringify({ + level: 'warn', + timestamp: new Date().toISOString(), + message, + ...context, + }) + ); + } + + debug(message: string, context?: object): void { + console.debug( + JSON.stringify({ + level: 'debug', + timestamp: new Date().toISOString(), + message, + ...context, + }) + ); + } +} + +// Request context logging with ClsModule +import { ClsModule, ClsService } from 'nestjs-cls'; + +@Module({ + imports: [ + ClsModule.forRoot({ + global: true, + middleware: { + mount: true, + generateId: true, + }, + }), + ], +}) +export class AppModule {} + +// Middleware to set request context +@Injectable() +export class RequestContextMiddleware implements NestMiddleware { + constructor(private cls: ClsService) {} + + use(req: Request, res: Response, next: NextFunction): void { + const requestId = req.headers['x-request-id'] || 
randomUUID(); + this.cls.set('requestId', requestId); + this.cls.set('userId', req.user?.id); + + res.setHeader('x-request-id', requestId); + next(); + } +} + +// Logger that includes request context +@Injectable() +export class ContextLogger { + constructor(private cls: ClsService) {} + + log(message: string, data?: object): void { + console.log( + JSON.stringify({ + level: 'info', + timestamp: new Date().toISOString(), + requestId: this.cls.get('requestId'), + userId: this.cls.get('userId'), + message, + ...data, + }) + ); + } + + error(message: string, error: Error, data?: object): void { + console.error( + JSON.stringify({ + level: 'error', + timestamp: new Date().toISOString(), + requestId: this.cls.get('requestId'), + userId: this.cls.get('userId'), + message, + error: error.message, + stack: error.stack, + ...data, + }) + ); + } +} + +// Pino integration for high-performance logging +import { LoggerModule } from 'nestjs-pino'; + +@Module({ + imports: [ + LoggerModule.forRoot({ + pinoHttp: { + level: process.env.NODE_ENV === 'production' ? 'info' : 'debug', + transport: process.env.NODE_ENV !== 'production' ? 
{ target: 'pino-pretty' } : undefined, + redact: ['req.headers.authorization', 'req.body.password'], + serializers: { + req: (req) => ({ + method: req.method, + url: req.url, + query: req.query, + }), + res: (res) => ({ + statusCode: res.statusCode, + }), + }, + }, + }), + ], +}) +export class AppModule {} + +// Usage with Pino +@Injectable() +export class UsersService { + constructor(private logger: PinoLogger) { + this.logger.setContext(UsersService.name); + } + + async findOne(id: string): Promise { + this.logger.info({ userId: id }, 'Finding user'); + // Pino uses first arg for data, second for message + } +} +``` + +Reference: [NestJS Logger](https://docs.nestjs.com/techniques/logger) + +--- + +## References + +- https://docs.nestjs.com +- https://github.com/nestjs/nest +- https://typeorm.io +- https://github.com/typestack/class-validator +- https://github.com/goldbergyoni/nodebestpractices + +--- + +_Generated by build-agents.ts on 2026-01-16_ diff --git a/.agents/skills/nestjs-best-practices/SKILL.md b/.agents/skills/nestjs-best-practices/SKILL.md index 8c5b4ee..dd64cd4 100644 --- a/.agents/skills/nestjs-best-practices/SKILL.md +++ b/.agents/skills/nestjs-best-practices/SKILL.md @@ -1,10 +1,12 @@ --- name: nestjs-best-practices -description: NestJS best practices and architecture patterns for building production-ready applications. This skill should be used when writing, reviewing, or refactoring NestJS code to ensure proper patterns for modules, dependency injection, security, and performance. +description: NestJS best practices and architecture patterns for building production-ready LCBP3-DMS backend code. Enforces ADR-009 (no TypeORM migrations), ADR-019 (hybrid UUID), ADR-016 (security), ADR-007 (error handling), ADR-008 (BullMQ), ADR-001/002 (workflow + numbering), ADR-018/020 (AI boundary), and ADR-021 (workflow context). 
+version: 1.8.9 +scope: backend +user-invocable: false license: MIT metadata: - author: Kadajett - version: '1.1.0' + upstream: 'Kadajett/nestjs-best-practices v1.1.0 (forked + LCBP3-aligned)' --- # NestJS Best Practices @@ -110,6 +112,13 @@ Reference these guidelines when: - `devops-use-logging` - Structured logging - `devops-graceful-shutdown` - Zero-downtime deployments +### 11. LCBP3-Specific (CRITICAL — Project Overrides) + +- `db-no-typeorm-migrations` — **CRITICAL** ADR-009: edit SQL directly +- `lcbp3-workflow-engine` — **CRITICAL** ADR-001/002/021: DSL state machine + double-lock numbering + workflow context +- `security-file-two-phase-upload` — **CRITICAL** ADR-016: Upload → Temp → ClamAV → Commit +- `lcbp3-ai-boundary` — **CRITICAL** ADR-018/020: Ollama on-prem only, human-in-the-loop + ## NAP-DMS Project-Specific Rules (MUST FOLLOW) These rules override general NestJS best practices for the NAP-DMS project: @@ -120,21 +129,62 @@ These rules override general NestJS best practices for the NAP-DMS project: - แก้ไข schema โดยตรงที่: `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` - ใช้ n8n workflow สำหรับ data migration ถ้าจำเป็น -### ADR-019: Hybrid Identifier Strategy (CRITICAL) +### ADR-019: Hybrid Identifier Strategy (CRITICAL — March 2026 Pattern) + +> **Updated pattern:** `UuidBaseEntity` exposes `publicId` **directly**. ห้ามใช้ `@Expose({ name: 'id' })` — API จะคืน `publicId` เป็น field name ตรงๆ. ```typescript +// ✅ CORRECT — ใช้ UuidBaseEntity @Entity() -export class Project { - @PrimaryGeneratedColumn() - @Exclude() // ห้ามส่งออกทาง API - id: number; // INT AUTO_INCREMENT - internal only +export class Project extends UuidBaseEntity { + // publicId (string UUIDv7) + id (INT, @Exclude) สืบทอดจาก UuidBaseEntity + // API response → { publicId: "019505a1-7c3e-7000-8000-abc123..." 
} - @Column({ type: 'uuid' }) - @Expose({ name: 'id' }) // ส่งออกเป็น 'id' ทาง API - publicId: string; // UUIDv7 - public API identifier + @Column() + projectCode: string; + + @Column() + projectName: string; } ``` +```typescript +// ❌ WRONG — pattern เก่า ห้ามใช้ +@Entity() +export class OldProject { + @PrimaryGeneratedColumn() + @Exclude() + id: number; + + @Column({ type: 'uuid' }) + @Expose({ name: 'id' }) // ❌ อย่า rename publicId เป็น 'id' + publicId: string; +} +``` + +**DTO Input (รับ UUID จาก Frontend):** + +```typescript +export class CreateContractDto { + @IsUUID('7') + projectUuid: string; // รับ UUID string จาก client +} + +// Controller resolves UUID → INT internally +@Post() +async create(@Body() dto: CreateContractDto) { + const projectId = await this.projectService.resolveInternalId(dto.projectUuid); + return this.contractService.create({ ...dto, projectId }); +} +``` + +**ห้ามเด็ดขาด (CI Blocker):** + +- ❌ `parseInt(projectPublicId)` — "019505…" → 19505 (silently wrong) +- ❌ `Number(publicId)` / `+publicId` — NaN +- ❌ `@Expose({ name: 'id' })` บน `publicId` (pattern เก่า) +- ❌ Expose INT `id` ใน API response (ต้อง `@Exclude()` เสมอ) + ### Two-Phase File Upload ```typescript diff --git a/.agents/skills/nestjs-best-practices/metadata.json b/.agents/skills/nestjs-best-practices/metadata.json new file mode 100644 index 0000000..a68f7be --- /dev/null +++ b/.agents/skills/nestjs-best-practices/metadata.json @@ -0,0 +1,24 @@ +{ + "version": "1.8.9", + "organization": "**NAP-DMS / LCBP3** — Laem Chabang Port Phase 3 Document Management System", + "date": "2026-04-22", + "abstract": "Comprehensive NestJS best-practices guide compiled for the LCBP3-DMS backend. Contains 40+ rules across 11 categories (10 general + 1 project-specific), prioritized by impact. 
Forked from Kadajett/nestjs-best-practices (v1.1.0) and aligned to LCBP3 ADRs: ADR-001 (workflow engine), ADR-002 (document numbering), ADR-007 (error handling), ADR-008 (notifications/BullMQ), ADR-009 (no TypeORM migrations), ADR-016 (security), ADR-018/020 (AI boundary), ADR-019 (hybrid UUID identifier — March 2026 pattern), and ADR-021 (workflow context).\n\nThis document is the single, consolidated reference used by Cascade and other AI coding agents when writing, reviewing, or refactoring backend code in this repository. All LCBP3-specific overrides live in section 11.", + "references": [ + "[AGENTS.md (root)](../../../AGENTS.md) — canonical AI agent rules", + "[CONTRIBUTING.md](../../../CONTRIBUTING.md) — spec authoring + PR process", + "[ADR-001 Unified Workflow Engine](../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md)", + "[ADR-002 Document Numbering Strategy](../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md)", + "[ADR-007 Error Handling Strategy](../../../specs/06-Decision-Records/ADR-007-error-handling-strategy.md)", + "[ADR-008 Email/Notification Strategy](../../../specs/06-Decision-Records/ADR-008-email-notification-strategy.md)", + "[ADR-009 Database Migration Strategy](../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md)", + "[ADR-016 Security & Authentication](../../../specs/06-Decision-Records/ADR-016-security-authentication.md)", + "[ADR-018 AI Boundary](../../../specs/06-Decision-Records/ADR-018-ai-boundary.md)", + "[ADR-019 Hybrid Identifier Strategy](../../../specs/06-Decision-Records/ADR-019-hybrid-identifier-strategy.md)", + "[ADR-020 AI Intelligence Integration](../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md)", + "[ADR-021 Workflow Context](../../../specs/06-Decision-Records/ADR-021-workflow-context.md)", + "[Backend Engineering Guidelines](../../../specs/05-Engineering-Guidelines/05-02-backend-guidelines.md)", + "[Schema — v1.8.0 
Tables](../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql)", + "[Data Dictionary](../../../specs/03-Data-and-Storage/03-01-data-dictionary.md)", + "Upstream: [Kadajett/nestjs-best-practices](https://github.com/Kadajett/nestjs-best-practices) v1.1.0" + ] +} diff --git a/.agents/skills/nestjs-best-practices/rules/db-hybrid-identifier.md b/.agents/skills/nestjs-best-practices/rules/db-hybrid-identifier.md index bec9f50..1f35e1d 100644 --- a/.agents/skills/nestjs-best-practices/rules/db-hybrid-identifier.md +++ b/.agents/skills/nestjs-best-practices/rules/db-hybrid-identifier.md @@ -5,20 +5,22 @@ impactDescription: Use INT PK internally + UUID for public API per project ADR-0 tags: database, uuid, identifier, adr-019, api-design, typeorm --- -## Hybrid Identifier Strategy (ADR-019) +## Hybrid Identifier Strategy (ADR-019) — March 2026 Pattern **This project follows ADR-019: INT Primary Key (internal) + UUIDv7 (public API)** Unlike standard practices that use UUID as the primary key, this project uses a **hybrid approach** optimized for MariaDB performance and API consistency. +> **Updated pattern (March 2026):** Entities extend `UuidBaseEntity`. The `publicId` column is exposed **directly** in API responses — ห้ามใช้ `@Expose({ name: 'id' })` เพื่อ rename. 
+ ### The Strategy -| Layer | Field | Type | Usage | -|-------|-------|------|-------| -| **Database PK** | `id` | `INT AUTO_INCREMENT` | Internal foreign keys only | -| **Public API** | `uuid` | `MariaDB UUID` (native) | External references, URLs | -| **DTO Input** | `xxxUuid` | `string` | Accept UUID in create/update | -| **DTO Output** | `id` | `string` | API returns UUID as `id` via `@Expose` | +| Layer | Field | Type | Usage | +| --------------- | ---------- | ----------------------------------- | ------------------------------------------------- | +| **Database PK** | `id` | `INT AUTO_INCREMENT` | Internal foreign keys only (marked `@Exclude()`) | +| **Public API** | `publicId` | `MariaDB UUID` (native, BINARY(16)) | External references, URLs — exposed as-is | +| **DTO Input** | `xxxUuid` | `string` (UUIDv7) | Accept UUID in create/update DTOs | +| **DTO Output** | `publicId` | `string` (UUIDv7) | API returns `publicId` field directly (no rename) | ### Why Hybrid IDs? @@ -27,31 +29,51 @@ Unlike standard practices that use UUID as the primary key, this project uses a - **Compatibility**: UUID works well with distributed systems and external integrations - **MariaDB Native**: Uses MariaDB's native UUID type (stored as BINARY(16), auto-converts to string) -### Entity Definition +### Entity Definition (Current Pattern) ```typescript -import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; -import { Exclude, Expose } from 'class-transformer'; +import { Entity, Column } from 'typeorm'; +import { UuidBaseEntity } from '@/common/entities/uuid-base.entity'; @Entity('contracts') -export class Contract { - @PrimaryGeneratedColumn() - @Exclude() // Never expose in API response - id: number; // Internal INT PK - used for FK relationships - - @Column({ type: 'uuid', unique: true }) - @Expose({ name: 'id' }) // Exposed as 'id' in API - uuid: string; // Public UUIDv7 - what API consumers see +export class Contract extends UuidBaseEntity { + // publicId 
(string UUIDv7) + id (INT, @Exclude) สืบทอดจาก UuidBaseEntity + // API response → { publicId: "019505a1-7c3e-7000-8000-abc123...", contractCode: ..., ... } @Column() contractCode: string; @Column() contractName: string; + + @Column({ name: 'project_id' }) + projectId: number; // INT FK — internal, not exposed if marked @Exclude in UuidBaseEntity } ``` -### DTO Pattern (Accept UUID, Resolve to INT) +**`UuidBaseEntity` (shared base):** + +```typescript +import { PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm'; +import { Exclude } from 'class-transformer'; + +export abstract class UuidBaseEntity { + @PrimaryGeneratedColumn() + @Exclude() // ❗ CRITICAL: INT id must never leak to API + id: number; + + @Column({ type: 'uuid', unique: true, generated: 'uuid' }) + publicId: string; // UUIDv7, exposed as-is + + @CreateDateColumn() + createdAt: Date; + + @UpdateDateColumn() + updatedAt: Date; +} +``` + +### DTO Pattern (Accept UUID, Resolve to INT Internally) ```typescript // dto/create-contract.dto.ts @@ -59,8 +81,8 @@ import { IsUUID, IsNotEmpty } from 'class-validator'; export class CreateContractDto { @IsNotEmpty() - @IsUUID('4') - projectUuid: string; // Accept UUID from client + @IsUUID('7') // UUIDv7 (MariaDB native) + projectUuid: string; // Accept UUID from client @IsNotEmpty() contractCode: string; @@ -69,48 +91,38 @@ export class CreateContractDto { contractName: string; } -// dto/contract-response.dto.ts -import { Exclude, Expose } from 'class-transformer'; - -export class ContractResponseDto { - @Expose({ name: 'id' }) - uuid: string; // Returned as 'id' field in JSON - - contractCode: string; - contractName: string; -} +// ❌ No Response DTO with an @Expose rename is needed. +// The entity is serialized by class-transformer (via TransformInterceptor), which exposes publicId directly. 
``` ### Service/Controller Pattern ```typescript @Controller('contracts') +@UseGuards(JwtAuthGuard, CaslAbilityGuard) export class ContractsController { constructor( private contractsService: ContractsService, - private uuidResolver: UuidResolver, // Helper to convert UUID → INT + private uuidResolver: UuidResolver ) {} @Post() async create(@Body() dto: CreateContractDto) { - // Resolve UUID to INT PK for database operations + // Resolve UUID → INT PK for FK relationship const projectId = await this.uuidResolver.resolveProject(dto.projectUuid); - - // Create with INT FK + const contract = await this.contractsService.create({ ...dto, - projectId, // INT for database + projectId, }); - // Response automatically transforms via @Expose + // Response: TransformInterceptor + @Exclude on id → publicId exposed directly return contract; } - @Get(':id') - async findOne(@Param('id') uuid: string) { - // Controller receives UUID string - // Service handles UUID → INT resolution internally - return this.contractsService.findByUuid(uuid); + @Get(':publicId') + async findOne(@Param('publicId', ParseUuidPipe) publicId: string) { + return this.contractsService.findOneByPublicId(publicId); } } ``` @@ -124,21 +136,21 @@ export class UuidResolver { @InjectRepository(Project) private projectRepo: Repository, @InjectRepository(Contract) - private contractRepo: Repository, + private contractRepo: Repository ) {} - async resolveProject(uuid: string): Promise { + async resolveProject(publicId: string): Promise { const project = await this.projectRepo.findOne({ - where: { uuid }, - select: ['id'], // Only fetch INT PK + where: { publicId }, + select: ['id'], // Only INT PK for FK }); if (!project) throw new NotFoundException('Project not found'); return project.id; } - async resolveContract(uuid: string): Promise { + async resolveContract(publicId: string): Promise { const contract = await this.contractRepo.findOne({ - where: { uuid }, + where: { publicId }, select: ['id'], }); if 
(!contract) throw new NotFoundException('Contract not found'); @@ -147,20 +159,20 @@ export class UuidResolver { } ``` -### TransformInterceptor (Required) +### TransformInterceptor (Required — register ONCE) ```typescript -// Must be configured globally to handle @Exclude/@Expose +// Register via APP_INTERCEPTOR in CommonModule — ห้ามซ้ำใน main.ts @Injectable() export class TransformInterceptor implements NestInterceptor { intercept(context: ExecutionContext, next: CallHandler): Observable { return next.handle().pipe( - map((data) => instanceToPlain(data)), // Applies class-transformer decorators + map((data) => instanceToPlain(data)) // Applies @Exclude / @Expose ); } } -// app.module.ts +// common.module.ts @Module({ providers: [ { @@ -169,40 +181,42 @@ export class TransformInterceptor implements NestInterceptor { }, ], }) -export class AppModule {} +export class CommonModule {} ``` +> **Warning:** ห้ามเรียก `app.useGlobalInterceptors(new TransformInterceptor())` ใน `main.ts` ซ้ำ — จะทำให้ response double-wrap `{ data: { data: ... } }`. + ### Critical: NEVER ParseInt on UUID ```typescript // ❌ WRONG - parseInt on UUID gives garbage value -const id = parseInt(projectUuid); // "0195a1b2-..." → 195 (wrong!) +const id = parseInt(projectPublicId); // "0195a1b2-..." → 195 (wrong!) 
// ❌ WRONG - Number() on UUID -const id = Number(projectUuid); // NaN +const id = Number(projectPublicId); // NaN // ❌ WRONG - Unary plus on UUID -const id = +projectUuid; // NaN +const id = +projectPublicId; // NaN // ✅ CORRECT - Resolve via database lookup -const projectId = await uuidResolver.resolveProject(projectUuid); +const projectId = await uuidResolver.resolveProject(projectPublicId); -// ✅ CORRECT - Use TypeORM find with UUID column -const project = await projectRepo.findOne({ where: { uuid: projectUuid } }); -const id = project.id; // Get INT PK from entity +// ✅ CORRECT - Use TypeORM find with publicId column +const project = await projectRepo.findOne({ where: { publicId: projectPublicId } }); +const id = project.id; // Get INT PK from entity ``` -### Query with UUID (No Resolution Needed) +### Query with publicId (No Resolution Needed) ```typescript // Direct UUID lookup in TypeORM const project = await this.projectRepo.findOne({ - where: { uuid: projectUuid }, // Query by UUID column + where: { publicId: projectPublicId }, }); // Relations use INT FK internally const contracts = await this.contractRepo.find({ - where: { projectId: project.id }, // INT for FK query + where: { projectId: project.id }, // INT for FK query }); ``` diff --git a/.agents/skills/nestjs-best-practices/rules/db-no-typeorm-migrations.md b/.agents/skills/nestjs-best-practices/rules/db-no-typeorm-migrations.md new file mode 100644 index 0000000..ab0245f --- /dev/null +++ b/.agents/skills/nestjs-best-practices/rules/db-no-typeorm-migrations.md @@ -0,0 +1,100 @@ +--- +title: No TypeORM Migrations (ADR-009) +impact: CRITICAL +impactDescription: Edit SQL schema files directly; n8n handles data migration. Do not generate TypeORM migration files. 
+tags: database, schema, migration, adr-009, sql, n8n +--- + +## No TypeORM Migrations (ADR-009) + +**This project does NOT use TypeORM migration files.** + +All schema changes must be made **directly** in the canonical SQL file: + +- `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` + +Delta scripts (for incremental rollout to existing environments) go under: + +- `specs/03-Data-and-Storage/deltas/YYYY-MM-DD-descriptive-name.sql` + +Data migration (e.g., backfilling a new column) is handled by **n8n workflows**, not TypeORM's `QueryRunner`. + +--- + +## Why No Migrations? + +1. **Single source of truth** — The full SQL schema is always readable as one file. No need to replay a migration chain to understand current state. +2. **Review friendly** — Schema diff = git diff on the SQL file. Reviewers see the complete picture. +3. **Ops alignment** — DBAs and operators work in SQL, not TypeScript. +4. **n8n for data** — Business-meaningful data transforms live in n8n where they can be versioned, retried, and orchestrated with monitoring. + +--- + +## ✅ Workflow for a Schema Change + +1. **Update Data Dictionary** first: + - `specs/03-Data-and-Storage/03-01-data-dictionary.md` — add field meaning + business rules. +2. **Update the canonical schema**: + - Edit `lcbp3-v1.8.0-schema-02-tables.sql` — add/alter column, constraint, index. +3. **Add a delta script** (if deploying to existing env): + - `specs/03-Data-and-Storage/deltas/2026-04-22-add-rfa-revision-column.sql` + + ```sql + -- Delta: Add revision column to rfa table + ALTER TABLE rfa + ADD COLUMN revision INT NOT NULL DEFAULT 1 AFTER status; + + CREATE INDEX idx_rfa_revision ON rfa(revision); + ``` +4. **Update the Entity** (`backend/src/.../entities/rfa.entity.ts`): + + ```typescript + @Column({ type: 'int', default: 1 }) + revision: number; + ``` +5. **If data backfill needed** → create n8n workflow, not TypeScript migration. 
+ +--- + +## ❌ Forbidden + +```bash +# ❌ DO NOT generate migrations +pnpm typeorm migration:generate ./src/migrations/AddRevision + +# ❌ DO NOT run migrations +pnpm typeorm migration:run +``` + +```typescript +// ❌ DO NOT write migration classes +export class AddRevision1730000000000 implements MigrationInterface { + async up(queryRunner: QueryRunner): Promise { /* ... */ } + async down(queryRunner: QueryRunner): Promise { /* ... */ } +} +``` + +--- + +## ✅ TypeORM Config (runtime only) + +```typescript +// ormconfig.ts +export default { + type: 'mariadb', + // ... + synchronize: false, // ❗ NEVER true (would auto-sync entity ↔ schema) + migrationsRun: false, // ❗ NEVER true + // ❌ Do NOT specify `migrations:` entries +}; +``` + +`synchronize: false` is mandatory because the canonical SQL file is authoritative — TypeORM should never mutate the schema. + +--- + +## Reference + +- [ADR-009 Database Migration Strategy](../../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md) +- [Data Dictionary](../../../../specs/03-Data-and-Storage/03-01-data-dictionary.md) +- [Schema Tables](../../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql) diff --git a/.agents/skills/nestjs-best-practices/rules/lcbp3-ai-boundary.md b/.agents/skills/nestjs-best-practices/rules/lcbp3-ai-boundary.md new file mode 100644 index 0000000..4c195b2 --- /dev/null +++ b/.agents/skills/nestjs-best-practices/rules/lcbp3-ai-boundary.md @@ -0,0 +1,157 @@ +--- +title: AI Integration Boundary (ADR-018 / ADR-020) +impact: CRITICAL +impactDescription: AI runs on Admin Desktop only; AI → DMS API → DB (never direct); human-in-the-loop validation mandatory; full audit trail. +tags: ai, ollama, boundary, adr-018, adr-020, privacy, audit +--- + +## AI Integration Boundary + +LCBP3 uses **on-premises AI only** (Ollama on Admin Desktop) with strict isolation from data layers. 
+ +--- + +## The Boundary + +``` +┌────────────────────────────────────────────────────────────┐ +│ User Browser (Next.js) │ +└─────────────────────────┬──────────────────────────────────┘ + │ (authenticated HTTPS) +┌─────────────────────────▼──────────────────────────────────┐ +│ DMS API (NestJS) ◀── enforces CASL, validation, audit │ +│ ├─ AiGateway (proxies to Ollama) │ +│ └─ DB + Storage (Elasticsearch, MariaDB, File System) │ +└─────────────────────────┬──────────────────────────────────┘ + │ (HTTP → Admin Desktop, internal) +┌─────────────────────────▼──────────────────────────────────┐ +│ Admin Desktop (Desk-5439) │ +│ ├─ Ollama (Gemma 4) │ +│ ├─ PaddleOCR (Thai + English) │ +│ └─ n8n orchestration │ +└────────────────────────────────────────────────────────────┘ +``` + +**❗ Admin Desktop has NO network access to MariaDB, no SMB to storage, no shared secrets.** It receives base64-encoded file bytes over HTTPS and returns extracted text + suggestions. + +--- + +## Required Patterns + +### 1. AiGateway Module (backend) + +```typescript +@Module({ + controllers: [AiController], + providers: [AiService, AiGateway, AiAuditLogger], + exports: [AiService], +}) +export class AiModule {} + +@Injectable() +export class AiService { + async extractMetadata(fileId: number, user: User): Promise { + // 1. Authorize (CASL: user can read this file) + await this.ability.ensureCan(user, 'read', File, fileId); + + // 2. Load file (DMS API, inside the boundary) + const fileBytes = await this.storageService.read(fileId); + + // 3. Call Admin Desktop AI over HTTP + const raw = await this.aiGateway.extract(fileBytes); + + // 4. Validate AI output schema (Zod) + const parsed = ExtractedMetadataSchema.parse(raw); + + // 5. Audit log (who, what, when, model, confidence) + await this.auditLogger.log({ + userId: user.id, + action: 'ai.extract_metadata', + fileId, + model: raw.model, + confidence: parsed.confidence, + }); + + // 6. 
Return — frontend MUST render for human confirmation + return parsed; + } +} +``` + +### 2. Human-in-the-Loop + +AI output is **never persisted directly**. Users must confirm via `DocumentReviewForm`: + +```tsx + saveMetadata(reviewed)} // user edits applied +/> +``` + +The `user_confirmed_at` timestamp and diff (AI suggestion → final value) are stored in the audit log. + +### 3. Rate Limiting + +```typescript +@Post('ai/extract') +@UseGuards(JwtAuthGuard, CaslAbilityGuard, ThrottlerGuard) +@Throttle({ default: { limit: 10, ttl: 60_000 } }) // 10 req/min/user +async extract(@Body() dto: ExtractDto) { /* ... */ } +``` + +--- + +## ❌ Forbidden + +```typescript +// ❌ AI container connecting to DB +// docker-compose.yml inside ai-service: +// environment: +// DATABASE_URL: mysql://... ← NEVER + +// ❌ AI SDK calling cloud API +import OpenAI from 'openai'; // ❌ No cloud AI SDKs in production code +const client = new OpenAI({ apiKey: ... }); + +// ❌ Persisting AI output without human confirm +async extractAndSave(fileId: number) { + const metadata = await this.ai.extract(fileId); + await this.repo.save({ fileId, ...metadata }); // ❌ skips human review +} + +// ❌ Skipping audit log +const result = await this.aiGateway.extract(bytes); // no logging +return result; +``` + +--- + +## Audit Log Schema + +```sql +CREATE TABLE ai_audit_log ( + id INT AUTO_INCREMENT PRIMARY KEY, + public_id UUID UNIQUE NOT NULL, + user_id INT NOT NULL, + action VARCHAR(64) NOT NULL, -- 'ai.extract_metadata', 'ai.classify', etc. 
+ file_id INT, + model VARCHAR(64), -- 'gemma-4:7b', 'paddleocr-v3' + confidence DECIMAL(4,3), + input_hash CHAR(64), -- SHA-256 of input for replay detection + output_summary JSON, + human_confirmed_at DATETIME, + created_at DATETIME DEFAULT CURRENT_TIMESTAMP, + INDEX idx_user_created (user_id, created_at), + INDEX idx_file (file_id) +); +``` + +--- + +## Reference + +- [ADR-018 AI Boundary](../../../../specs/06-Decision-Records/ADR-018-ai-boundary.md) +- [ADR-020 AI Intelligence Integration](../../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md) +- [ADR-017 Ollama Data Migration](../../../../specs/06-Decision-Records/ADR-017-ollama-data-migration.md) diff --git a/.agents/skills/nestjs-best-practices/rules/lcbp3-workflow-engine.md b/.agents/skills/nestjs-best-practices/rules/lcbp3-workflow-engine.md new file mode 100644 index 0000000..db41f6d --- /dev/null +++ b/.agents/skills/nestjs-best-practices/rules/lcbp3-workflow-engine.md @@ -0,0 +1,181 @@ +--- +title: Workflow Engine + Document Numbering + Workflow Context (ADR-001 / 002 / 021) +impact: CRITICAL +impactDescription: DSL-based state machine; double-lock numbering; integrated workflow context exposed to clients. +tags: workflow, numbering, redlock, version-column, adr-001, adr-002, adr-021 +--- + +## Workflow Engine + Numbering + Context + +LCBP3 uses a **unified workflow engine** (DSL-based state machine) across RFA, Transmittal, Correspondence, Circulation, and Shop Drawing. Every state transition goes through the same engine — no per-type routing tables. + +--- + +## ADR-001: Unified Workflow Engine + +### State Transition Pattern + +```typescript +@Injectable() +export class WorkflowEngine { + async transition( + instanceId: string, + action: WorkflowAction, + actor: User, + context?: WorkflowContext, + ): Promise { + // 1. 
Load current state from DB (never trust client-provided state) + const instance = await this.repo.findOneByPublicId(instanceId); + if (!instance) throw new NotFoundException(); + + // 2. Validate transition against DSL + const dsl = await this.dslService.load(instance.workflowTypeId); + const nextState = dsl.resolve(instance.currentState, action); + if (!nextState) { + throw new BusinessException( + `Action ${action} not allowed from state ${instance.currentState}`, + 'ไม่สามารถดำเนินการนี้ได้ในสถานะปัจจุบัน', + 'กรุณาตรวจสอบขั้นตอนการอนุมัติ', + 'WF_INVALID_TRANSITION', + ); + } + + // 3. Apply transition atomically (optimistic lock via @VersionColumn) + instance.currentState = nextState; + await this.repo.save(instance); // throws OptimisticLockVersionMismatchError on race + + // 4. Emit event for listeners (notifications via BullMQ — ADR-008) + this.eventBus.publish(new WorkflowTransitionedEvent(instance, action, actor)); + + return instance; + } +} +``` + +### ❌ Anti-Patterns + +- ❌ Hard-coded `switch (state)` in controllers/services +- ❌ Trusting `currentState` from request body +- ❌ Creating separate routing tables per document type + +--- + +## ADR-002: Document Numbering (Double-Lock) + +Concurrent requests for a new document number **must** use both: + +1. **Redis Redlock** — distributed lock across app instances +2. 
**TypeORM `@VersionColumn`** — optimistic lock on counter row + +### Counter Entity + +```typescript +@Entity('document_number_counters') +@Unique(['projectId', 'documentTypeId']) +export class DocumentNumberCounter extends UuidBaseEntity { + @Column({ name: 'project_id' }) + projectId: number; + + @Column({ name: 'document_type_id' }) + documentTypeId: number; + + @Column({ name: 'last_number', default: 0 }) + lastNumber: number; + + @VersionColumn() + version: number; // ❗ Optimistic lock — do not rename, do not remove +} +``` + +### Service Pattern + +```typescript +@Injectable() +export class DocumentNumberingService { + constructor( + @InjectRepository(DocumentNumberCounter) + private counterRepo: Repository, + private redlock: RedlockService, + private readonly logger: Logger, + ) {} + + async generateNext(ctx: NumberingContext): Promise { + const lockKey = `doc_num:${ctx.projectId}:${ctx.documentTypeId}`; + + // Distributed lock — 3s TTL, up to 5 retries + const lock = await this.redlock.acquire([lockKey], 3000); + + try { + // Optimistic lock via @VersionColumn + const counter = await this.counterRepo.findOne({ + where: { projectId: ctx.projectId, documentTypeId: ctx.documentTypeId }, + }); + + if (!counter) { + throw new NotFoundException('Counter not initialized for this project/type'); + } + + counter.lastNumber += 1; + await this.counterRepo.save(counter); // may throw OptimisticLockVersionMismatchError + + return this.formatNumber(ctx, counter.lastNumber); + } catch (err) { + if (err instanceof OptimisticLockVersionMismatchError) { + this.logger.warn(`Numbering race detected for ${lockKey}, retrying`); + // Let caller retry via BullMQ retry policy + } + throw err; + } finally { + await lock.release(); + } + } + + private formatNumber(ctx: NumberingContext, seq: number): string { + // e.g. 
"LCBP3-RFA-0042" + return `${ctx.projectCode}-${ctx.typeCode}-${String(seq).padStart(4, '0')}`; + } +} +``` + +### ❌ Anti-Patterns + +- ❌ App-side counter only (`let counter = 0; counter++`) +- ❌ Using `findOne` + `update` without `@VersionColumn` +- ❌ Using only Redis lock without DB optimistic lock (race if Redis fails) + +--- + +## ADR-021: Integrated Workflow Context + +Every workflow-aware API response **must** expose: + +```typescript +export class WorkflowEnvelope { + data: T; + + workflow: { + instancePublicId: string; + currentState: string; // e.g. 'pending_review' + availableActions: string[]; // e.g. ['approve', 'reject', 'request-revision'] + canEdit: boolean; // computed from CASL + current state + lastTransitionAt: string; // ISO 8601 + }; + + stepAttachments?: Array<{ // files produced by the current/previous step + publicId: string; + fileName: string; + stepCode: string; + downloadUrl: string; + }>; +} +``` + +Frontend uses `workflow.availableActions` to render buttons — no client-side state machine logic. + +--- + +## Reference + +- [ADR-001 Unified Workflow Engine](../../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md) +- [ADR-002 Document Numbering Strategy](../../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md) +- [ADR-021 Workflow Context](../../../../specs/06-Decision-Records/ADR-021-workflow-context.md) diff --git a/.agents/skills/nestjs-best-practices/rules/security-file-two-phase-upload.md b/.agents/skills/nestjs-best-practices/rules/security-file-two-phase-upload.md new file mode 100644 index 0000000..eca54d5 --- /dev/null +++ b/.agents/skills/nestjs-best-practices/rules/security-file-two-phase-upload.md @@ -0,0 +1,137 @@ +--- +title: Two-Phase File Upload + ClamAV (ADR-016) +impact: CRITICAL +impactDescription: Upload → Temp → ClamAV scan → Commit → Permanent. Whitelist + 50MB cap. StorageService only. 
+tags: file-upload, clamav, security, adr-016, storage +--- + +## Two-Phase File Upload (ADR-016) + +**Never write uploaded files directly to permanent storage.** All uploads must go through: + +``` +Client → Upload endpoint → Temp storage → ClamAV scan → Commit endpoint → Permanent storage +``` + +--- + +## Constraints (non-negotiable) + +| Rule | Value | +| --- | --- | +| Allowed MIME types | `application/pdf`, `image/vnd.dwg`, `application/vnd.openxmlformats-officedocument.wordprocessingml.document`, `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`, `application/zip` | +| Allowed extensions | `.pdf`, `.dwg`, `.docx`, `.xlsx`, `.zip` | +| Max size | 50 MB | +| Temp TTL | 24 h (purged by cron) | +| Virus scan | ClamAV (blocking) | +| Mover | `StorageService` only — never `fs.rename` directly from controller | + +--- + +## Phase 1: Upload to Temp + +```typescript +@Post('upload') +@UseGuards(JwtAuthGuard, ThrottlerGuard) +@UseInterceptors(FileInterceptor('file', { + limits: { fileSize: 50 * 1024 * 1024 }, // 50 MB +})) +async uploadTemp( + @UploadedFile() file: Express.Multer.File, + @CurrentUser() user: User, +): Promise<{ tempId: string; expiresAt: string }> { + // 1. Validate MIME + extension (defense in depth) + this.fileValidator.assertAllowed(file); + + // 2. Scan with ClamAV + const scanResult = await this.clamavService.scan(file.buffer); + if (!scanResult.clean) { + throw new BusinessException( + `ClamAV rejected: ${scanResult.signature}`, + 'ไฟล์ไม่ปลอดภัย ระบบตรวจพบความเสี่ยง', + 'กรุณาตรวจสอบไฟล์และลองใหม่อีกครั้ง', + 'FILE_INFECTED', + ); + } + + // 3. Save to temp (encrypted at rest) + const tempId = await this.storageService.saveToTemp(file, user.id); + + return { + tempId, + expiresAt: addHours(new Date(), 24).toISOString(), + }; +} +``` + +--- + +## Phase 2: Commit in Transaction + +The business operation (e.g., creating a Correspondence) promotes temp files to permanent **in the same DB transaction**. 
+ +```typescript +async createCorrespondence(dto: CreateCorrespondenceDto, user: User) { + return this.dataSource.transaction(async (manager) => { + // 1. Create domain entity + const entity = await manager.save(Correspondence, { + ...dto, + createdById: user.id, + }); + + // 2. Commit temp files → permanent (ACID together with entity) + await this.storageService.commitFiles( + dto.tempFileIds, + { entityId: entity.id, entityType: 'correspondence' }, + manager, + ); + + return entity; + }); +} +``` + +If the transaction rolls back, temp files remain and expire in 24h — no orphaned permanent files. + +--- + +## StorageService Contract + +```typescript +export interface StorageService { + saveToTemp(file: Express.Multer.File, ownerId: number): Promise; + commitFiles( + tempIds: string[], + target: { entityId: number; entityType: string }, + manager: EntityManager, + ): Promise; + purgeExpiredTemp(): Promise; // called by cron + getPermanentPath(fileId: number): Promise; +} +``` + +--- + +## ❌ Forbidden + +```typescript +// ❌ Direct write to permanent +fs.writeFileSync(`/var/storage/${file.originalname}`, file.buffer); + +// ❌ Skip ClamAV +await this.storageService.savePermanent(file); + +// ❌ Non-whitelist MIME +@UseInterceptors(FileInterceptor('file')) // no size or type limit + +// ❌ Commit outside transaction +const entity = await this.repo.save(...); +await this.storageService.commitFiles(tempIds, ...); // race: entity exists, files may fail +``` + +--- + +## Reference + +- [ADR-016 Security & Authentication](../../../../specs/06-Decision-Records/ADR-016-security-authentication.md) +- [Edge Cases](../../../../specs/01-Requirements/01-06-edge-cases-and-rules.md) — file upload scenarios diff --git a/.agents/skills/nestjs-best-practices/scripts/build-agents.ts b/.agents/skills/nestjs-best-practices/scripts/build-agents.ts index 2c64b11..b3d2605 100644 --- a/.agents/skills/nestjs-best-practices/scripts/build-agents.ts +++ 
b/.agents/skills/nestjs-best-practices/scripts/build-agents.ts @@ -32,6 +32,7 @@ const CATEGORIES = [ { prefix: 'api-', name: 'API Design', impact: 'MEDIUM', section: 8 }, { prefix: 'micro-', name: 'Microservices', impact: 'MEDIUM', section: 9 }, { prefix: 'devops-', name: 'DevOps & Deployment', impact: 'LOW-MEDIUM', section: 10 }, + { prefix: 'lcbp3-', name: 'LCBP3 Project-Specific', impact: 'CRITICAL', section: 11 }, ]; interface RuleFrontmatter { @@ -50,8 +51,10 @@ interface Rule { } function parseFrontmatter(content: string): { frontmatter: RuleFrontmatter | null; body: string } { + // Normalize CRLF → LF so the regex works on Windows-authored files + const normalized = content.replace(/\r\n/g, '\n'); const frontmatterRegex = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/; - const match = content.match(frontmatterRegex); + const match = normalized.match(frontmatterRegex); if (!match) { return { frontmatter: null, body: content }; diff --git a/.agents/skills/next-best-practices/SKILL.md b/.agents/skills/next-best-practices/SKILL.md index 35761e7..419423a 100644 --- a/.agents/skills/next-best-practices/SKILL.md +++ b/.agents/skills/next-best-practices/SKILL.md @@ -1,6 +1,8 @@ --- name: next-best-practices -description: Next.js best practices - file conventions, RSC boundaries, data patterns, async APIs, metadata, error handling, route handlers, image/font optimization, bundling +description: Next.js best practices for LCBP3-DMS frontend. Enforces ADR-019 (publicId only, no parseInt/id fallback), TanStack Query + RHF + Zod, shadcn/ui, i18n, ADR-007 error UX, ADR-021 IntegratedBanner/WorkflowLifecycle, two-phase file upload. 
+version: 1.8.9 +scope: frontend user-invocable: false --- @@ -157,6 +159,24 @@ See [parallel-routes.md](./parallel-routes.md) for: - `default.tsx` for fallbacks - Closing modals correctly with `router.back()` +## i18n (Thai / English) + +See [i18n.md](./i18n.md) for: + +- `useTranslations('namespace')` pattern +- Key naming (kebab-case, feature-namespaced) +- When Zod messages stay inline vs i18n +- Server-side `userMessage` passthrough + +## Two-Phase File Upload + +See [two-phase-upload.md](./two-phase-upload.md) for: + +- `useDropzone` + `useMutation` hook +- `tempFileIds` form-state pattern +- Whitelist MIME / max-size (must mirror backend) +- Clear-on-submit / expired-temp handling + ## Self-Hosting See [self-hosting.md](./self-hosting.md) for: @@ -204,28 +224,38 @@ const form = useForm({ }); ``` -### ADR-019 UUID Handling (CRITICAL) +### ADR-019 UUID Handling (CRITICAL — March 2026 Pattern) + +> **Updated:** ใช้ `publicId` ตรงๆ — ห้ามใช้ `id ?? ''` fallback หรือ `uuid` ร่วม. ```tsx -// Interface ต้องมีทั้ง id และ publicId +// ✅ CORRECT — Interface มีแค่ publicId interface Contract { - id?: number; // Internal (อาจ undefined) - publicId?: string; // UUID - ใช้ตัวนี้ + publicId?: string; // UUID from API — ใช้ตัวนี้ contractCode: string; + contractName: string; } -// Select options - ใช้ pattern นี้เสมอ +// ✅ CORRECT — Select options (ไม่มี fallback) const options = contracts.map((c) => ({ label: `${c.contractName} (${c.contractCode})`, - value: String(c.publicId ?? c.id ?? ''), // fallback pattern - key: String(c.publicId ?? c.id ?? ''), + value: c.publicId ?? '', // ใช้ publicId ล้วน + key: c.publicId ?? c.contractCode, // fallback ไป business field ได้ })); -// ❌ ห้ามใช้ parseInt บน UUID -// const id = parseInt(projectId); // WRONG! +// ❌ WRONG — pattern เก่า (ห้าม) +interface OldContract { + id?: number; // ❌ อย่า expose INT id + uuid?: string; // ❌ ใช้ชื่อ uuid + publicId?: string; +} +const oldValue = String(c.publicId ?? c.id ?? ''); // ❌ `id ?? 
''` fallback ห้าม -// ✅ ส่ง UUID string ตรงๆ -apiClient.get(`/projects/${projectId}`); // projectId is UUID string +// ❌ NEVER parseInt on UUID +// const badId = parseInt(projectPublicId); // "019505..." → 19 (WRONG!) + +// ✅ ส่ง UUID string ตรงๆ ไป API +apiClient.get(`/projects/${projectPublicId}`); ``` ### Naming Conventions @@ -312,13 +342,17 @@ apiClient.interceptors.request.use((config) => { ### Anti-Patterns (ห้ามทำ) -- ❌ Fetch data ใน useEffect โดยตรง +- ❌ Fetch data ใน useEffect โดยตรง (ใช้ TanStack Query) - ❌ Props drilling ลึกเกิน 3 levels - ❌ Inline styles (ใช้ Tailwind) -- ❌ console.log ใน production -- ❌ parseInt() บน UUID values +- ❌ `console.log` ใน committed code +- ❌ `parseInt()` / `Number()` / `+` บน UUID values (ADR-019) +- ❌ `id ?? ''` fallback บน `publicId` (ใช้ `publicId ?? ''` หรือ fallback ไป business field) +- ❌ Expose `uuid` คู่กับ `publicId` ใน interface (ใช้ `publicId` อย่างเดียว) - ❌ ใช้ index เป็น key ใน list - ❌ Snake_case ใน form field names (ใช้ camelCase) +- ❌ Hardcode Thai/English string ใน component (ใช้ i18n keys) +- ❌ `any` type (strict mode) --- diff --git a/.agents/skills/next-best-practices/i18n.md b/.agents/skills/next-best-practices/i18n.md new file mode 100644 index 0000000..1ac5642 --- /dev/null +++ b/.agents/skills/next-best-practices/i18n.md @@ -0,0 +1,79 @@ +# i18n (Thai / English) + +LCBP3 frontend **must not** hardcode Thai or English UI strings in components. + +## Rules + +1. **All user-facing strings go through the i18n layer** (`next-intl` / `i18next` — check `frontend/package.json`). +2. **Keys use kebab-case**, namespaced by feature: + - `correspondence.list.title` + - `correspondence.form.submit` + - `common.actions.cancel` +3. **Comments in code remain Thai** (business logic explanation); **only UI copy** goes through i18n. +4. **Error messages** from backend (via ADR-007 `userMessage`) are already localized server-side — render them directly, don't translate client-side. 
+ +--- + +## ❌ Wrong + +```tsx +export function CorrespondenceHeader() { + return
<h1>รายการหนังสือติดต่อ</h1>
; // ❌ hardcoded Thai +} + +toast.success('บันทึกสำเร็จ'); // ❌ hardcoded +``` + +--- + +## ✅ Right + +```tsx +import { useTranslations } from 'next-intl'; + +export function CorrespondenceHeader() { + const t = useTranslations('correspondence.list'); + return
<h1>{t('title')}</h1>
; +} + +toast.success(t('save.success')); +``` + +Translation files: + +```json +// messages/th.json +{ + "correspondence": { + "list": { "title": "รายการหนังสือติดต่อ" }, + "save": { "success": "บันทึกสำเร็จ" } + } +} + +// messages/en.json +{ + "correspondence": { + "list": { "title": "Correspondence List" }, + "save": { "success": "Saved successfully" } + } +} +``` + +--- + +## Zod Error Messages + +Zod error messages shown in forms **do** stay in Thai inline (per `specs/05-Engineering-Guidelines/05-03-frontend-guidelines.md`), because they're schema-bound and rarely need translation. If dual-language support becomes required, wrap with an i18n-aware resolver: + +```ts +const schema = z.object({ + projectUuid: z.string().uuid(t('validation.project.required')), +}); +``` + +--- + +## Reference + +- [i18n Guidelines](../../../specs/05-Engineering-Guidelines/05-08-i18n-guidelines.md) +- [Frontend Guidelines](../../../specs/05-Engineering-Guidelines/05-03-frontend-guidelines.md) diff --git a/.agents/skills/next-best-practices/two-phase-upload.md b/.agents/skills/next-best-practices/two-phase-upload.md new file mode 100644 index 0000000..99fbcf3 --- /dev/null +++ b/.agents/skills/next-best-practices/two-phase-upload.md @@ -0,0 +1,100 @@ +# Two-Phase File Upload (Frontend) + +Pair with [backend two-phase upload rule](../nestjs-best-practices/rules/security-file-two-phase-upload.md). 
+ +## Flow + +``` +User drops file + → POST /files/upload (temp) → { tempId, expiresAt } + → store tempId in form state + → user submits form + → POST /correspondences (with tempFileIds) → backend commits in transaction +``` + +## Hook Pattern + +```tsx +'use client'; + +import { useDropzone } from 'react-dropzone'; +import { useMutation } from '@tanstack/react-query'; + +export function useTwoPhaseUpload() { + const uploadTemp = useMutation({ + mutationFn: async (file: File) => { + const fd = new FormData(); + fd.append('file', file); + const { data } = await apiClient.post<{ tempId: string; expiresAt: string }>( + '/files/upload', + fd, + ); + return data; + }, + }); + + return uploadTemp; +} +``` + +## Form Integration (RHF) + +```tsx +export function CorrespondenceForm() { + const form = useForm({ resolver: zodResolver(schema) }); + const uploadTemp = useTwoPhaseUpload(); + const [tempFileIds, setTempFileIds] = useState([]); + + const { getRootProps, getInputProps } = useDropzone({ + accept: { + 'application/pdf': ['.pdf'], + 'image/vnd.dwg': ['.dwg'], + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': ['.docx'], + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx'], + 'application/zip': ['.zip'], + }, + maxSize: 50 * 1024 * 1024, // 50 MB — must match backend + onDrop: async (files) => { + const results = await Promise.all(files.map((f) => uploadTemp.mutateAsync(f))); + setTempFileIds((prev) => [...prev, ...results.map((r) => r.tempId)]); + }, + }); + + const onSubmit = async (values: FormData) => { + await correspondenceService.create({ + ...values, + tempFileIds, // committed server-side in the same DB transaction + }); + setTempFileIds([]); + }; + + return ( +
+    <form onSubmit={form.handleSubmit(onSubmit)}>
+      <div {...getRootProps()}>
+        <input {...getInputProps()} />
+        <p>{t('upload.dragDrop')}</p>
+      </div>
+      {/* other fields */}
+    </form>
+ ); +} +``` + +## Rules + +- **Whitelist MIME types** — must mirror backend ADR-016 whitelist (`.pdf`, `.dwg`, `.docx`, `.xlsx`, `.zip`). +- **50 MB cap** — enforce client-side too (better UX) plus server-side (authoritative). +- **Show temp-file pills** with remove button — users see what will be attached. +- **Clear `tempFileIds` on success/cancel** — prevent stale IDs on subsequent submits. +- **No retry of expired temps** — if `expiresAt` passed, prompt re-upload. + +## ❌ Forbidden + +- ❌ Uploading directly to permanent storage endpoint (no commit phase) +- ❌ Hardcoded MIME list in component (keep in shared constant file mirrored from backend) +- ❌ Ignoring `maxSize` — backend will reject but UX suffers + +## Reference + +- [ADR-016 Security](../../../specs/06-Decision-Records/ADR-016-security-authentication.md) +- Backend rule: [`security-file-two-phase-upload.md`](../nestjs-best-practices/rules/security-file-two-phase-upload.md) diff --git a/.agents/skills/next-best-practices/uuid-handling.md b/.agents/skills/next-best-practices/uuid-handling.md index 4029213..a5fa0f9 100644 --- a/.agents/skills/next-best-practices/uuid-handling.md +++ b/.agents/skills/next-best-practices/uuid-handling.md @@ -1,17 +1,19 @@ -# UUID Handling (ADR-019) +# UUID Handling (ADR-019) — March 2026 Pattern **Project-specific: Hybrid Identifier Strategy for NAP-DMS** This project uses ADR-019: INT Primary Key (internal) + UUIDv7 (public API). Frontend code must handle this correctly. +> **Updated pattern:** Backend exposes `publicId` directly — ไม่มี `@Expose({ name: 'id' })` rename แล้ว. Frontend ใช้ `publicId` ตรงๆ — ห้าม fallback ไป `id`. 
+ ## The Pattern -| Source | Field Name | Type | Notes | -|--------|------------|------|-------| -| **API Response** | `id` | `string` (UUID) | Actually `publicId` exposed via `@Expose({ name: 'id' })` | -| **TypeScript Interface** | `publicId?: string` | UUID string | Use this for all references | -| **Fallback** | `id?: number` | INT (internal) | May be undefined due to `@Exclude()` | -| **Form Values** | `xxxUuid` | `string` | DTO field names: `projectUuid`, `contractUuid` | +| Source | Field Name | Type | Notes | +| ------------------------ | ------------------- | ----------------- | ----------------------------------------------------------- | +| **API Response** | `publicId` | `string` (UUIDv7) | Exposed directly (no rename) | +| **TypeScript Interface** | `publicId?: string` | UUID string | ใช้ตัวนี้เท่านั้น | +| **Form DTO** | `xxxUuid` | `string` | DTO field names: `projectUuid`, `contractUuid` (input only) | +| **URL param** | `[publicId]` | `string` (UUID) | e.g. `/correspondences/[publicId]/page.tsx` | ## Critical Rules @@ -31,22 +33,26 @@ const id = +projectId; // NaN apiClient.get(`/projects/${projectId}`); // projectId is already UUID string ``` -### 2. Use `publicId ?? id` Pattern +### 2. Use `publicId` Only — NO `id ?? ''` Fallback ```tsx -// types/project.ts +// ✅ CORRECT — types/project.ts interface Project { - id?: number; // Internal INT (may be undefined) - publicId?: string; // UUID from API (use this) + publicId?: string; // UUID from API — ใช้ตัวนี้เท่านั้น projectCode: string; projectName: string; } -// Component usage +// ✅ CORRECT — Component usage const projectOptions = projects.map((p) => ({ label: `${p.projectName} (${p.projectCode})`, - value: String(p.publicId ?? p.id ?? ''), // ADR-019 pattern - key: String(p.publicId ?? p.id ?? ''), + value: p.publicId ?? '', // ADR-019 — ไม่ต้อง String() และไม่ไป id + key: p.publicId ?? 
p.projectCode, // fallback ไป business field ได้ +})); + +// ❌ WRONG — pattern เก่า +const oldOptions = projects.map((p) => ({ + value: String(p.publicId ?? p.id ?? ''), // ❌ `id ?? ''` fallback })); ``` @@ -84,14 +90,13 @@ export function ContractSelect({ contracts, value, onChange }: ContractSelectPro - {contracts.map((c) => ( - - {c.contractName} ({c.contractCode}) - - ))} + {contracts + .filter((c) => !!c.publicId) // กรอง contract ที่มี publicId เท่านั้น + .map((c) => ( + + {c.contractName} ({c.contractCode}) + + ))} ); @@ -113,7 +118,9 @@ const columns: ColumnDef[] = [ cell: ({ row }) => { const contract = row.original.contract; return contract ? ( - {contract.contractName} ({contract.contractCode}) + + {contract.contractName} ({contract.contractCode}) + ) : ( - ); @@ -153,10 +160,9 @@ export const contractService = { ## TypeScript Interfaces ```tsx -// types/entities.ts +// ✅ CORRECT — types/entities.ts export interface BaseEntity { - id?: number; // Internal INT - may be undefined - publicId?: string; // UUID - use this for API calls + publicId?: string; // UUID — ใช้ตัวนี้เท่านั้น (ไม่มี INT id ใน interface) createdAt?: string; updatedAt?: string; } @@ -170,14 +176,12 @@ export interface Project extends BaseEntity { export interface Contract extends BaseEntity { contractCode: string; contractName: string; - projectId?: number; // Internal INT FK - projectUuid?: string; // UUID for DTOs - project?: Project; // Relation + project?: Project; // Relation (nested entity) } -// DTOs +// DTO (input only — รับ UUID จาก form) export interface CreateContractDto { - projectUuid: string; // Accept UUID from form + projectUuid: string; // UUID string from select contractCode: string; contractName: string; } @@ -215,9 +219,7 @@ export function ContractForm() { return (
- - {/* Form fields */} -
+
{/* Form fields */}
); } @@ -231,19 +233,20 @@ export default async function ContractPage({ params }: { params: Promise<{ id: s const { id } = await params; // id is UUID string from URL const contract = await contractService.getById(id); - + return ; } ``` ## Common Pitfalls -| Pitfall | Wrong | Right | -|---------|-------|-------| -| Assuming `entity.id` exists | `key={entity.id}` | `key={entity.publicId ?? entity.id}` | -| parseInt on UUID | `parseInt(projectId)` | `projectId` (string) | -| Field name mismatch | `name="project_id"` | `name="projectUuid"` | -| Missing fallback | `value={entity.publicId}` | `value={entity.publicId ?? entity.id ?? ''}` | +| Pitfall | ❌ Wrong | ✅ Right | +| ---------------------------- | ------------------------------------------------ | --------------------------------- | +| Using INT `id` | `key={entity.id}` | `key={entity.publicId}` | +| parseInt on UUID | `parseInt(projectId)` | `projectId` (string) | +| Field name mismatch | `name="project_id"` | `name="projectUuid"` | +| `id ?? ''` fallback | `value={publicId ?? id ?? ''}` | `value={publicId ?? 
''}` | +| `uuid` + `publicId` together | `interface { uuid?: string; publicId?: string }` | `interface { publicId?: string }` | ## Reference diff --git a/.agents/skills/skills.md b/.agents/skills/skills.md index 5a46fa5..c57cdc2 100644 --- a/.agents/skills/skills.md +++ b/.agents/skills/skills.md @@ -1,17 +1,20 @@ -# 🧠 NAP-DMS Agent Skills (v1.8.6) +# 🧠 NAP-DMS Agent Skills (v1.8.9) -ไฟล์นี้กำหนดทักษะและความสามารถเฉพาะทางของ Document Intelligence Engine สำหรับโครงการ LCBP3 v1.8.6 เพื่อรักษามาตรฐานสูงสุดด้าน Security และ Data Integrity +ไฟล์นี้กำหนดทักษะและความสามารถเฉพาะทางของ Document Intelligence Engine สำหรับโครงการ LCBP3 v1.8.9 เพื่อรักษามาตรฐานสูงสุดด้าน Security และ Data Integrity -**Status**: Production Ready | **Last Updated**: 2026-04-14 | **Total Skills**: 20 +**Status**: Production Ready | **Last Updated**: 2026-04-22 | **Total Skills**: 20 + +> 📌 Shared context for all speckit-\* skills: see [`_LCBP3-CONTEXT.md`](./_LCBP3-CONTEXT.md). --- ## 🏗️ Architectural & Data Integrity -- **Identifier Strategy Mastery (ADR-019):** - - บังคับใช้ **UUIDv7** เป็น Public ID ใน API และ URL เสมอ - - ตรวจสอบและป้องกันการใช้ `parseInt()`, `Number()`, หรือตัวดำเนินการทางคณิตศาสตร์ (`+`) กับ UUID - - ตรวจสอบว่า Entity มีการใช้ `@Exclude()` บน Primary Key ที่เป็น `INT AUTO_INCREMENT` เพื่อไม่ให้หลุดออกไปยัง API +- **Identifier Strategy Mastery (ADR-019 — March 2026):** + - บังคับใช้ **UUIDv7** เป็น Public ID; entity สืบทอดจาก `UuidBaseEntity` และเปิด `publicId` **ตรงๆ** (ห้ามใช้ `@Expose({ name: 'id' })` rename) + - ตรวจสอบและป้องกันการใช้ `parseInt()`, `Number()`, หรือ `+` กับ UUID ทั้ง backend/frontend + - ตรวจสอบว่า Entity มีการใช้ `@Exclude()` บน Primary Key `INT AUTO_INCREMENT` เพื่อไม่ให้หลุดออกไปยัง API + - Frontend ใช้ `publicId` ตรงๆ — **ห้าม** `id ?? 
''` fallback หรือมี `uuid?: string` คู่กับ `publicId` ใน interface - **Strict Validation Engine:** - บังคับใช้ **Zod** สำหรับการทำ Form Validation ฝั่ง Frontend - บังคับใช้ **class-validator** สำหรับ Backend DTOs @@ -81,22 +84,22 @@ ## 🛠️ Skill Health Monitoring -### Health Check Scripts +### Health Check Scripts (from repo root) -- **Bash**: `./scripts/bash/audit-skills.sh` - Comprehensive skill health audit -- **PowerShell**: `./scripts/powershell/audit-skills.ps1` - Windows equivalent +- **Bash**: `./.agents/scripts/bash/audit-skills.sh` - Comprehensive skill health audit +- **PowerShell**: `./.agents/scripts/powershell/audit-skills.ps1` - Windows equivalent ### Validation Scripts -- **Version Check**: `./scripts/bash/validate-versions.sh` - Ensure version consistency -- **Workflow Sync**: `./scripts/bash/sync-workflows.sh` - Verify workflow integration +- **Version Check**: `./.agents/scripts/bash/validate-versions.sh` - Ensure version consistency +- **Workflow Sync**: `./.agents/scripts/bash/sync-workflows.sh` - Verify workflow integration ### Health Metrics - **Total Skills**: 20 implemented -- **Version Alignment**: v1.8.6 across all skills +- **Version Alignment**: v1.8.9 across all skills - **Template Coverage**: 100% for skills requiring templates -- **Documentation**: Complete front matter and sections +- **Documentation**: Complete front matter + shared `_LCBP3-CONTEXT.md` appendix ### Maintenance Schedule diff --git a/.agents/skills/speckit-analyze/SKILL.md b/.agents/skills/speckit-analyze/SKILL.md index 704bd58..adc875c 100644 --- a/.agents/skills/speckit-analyze/SKILL.md +++ b/.agents/skills/speckit-analyze/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-analyze description: Perform a non-destructive cross-artifact consistency and quality analysis across spec.md, plan.md, and tasks.md after task generation. 
-version: 1.0.0 +version: 1.8.9 depends-on: - speckit-tasks --- @@ -28,7 +28,7 @@ Identify inconsistencies, duplications, ambiguities, and underspecified items ac **STRICTLY READ-ONLY**: Do **not** modify any files. Output a structured analysis report. Offer an optional remediation plan (user must explicitly approve before any follow-up editing commands would be invoked manually). -**Constitution Authority**: The project constitution (`.specify/memory/constitution.md`) is **non-negotiable** within this analysis scope. Constitution conflicts are automatically CRITICAL and require adjustment of the spec, plan, or tasks—not dilution, reinterpretation, or silent ignoring of the principle. If a principle itself needs to change, that must occur in a separate, explicit constitution update outside `/speckit-analyze`. +**Constitution Authority**: The project constitution (`AGENTS.md`) is **non-negotiable** within this analysis scope. Constitution conflicts are automatically CRITICAL and require adjustment of the spec, plan, or tasks—not dilution, reinterpretation, or silent ignoring of the principle. If a principle itself needs to change, that must occur in a separate, explicit constitution update outside `/speckit-analyze`. ### Steps @@ -72,7 +72,7 @@ Load only the minimal necessary context from each artifact: **From constitution:** -- Load `.specify/memory/constitution.md` for principle validation +- Load `AGENTS.md` for principle validation ### 3. 
Build Semantic Models @@ -192,3 +192,15 @@ Ask the user: "Would you like me to suggest concrete remediation edits for the t ## Context {{args}} + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-checker/SKILL.md b/.agents/skills/speckit-checker/SKILL.md index 332ec88..919c252 100644 --- a/.agents/skills/speckit-checker/SKILL.md +++ b/.agents/skills/speckit-checker/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-checker description: Run static analysis tools and aggregate results. -version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -157,3 +157,15 @@ Auto-detect available tools, run them, and aggregate results into a prioritized - **Be Actionable**: Every issue should have a clear fix path - **Don't Duplicate**: Dedupe issues found by multiple tools - **Respect Configs**: Honor project's existing linter configs + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-checklist/SKILL.md b/.agents/skills/speckit-checklist/SKILL.md index f6674d8..ddbfe88 100644 --- 
a/.agents/skills/speckit-checklist/SKILL.md +++ b/.agents/skills/speckit-checklist/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-checklist description: Generate a custom checklist for the current feature based on user requirements. -version: 1.0.0 +version: 1.8.9 --- ## Checklist Purpose: "Unit Tests for English" @@ -300,3 +300,15 @@ Sample items: - Correct: Validation of requirement quality - Wrong: "Does it do X?" - Correct: "Is X clearly specified?" + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-clarify/SKILL.md b/.agents/skills/speckit-clarify/SKILL.md index f8c09ca..779ed28 100644 --- a/.agents/skills/speckit-clarify/SKILL.md +++ b/.agents/skills/speckit-clarify/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-clarify description: Identify underspecified areas in the current feature spec by asking up to 5 highly targeted clarification questions and encoding answers back into the spec. -version: 1.0.0 +version: 1.8.9 depends-on: - speckit-specify handoffs: @@ -189,3 +189,15 @@ Behavior rules: - If quota reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with rationale. 
Context for prioritization: {{args}} + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-constitution/SKILL.md b/.agents/skills/speckit-constitution/SKILL.md index f41dd55..16807fc 100644 --- a/.agents/skills/speckit-constitution/SKILL.md +++ b/.agents/skills/speckit-constitution/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-constitution description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync. -version: 1.0.0 +version: 1.8.9 handoffs: - label: Build Specification agent: speckit-specify @@ -24,11 +24,11 @@ You are the **Antigravity Governance Architect**. Your role is to establish and ### Outline -You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts. +You are updating the project constitution at `AGENTS.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts. Follow this execution flow: -1. Load the existing constitution template at `memory/constitution.md`. +1. 
Load the existing constitution template at `AGENTS.md`. - Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`. **IMPORTANT**: The user might require less or more principles than the ones used in the template. If a number is specified, respect that - follow the general template. You will update the doc accordingly. @@ -49,10 +49,10 @@ Follow this execution flow: - Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations. 4. Consistency propagation checklist (convert prior checklist into active validations): - - Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles. - - Read `.specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints. - - Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline). - - Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required. + - Read `.agents/skills/speckit-plan/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles. + - Read `.agents/skills/speckit-specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints. + - Read `.agents/skills/speckit-tasks/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline). + - Read each command file in `.agents/skills/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required. 
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to principles changed. 5. Produce a Sync Impact Report (prepend as an HTML comment at top of the constitution file after update): @@ -69,7 +69,7 @@ Follow this execution flow: - Dates ISO format YYYY-MM-DD. - Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate). -7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite). +7. Write the completed constitution back to `AGENTS.md` (overwrite). 8. Output a final summary to the user with: - New version and bump rationale. @@ -87,4 +87,16 @@ If the user supplies partial updates (e.g., only one principle revision), still If critical info missing (e.g., ratification date truly unknown), insert `TODO(): explanation` and include in the Sync Impact Report under deferred items. -Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file. +Do not create a new template; always operate on the existing `AGENTS.md` file. + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-diff/SKILL.md b/.agents/skills/speckit-diff/SKILL.md index 996c211..65ea31a 100644 --- a/.agents/skills/speckit-diff/SKILL.md +++ b/.agents/skills/speckit-diff/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-diff description: Compare two versions of a spec or plan to highlight changes. 
-version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -84,3 +84,15 @@ Compare two versions of a specification artifact and produce a structured diff r - **Highlight Impact**: Explain what each change means for implementation - **Flag Breaking Changes**: Any change that invalidates existing work - **Ignore Whitespace**: Focus on semantic changes, not formatting + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-implement/SKILL.md b/.agents/skills/speckit-implement/SKILL.md index e12c424..2bcb744 100644 --- a/.agents/skills/speckit-implement/SKILL.md +++ b/.agents/skills/speckit-implement/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-implement description: Execute the implementation plan by processing and executing all tasks defined in tasks.md (with Ironclad Anti-Regression Protocols) -version: 1.0.0 +version: 1.8.9 depends-on: - speckit-tasks --- @@ -81,7 +81,7 @@ At the start of execution and after every 3 modifications: ### Outline -1. Run `.specify/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot"). +1. Run `../scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. 
For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot"). 2. **Check checklists status** (if FEATURE_DIR/checklists/ exists): - Scan all checklist files in the checklists/ directory @@ -246,3 +246,15 @@ At the start of execution and after every 3 modifications: --- Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/speckit-tasks` first to regenerate the task list. + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-migrate/SKILL.md b/.agents/skills/speckit-migrate/SKILL.md index c736a5a..1f3f23e 100644 --- a/.agents/skills/speckit-migrate/SKILL.md +++ b/.agents/skills/speckit-migrate/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-migrate description: Migrate existing projects into the speckit structure by generating spec.md, plan.md, and tasks.md from existing code. 
-version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -116,3 +116,15 @@ Analyze an existing codebase and generate speckit artifacts (spec.md, plan.md, t - **Preserve Intent**: Use code comments and naming to understand purpose - **Flag TODOs**: Any TODO/FIXME/HACK in code becomes an open task - **Be Conservative**: When unsure, ask rather than assume + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-plan/SKILL.md b/.agents/skills/speckit-plan/SKILL.md index ff91c6d..261217b 100644 --- a/.agents/skills/speckit-plan/SKILL.md +++ b/.agents/skills/speckit-plan/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-plan description: Execute the implementation planning workflow using the plan template to generate design artifacts. -version: 1.0.0 +version: 1.8.9 depends-on: - speckit-specify handoffs: @@ -32,7 +32,7 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet 1. **Setup**: Run `../scripts/bash/setup-plan.sh --json` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot"). -2. **Load context**: Read FEATURE_SPEC and `.specify/memory/constitution.md`. Load IMPL_PLAN template from `templates/plan-template.md`. +2. **Load context**: Read FEATURE_SPEC and `AGENTS.md`. Load IMPL_PLAN template from `templates/plan-template.md`. 3. 
**Execute plan workflow**: Follow the structure in IMPL_PLAN template to: - Fill Technical Context (mark unknowns as "NEEDS CLARIFICATION") @@ -85,7 +85,7 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet - Output OpenAPI/GraphQL schema to `/contracts/` 3. **Agent context update**: - - Run `../scripts/bash/update-agent-context.sh gemini` + - Run `../scripts/bash/update-agent-context.sh windsurf` - These scripts detect which AI agent is in use - Update the appropriate agent-specific context file - Add only new technology from current plan @@ -97,3 +97,15 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet - Use absolute paths - ERROR on gate failures or unresolved clarifications + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-plan/templates/plan-template.md b/.agents/skills/speckit-plan/templates/plan-template.md index 54438f9..d7f4e4e 100644 --- a/.agents/skills/speckit-plan/templates/plan-template.md +++ b/.agents/skills/speckit-plan/templates/plan-template.md @@ -3,7 +3,7 @@ **Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link] **Input**: Feature specification from `/specs/[###-feature-name]/spec.md` -**Note**: This template is filled in by the `/speckit-plan` command. See `.specify/templates/commands/plan.md` for the execution workflow. +**Note**: This template is filled in by the `/speckit-plan` command. See `.agents/skills/speckit-plan/SKILL.md` for the execution workflow. 
## Summary diff --git a/.agents/skills/speckit-quizme/SKILL.md b/.agents/skills/speckit-quizme/SKILL.md index c4bc4d8..76923d3 100644 --- a/.agents/skills/speckit-quizme/SKILL.md +++ b/.agents/skills/speckit-quizme/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-quizme description: Challenge the specification with Socratic questioning to identify logical gaps, unhandled edge cases, and robustness issues. -version: 1.0.0 +version: 1.8.9 handoffs: - label: Clarify Spec Requirements agent: speckit-clarify @@ -65,3 +65,15 @@ Execution steps: - **Be a Skeptic**: Don't assume the happy path works. - **Focus on "When" and "If"**: When high load, If network drops, When concurrent edits. - **Don't be annoying**: Focus on _critical_ flaws, not nitpicks. + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-reviewer/SKILL.md b/.agents/skills/speckit-reviewer/SKILL.md index e231861..fb56968 100644 --- a/.agents/skills/speckit-reviewer/SKILL.md +++ b/.agents/skills/speckit-reviewer/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-reviewer description: Perform code review with actionable feedback and suggestions. -version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -142,3 +142,15 @@ Review code changes and provide structured feedback with severity levels. 
- **Be Balanced**: Mention what's good, not just what's wrong - **Prioritize**: Focus on real issues, not style nitpicks - **Be Educational**: Explain WHY something is an issue + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-security-audit/SKILL.md b/.agents/skills/speckit-security-audit/SKILL.md index d8ccbba..850dfc9 100644 --- a/.agents/skills/speckit-security-audit/SKILL.md +++ b/.agents/skills/speckit-security-audit/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-security-audit description: Perform a security-focused audit of the codebase against OWASP Top 10, CASL authorization, and LCBP3-DMS security requirements. -version: 1.0.0 +version: 1.8.9 depends-on: - speckit-checker --- @@ -12,16 +12,16 @@ You are the **Antigravity Security Sentinel**. Your mission is to identify secur ## Task -Perform a comprehensive security audit covering OWASP Top 10, CASL permission enforcement, file upload safety, and project-specific security rules defined in `specs/06-Decision-Records/ADR-016-security.md`. +Perform a comprehensive security audit covering OWASP Top 10, CASL permission enforcement, file upload safety, and project-specific security rules defined in `specs/06-Decision-Records/ADR-016-security-authentication.md`. ## Context Loading Before auditing, load the security context: -1. Read `specs/06-Decision-Records/ADR-016-security.md` for project security decisions +1. Read `specs/06-Decision-Records/ADR-016-security-authentication.md` for project security decisions 2. 
Read `specs/05-Engineering-Guidelines/05-02-backend-guidelines.md` for backend security patterns -3. Read `specs/03-Data-and-Storage/lcbp3-v1.7.0-seed-permissions.sql` for CASL permission definitions -4. Read `GEMINI.md` for security rules (Section: Security & Integrity Rules) +3. Read `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql` for CASL permission definitions +4. Read `AGENTS.md` for security rules (Section: Security Rules Non-Negotiable + Security & Integrity Audit Protocol) ## Execution Steps @@ -44,7 +44,7 @@ Scan the `backend/src/` directory for each OWASP category: ### Phase 2: CASL Authorization Audit -1. **Load permission matrix** from `specs/03-Data-and-Storage/lcbp3-v1.7.0-seed-permissions.sql` +1. **Load permission matrix** from `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql` 2. **Scan all controllers** for `@UseGuards(CaslAbilityGuard)` coverage: ```bash @@ -197,3 +197,15 @@ Generate a structured report: - **No False Confidence**: If a check is inconclusive, mark it as "⚠️ Needs Manual Review" rather than passing. - **LCBP3-Specific**: Prioritize project-specific rules (idempotency, ClamAV, Redlock) over generic checks. - **Frontend Too**: If scope includes frontend, also check for XSS in React components, unescaped user data, and exposed API keys. 
+ +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-specify/SKILL.md b/.agents/skills/speckit-specify/SKILL.md index 971851a..a2cab74 100644 --- a/.agents/skills/speckit-specify/SKILL.md +++ b/.agents/skills/speckit-specify/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-specify description: Create or update the feature specification from a natural language feature description. -version: 1.0.0 +version: 1.8.9 handoffs: - label: Build Technical Plan agent: speckit-plan @@ -64,8 +64,8 @@ Given that feature description, do this: d. 
Run the script `../scripts/bash/create-new-feature.sh --json "{{args}}"` with the calculated number and short-name: - Pass `--number N+1` and `--short-name "your-short-name"` along with the feature description - - Bash example: `.specify/scripts/bash/create-new-feature.sh --json "{{args}}" --json --number 5 --short-name "user-auth" "Add user authentication"` - - PowerShell example: `.specify/scripts/bash/create-new-feature.sh --json "{{args}}" -Json -Number 5 -ShortName "user-auth" "Add user authentication"` + - Bash example: `.agents/scripts/bash/create-new-feature.sh --json "{{args}}" --number 5 --short-name "user-auth" "Add user authentication"` + - PowerShell example: `.agents/scripts/powershell/create-new-feature.ps1 -Json -Args '{{args}}' -Number 5 -ShortName "user-auth" "Add user authentication"` **IMPORTANT**: - Check all three sources (remote branches, local branches, specs directories) to find the highest number @@ -262,3 +262,15 @@ Success criteria must be: - "Database can handle 1000 TPS" (implementation detail, use user-facing metric) - "React components render efficiently" (framework-specific) - "Redis cache hit rate above 80%" (technology-specific) + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../\_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist diff --git a/.agents/skills/speckit-status/SKILL.md b/.agents/skills/speckit-status/SKILL.md index ba47850..6c4374a 100644 --- a/.agents/skills/speckit-status/SKILL.md +++ b/.agents/skills/speckit-status/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-status description: Display a dashboard showing feature status, 
completion percentage, and blockers. -version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -109,3 +109,15 @@ Generate a dashboard view of all features and their completion status. - **Be Visual**: Use progress bars and tables - **Be Actionable**: Every status should have a "next action" - **Be Fast**: Cache nothing, always recalculate + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-tasks/SKILL.md b/.agents/skills/speckit-tasks/SKILL.md index ecce112..bedb6e3 100644 --- a/.agents/skills/speckit-tasks/SKILL.md +++ b/.agents/skills/speckit-tasks/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-tasks description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts. 
-version: 1.0.0 +version: 1.8.9 depends-on: - speckit-plan handoffs: @@ -145,3 +145,15 @@ Every task MUST strictly follow this format: - Within each story: Tests (if requested) → Models → Services → Endpoints → Integration - Each phase should be a complete, independently testable increment - **Final Phase**: Polish & Cross-Cutting Concerns + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-taskstoissues/SKILL.md b/.agents/skills/speckit-taskstoissues/SKILL.md index fcc88d5..10cc592 100644 --- a/.agents/skills/speckit-taskstoissues/SKILL.md +++ b/.agents/skills/speckit-taskstoissues/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-taskstoissues description: Convert existing tasks into actionable, dependency-ordered issues for the feature based on available design artifacts. 
-version: 1.1.0 +version: 1.8.9 depends-on: - speckit-tasks tools: ['github/github-mcp-server/issue_write'] @@ -204,3 +204,15 @@ Convert all tasks from `tasks.md` into well-structured issues on the appropriate - **Label Consistency**: Use a consistent label taxonomy across all issues - **Platform Safety**: Never create issues on repos that don't match the git remote - **Dry Run Support**: Always support `--dry-run` to preview before creating + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-tester/SKILL.md b/.agents/skills/speckit-tester/SKILL.md index 6236816..2e131be 100644 --- a/.agents/skills/speckit-tester/SKILL.md +++ b/.agents/skills/speckit-tester/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-tester description: Execute tests, measure coverage, and report results. 
-version: 1.0.0 +version: 1.8.9 depends-on: [] --- @@ -120,3 +120,15 @@ Detect the project's test framework, execute tests, and generate a comprehensive - **Preserve Output**: Keep full test output for debugging - **Be Helpful**: Suggest fixes for common failure patterns - **Respect Timeouts**: Set reasonable timeout (5 min default) + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.agents/skills/speckit-validate/SKILL.md b/.agents/skills/speckit-validate/SKILL.md index c7caed6..08f0abc 100644 --- a/.agents/skills/speckit-validate/SKILL.md +++ b/.agents/skills/speckit-validate/SKILL.md @@ -1,7 +1,7 @@ --- name: speckit-validate description: Validate that implementation matches specification requirements. -version: 1.0.0 +version: 1.8.9 depends-on: - speckit-implement --- @@ -92,3 +92,15 @@ Post-implementation validation that compares code against spec requirements. 
- **Be Fair**: Semantic matching, not just keyword matching - **Be Actionable**: Every gap should have a clear fix recommendation - **Don't Block on Style**: Focus on functional coverage, not code style + +--- + +## LCBP3-DMS Context (MUST LOAD) + +Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get: + +- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/) +- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors) +- Domain glossary (Correspondence / RFA / Transmittal / Circulation) +- Helper script real paths +- Commit checklist \ No newline at end of file diff --git a/.windsurf/workflows/00-speckit.all.md b/.windsurf/workflows/00-speckit.all.md index 504fba7..f743d8d 100644 --- a/.windsurf/workflows/00-speckit.all.md +++ b/.windsurf/workflows/00-speckit.all.md @@ -10,27 +10,27 @@ This meta-workflow orchestrates the **complete development lifecycle**, from spe ## Preparation Phase (Steps 1-5) 1. **Specify** (`/speckit.specify`): - - Use the `view_file` tool to read: `.agents/skills/speckit.specify/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-specify/SKILL.md` - Execute with user's feature description - Creates: `spec.md` 2. **Clarify** (`/speckit.clarify`): - - Use the `view_file` tool to read: `.agents/skills/speckit.clarify/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-clarify/SKILL.md` - Execute to resolve ambiguities - Updates: `spec.md` 3. **Plan** (`/speckit.plan`): - - Use the `view_file` tool to read: `.agents/skills/speckit.plan/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-plan/SKILL.md` - Execute to create technical design - Creates: `plan.md` 4. 
**Tasks** (`/speckit.tasks`): - - Use the `view_file` tool to read: `.agents/skills/speckit.tasks/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-tasks/SKILL.md` - Execute to generate task breakdown - Creates: `tasks.md` 5. **Analyze** (`/speckit.analyze`): - - Use the `view_file` tool to read: `.agents/skills/speckit.analyze/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-analyze/SKILL.md` - Execute to validate consistency across spec, plan, and tasks - Output: Analysis report - **Gate**: If critical issues found, stop and fix before proceeding @@ -38,29 +38,29 @@ This meta-workflow orchestrates the **complete development lifecycle**, from spe ## Implementation Phase (Steps 6-7) 6. **Implement** (`/speckit.implement`): - - Use the `view_file` tool to read: `.agents/skills/speckit.implement/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-implement/SKILL.md` - Execute all tasks from `tasks.md` with anti-regression protocols - Output: Working implementation 7. **Check** (`/speckit.checker`): - - Use the `view_file` tool to read: `.agents/skills/speckit.checker/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-checker/SKILL.md` - Run static analysis (linters, type checkers, security scanners) - Output: Checker report ## Verification Phase (Steps 8-10) 8. **Test** (`/speckit.tester`): - - Use the `view_file` tool to read: `.agents/skills/speckit.tester/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-tester/SKILL.md` - Run tests with coverage - Output: Test + coverage report 9. **Review** (`/speckit.reviewer`): - - Use the `view_file` tool to read: `.agents/skills/speckit.reviewer/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-reviewer/SKILL.md` - Perform code review - Output: Review report with findings 10. 
**Validate** (`/speckit.validate`): - - Use the `view_file` tool to read: `.agents/skills/speckit.validate/SKILL.md` + - Use the `view_file` tool to read: `.agents/skills/speckit-validate/SKILL.md` - Verify implementation matches spec requirements - Output: Validation report (pass/fail) diff --git a/.windsurf/workflows/01-speckit.constitution.md b/.windsurf/workflows/01-speckit.constitution.md index 0e51978..093340c 100644 --- a/.windsurf/workflows/01-speckit.constitution.md +++ b/.windsurf/workflows/01-speckit.constitution.md @@ -9,7 +9,7 @@ description: Create or update the project constitution from interactive or provi - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.constitution/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-constitution/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/02-speckit.specify.md b/.windsurf/workflows/02-speckit.specify.md index ef504ce..c9e5252 100644 --- a/.windsurf/workflows/02-speckit.specify.md +++ b/.windsurf/workflows/02-speckit.specify.md @@ -10,7 +10,7 @@ description: Create or update the feature specification from a natural language - This is typically the starting point of a new feature. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.specify/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-specify/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. 
diff --git a/.windsurf/workflows/03-speckit.clarify.md b/.windsurf/workflows/03-speckit.clarify.md index df5b4f3..32607d3 100644 --- a/.windsurf/workflows/03-speckit.clarify.md +++ b/.windsurf/workflows/03-speckit.clarify.md @@ -9,7 +9,7 @@ description: Identify underspecified areas in the current feature spec by asking - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.clarify/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-clarify/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/04-speckit.plan.md b/.windsurf/workflows/04-speckit.plan.md index a57993f..8254ab2 100644 --- a/.windsurf/workflows/04-speckit.plan.md +++ b/.windsurf/workflows/04-speckit.plan.md @@ -9,7 +9,7 @@ description: Execute the implementation planning workflow using the plan templat - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.plan/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-plan/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/05-speckit.tasks.md b/.windsurf/workflows/05-speckit.tasks.md index 845d066..6e00ad2 100644 --- a/.windsurf/workflows/05-speckit.tasks.md +++ b/.windsurf/workflows/05-speckit.tasks.md @@ -9,7 +9,7 @@ description: Generate an actionable, dependency-ordered tasks.md for the feature - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.tasks/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-tasks/SKILL.md` 3. 
**Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/06-speckit.analyze.md b/.windsurf/workflows/06-speckit.analyze.md index 6ee384e..b6fe830 100644 --- a/.windsurf/workflows/06-speckit.analyze.md +++ b/.windsurf/workflows/06-speckit.analyze.md @@ -11,7 +11,7 @@ description: Perform a non-destructive cross-artifact consistency and quality an - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.analyze/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-analyze/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/07-speckit.implement.md b/.windsurf/workflows/07-speckit.implement.md index f95d024..1da367f 100644 --- a/.windsurf/workflows/07-speckit.implement.md +++ b/.windsurf/workflows/07-speckit.implement.md @@ -9,7 +9,7 @@ description: Execute the implementation plan by processing and executing all tas - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.implement/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-implement/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/08-speckit.checker.md b/.windsurf/workflows/08-speckit.checker.md index af76d1f..b5b3032 100644 --- a/.windsurf/workflows/08-speckit.checker.md +++ b/.windsurf/workflows/08-speckit.checker.md @@ -11,7 +11,7 @@ description: Run static analysis tools and aggregate results. - The user may specify paths to check or run on entire project. 2. 
**Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.checker/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-checker/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/09-speckit.tester.md b/.windsurf/workflows/09-speckit.tester.md index 3bd0459..9c71ee2 100644 --- a/.windsurf/workflows/09-speckit.tester.md +++ b/.windsurf/workflows/09-speckit.tester.md @@ -11,7 +11,7 @@ description: Execute tests, measure coverage, and report results. - The user may specify test paths, options, or just run all tests. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.tester/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-tester/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/10-speckit.reviewer.md b/.windsurf/workflows/10-speckit.reviewer.md index eddce5a..8d5de7f 100644 --- a/.windsurf/workflows/10-speckit.reviewer.md +++ b/.windsurf/workflows/10-speckit.reviewer.md @@ -9,7 +9,7 @@ description: Perform code review with actionable feedback and suggestions. - The user may specify files to review, "staged" for git staged changes, or "branch" for branch diff. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.reviewer/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-reviewer/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/11-speckit.validate.md b/.windsurf/workflows/11-speckit.validate.md index db7ae3a..ed4bea0 100644 --- a/.windsurf/workflows/11-speckit.validate.md +++ b/.windsurf/workflows/11-speckit.validate.md @@ -9,7 +9,7 @@ description: Validate that implementation matches specification requirements. 
- The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.validate/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-validate/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/12-speckit.security-audit.md b/.windsurf/workflows/12-speckit.security-audit.md new file mode 100644 index 0000000..63d40e2 --- /dev/null +++ b/.windsurf/workflows/12-speckit.security-audit.md @@ -0,0 +1,22 @@ +--- +auto_execution_mode: 0 +description: Perform a security-focused audit of the codebase against OWASP Top 10, CASL authorization, and LCBP3-DMS security requirements. +--- + +# Workflow: speckit.security-audit + +1. **Context Analysis**: + - The user may pass a scope hint: `backend`, `frontend`, `both`, or specific module paths (defaults to `both`). + +2. **Load Skill**: + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-security-audit/SKILL.md` + - Also load `.agents/skills/_LCBP3-CONTEXT.md` for project-specific rules. + +3. **Execute**: + - Follow the instructions in the `SKILL.md` exactly. + - This is READ-ONLY — never modify code during the audit. + - Output a structured report with Critical / High / Medium / Low severity. + +4. **On Error**: + - If scope unclear: Default to `both` (backend + frontend) + - If `specs/06-Decision-Records/ADR-016-security-authentication.md` missing: Warn and proceed with OWASP Top 10 + CASL checks only diff --git a/.windsurf/workflows/speckit.prepare.md b/.windsurf/workflows/speckit.prepare.md index 0570cf9..4f60772 100644 --- a/.windsurf/workflows/speckit.prepare.md +++ b/.windsurf/workflows/speckit.prepare.md @@ -9,20 +9,20 @@ This workflow orchestrates the sequential execution of the Speckit preparation p 1. 
**Step 1: Specify (Skill 02)** - Goal: Create or update the `spec.md` based on user input. - - Action: Read and execute `.agents/skills/speckit.specify/SKILL.md`. + - Action: Read and execute `.agents/skills/speckit-specify/SKILL.md`. 2. **Step 2: Clarify (Skill 03)** - Goal: Refine the `spec.md` by identifying and resolving ambiguities. - - Action: Read and execute `.agents/skills/speckit.clarify/SKILL.md`. + - Action: Read and execute `.agents/skills/speckit-clarify/SKILL.md`. 3. **Step 3: Plan (Skill 04)** - Goal: Generate `plan.md` from the finalized spec. - - Action: Read and execute `.agents/skills/speckit.plan/SKILL.md`. + - Action: Read and execute `.agents/skills/speckit-plan/SKILL.md`. 4. **Step 4: Tasks (Skill 05)** - Goal: Generate actionable `tasks.md` from the plan. - - Action: Read and execute `.agents/skills/speckit.tasks/SKILL.md`. + - Action: Read and execute `.agents/skills/speckit-tasks/SKILL.md`. 5. **Step 5: Analyze (Skill 06)** - Goal: Validate consistency across all design artifacts (spec, plan, tasks). - - Action: Read and execute `.agents/skills/speckit.analyze/SKILL.md`. + - Action: Read and execute `.agents/skills/speckit-analyze/SKILL.md`. diff --git a/.windsurf/workflows/util-speckit.checklist.md b/.windsurf/workflows/util-speckit.checklist.md index 4ac2850..8f6d3b1 100644 --- a/.windsurf/workflows/util-speckit.checklist.md +++ b/.windsurf/workflows/util-speckit.checklist.md @@ -9,7 +9,7 @@ description: Generate a custom checklist for the current feature based on user r - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.checklist/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-checklist/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. 
diff --git a/.windsurf/workflows/util-speckit.diff.md b/.windsurf/workflows/util-speckit.diff.md index 359523c..27c5946 100644 --- a/.windsurf/workflows/util-speckit.diff.md +++ b/.windsurf/workflows/util-speckit.diff.md @@ -9,7 +9,7 @@ description: Compare two versions of a spec or plan to highlight changes. - The user has provided an input prompt (optional file paths or version references). 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.diff/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-diff/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/util-speckit.migrate.md b/.windsurf/workflows/util-speckit.migrate.md index 3c18266..b59c30c 100644 --- a/.windsurf/workflows/util-speckit.migrate.md +++ b/.windsurf/workflows/util-speckit.migrate.md @@ -9,7 +9,7 @@ description: Migrate existing projects into the speckit structure by generating - The user has provided an input prompt (path to analyze, feature name). 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.migrate/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-migrate/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/util-speckit.quizme.md b/.windsurf/workflows/util-speckit.quizme.md index f1fb9c0..3c0bce1 100644 --- a/.windsurf/workflows/util-speckit.quizme.md +++ b/.windsurf/workflows/util-speckit.quizme.md @@ -11,7 +11,7 @@ description: Challenge the specification with Socratic questioning to identify l - The user has provided an input prompt. Treat this as the primary input for the skill. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.quizme/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-quizme/SKILL.md` 3. 
**Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/util-speckit.status.md b/.windsurf/workflows/util-speckit.status.md index 82550dd..472e42c 100644 --- a/.windsurf/workflows/util-speckit.status.md +++ b/.windsurf/workflows/util-speckit.status.md @@ -11,7 +11,7 @@ description: Display a dashboard showing feature status, completion percentage, - The user may optionally specify a feature to focus on. 2. **Load Skill**: - - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.status/SKILL.md` + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-status/SKILL.md` 3. **Execute**: - Follow the instructions in the `SKILL.md` exactly. diff --git a/.windsurf/workflows/util-speckit.taskstoissues.md b/.windsurf/workflows/util-speckit.taskstoissues.md new file mode 100644 index 0000000..5d5ce5b --- /dev/null +++ b/.windsurf/workflows/util-speckit.taskstoissues.md @@ -0,0 +1,23 @@ +--- +auto_execution_mode: 0 +description: Convert existing tasks into actionable, dependency-ordered issues on Gitea for the current feature. +--- + +# Workflow: speckit.taskstoissues + +1. **Context Analysis**: + - The user may pass filters (e.g., phase, priority). Default: convert all pending tasks. + +2. **Load Skill**: + - Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-taskstoissues/SKILL.md` + - Also load `.agents/skills/_LCBP3-CONTEXT.md` for project conventions (labels, commit format). + +3. **Execute**: + - Follow the instructions in the `SKILL.md` exactly. + - Use Gitea API (not GitHub) — target `git.np-dms.work/np-dms/lcbp3`. + - Apply LCBP3 labels: `spec`, `adr`, `security`, `ux`, `backend`, `frontend`, `schema`, etc. + - Use commit-format-compatible issue titles (per `specs/05-Engineering-Guidelines/05-05-git-conventions.md`). + +4. 
**On Error**: + - If `tasks.md` missing: Run `/05-speckit.tasks` first + - If Gitea credentials missing: Report to user and provide manual issue-creation template diff --git a/AGENTS.md b/AGENTS.md index 6515042..2f21ca0 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,8 +1,9 @@ # NAP-DMS Project Context & Rules - For: Windsurf Cascade (and compatible: Codex CLI, opencode, Amp, Antigravity, AGENTS.md tools) -- Version: 1.8.7 | Last synced from repo: 2026-04-14 +- Version: 1.8.9 | Last synced from repo: 2026-04-22 - Repo: [https://git.np-dms.work/np-dms/lcbp3](https://git.np-dms.work/np-dms/lcbp3) +- Skill pack: `.agents/skills/` (v1.8.9, 20 skills) — see [`skills/README.md`](./.agents/skills/README.md) + [`skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md) --- @@ -380,26 +381,30 @@ This file is a **quick reference**. For detailed information: - **Architecture:** `specs/02-architecture/` - **Requirements:** `specs/01-requirements/` -- **Data & Storage:** `specs/03-Data-and-Storage/` +- **Data & Storage:** `specs/03-Data-and-Storage/` (canonical schema + `deltas/` incremental SQL per ADR-009) - **Engineering Guidelines:** `specs/05-Engineering-Guidelines/` - **Decision Records:** `specs/06-Decision-Records/` - **Infrastructure:** `specs/04-Infrastructure-OPS/` +- **Agent Skill Pack:** `.agents/skills/` (NestJS/Next.js rules + 18 Speckit workflow skills) +- **Helper Scripts:** `.agents/scripts/{bash,powershell}/` (audit, validate, prerequisites, setup-plan) --- ## 🔄 Change Log -| Version | Date | Changes | Updated By | -| ------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------- | -------------- | -| 1.8.7 | 2026-04-14 | + ADR-021 Workflow Context integration, + ADR-021 Integration Work tier, + Transmittal/Circulation context triggers, updated ADR-020 status | Windsurf AI | -| 1.8.6 | 2026-04-10 | + DMS Workflow Engine Protocol, + Security & Integrity Audit 
Protocol, + 2 Context-Aware Triggers, ADR Status column, Forbidden Why column | Human Dev | -| 1.8.5 | 2026-04-04 | Added ADR-007 error handling, ADR-020 AI integration, updated security rules | Windsurf AI | -| 1.8.4 | 2026-03-24 | Phase 5.4→✅ DONE, Tailwind 3.4.3, ADR count(16), MariaDB UUID note | Windsurf AI | -| 1.8.3 | 2026-03-21 | + Rule Enforcement Tiers (🔴🟡🟢), + Tiered Development Flow | Human Dev + AI | -| 1.8.2 | 2026-03-21 | + Context Triggers, + Code Snippets, + Error Handling, + i18n | Human Dev + AI | -| 1.8.1 | 2026-03-21 | + ADR-019 UUID patterns, + Phase 5.4 pending files | Claude Sonnet | -| 1.8.0 | 2026-03-19 | + Security overrides, + UAT criteria reference | Human Dev | -| 1.7.2 | 2026-03-15 | + AI Boundary rules (ADR-018) | Gemini Pro | +| Version | Date | Changes | Updated By | +| ------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- | +| 1.8.9 | 2026-04-22 | `.agents/skills/` LCBP3-native rebuild (20 skills @ v1.8.9) + `_LCBP3-CONTEXT.md` appendix + `specs/03-Data-and-Storage/deltas/` + AGENTS.md sync | Windsurf AI | +| 1.8.8 | 2026-04-14 | Workflow attachments (ADR-021) + step-attachment envelope fields | Windsurf AI | +| 1.8.7 | 2026-04-14 | + ADR-021 Workflow Context integration, + ADR-021 Integration Work tier, + Transmittal/Circulation context triggers, updated ADR-020 status | Windsurf AI | +| 1.8.6 | 2026-04-10 | + DMS Workflow Engine Protocol, + Security & Integrity Audit Protocol, + 2 Context-Aware Triggers, ADR Status column, Forbidden Why column | Human Dev | +| 1.8.5 | 2026-04-04 | Added ADR-007 error handling, ADR-020 AI integration, updated security rules | Windsurf AI | +| 1.8.4 | 2026-03-24 | Phase 5.4→✅ DONE, Tailwind 3.4.3, ADR count(16), MariaDB UUID note | Windsurf AI | +| 1.8.3 | 2026-03-21 | + Rule Enforcement Tiers (🔴🟡🟢), + Tiered Development Flow | Human Dev + AI | +| 1.8.2 | 2026-03-21 | 
+ Context Triggers, + Code Snippets, + Error Handling, + i18n | Human Dev + AI | +| 1.8.1 | 2026-03-21 | + ADR-019 UUID patterns, + Phase 5.4 pending files | Claude Sonnet | +| 1.8.0 | 2026-03-19 | + Security overrides, + UAT criteria reference | Human Dev | +| 1.7.2 | 2026-03-15 | + AI Boundary rules (ADR-018) | Gemini Pro | --- diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2a8dd4d..162d29f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -58,7 +58,8 @@ specs/ │ ├── lcbp3-v1.8.0-seed-basic.sql # Master Data Seed │ ├── lcbp3-v1.8.0-seed-permissions.sql # RBAC Permissions Seed │ ├── 03-01-data-dictionary.md -│ └── 03-06-migration-business-scope.md # Gap 7: Migration Scope [★ NEW] +│ ├── 03-06-migration-business-scope.md # Gap 7: Migration Scope [★ NEW] +│ └── deltas/ # Incremental SQL (ADR-009) [★ v1.8.9] │ ├── 04-Infrastructure-OPS/ # Deployment & Operations (9 docs) │ ├── README.md @@ -713,6 +714,50 @@ Create `.markdownlint.json`: --- +## 🤖 AI-Assisted Contributions + +โปรเจกต์นี้รองรับ AI agents (Windsurf Cascade, Codex CLI, opencode, Amp, Antigravity) ในการเขียน / review / refactor โค้ด — ผ่านคู่มือกลางคือ [`AGENTS.md`](./AGENTS.md) และชุดทักษะใน [`.agents/skills/`](./.agents/skills/) + +### Canonical Rule Sources (อ่านตามลำดับนี้) + +1. **[`AGENTS.md`](./AGENTS.md)** — quick-reference rules + change log (supersedes legacy `GEMINI.md`) +2. **[`.agents/skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md)** — shared context loaded by every speckit-\* skill +3. **[`.agents/skills/README.md`](./.agents/skills/README.md)** — skill-pack layout + Windsurf invocation guide +4. `specs/06-Decision-Records/` (โดยเฉพาะ ADR-019 — UUID **March 2026 pattern**) +5. 
`specs/05-Engineering-Guidelines/` (backend / frontend / testing / i18n / git conventions) + +### Invocation (Windsurf) + +ใช้ slash commands ด้านล่าง — `.windsurf/workflows/*.md` ห่อหุ้ม [`.agents/skills/speckit-*`](./.agents/skills/) ไว้ให้: + +- `/02-speckit.specify` → spec.md +- `/04-speckit.plan` → plan.md + data-model.md + contracts/ +- `/05-speckit.tasks` → tasks.md +- `/07-speckit.implement` → execute tasks (with Ironclad Anti-Regression Protocols) +- `/10-speckit.reviewer` → code review (Tier 1/2/3 classification) +- `/12-speckit.security-audit` → OWASP + CASL + LCBP3-specific + +### Health Checks + +```bash +# Version + frontmatter consistency +bash ./.agents/scripts/bash/validate-versions.sh +pwsh ./.agents/scripts/powershell/validate-versions.ps1 + +# Full skill audit (20 skills) +bash ./.agents/scripts/bash/audit-skills.sh +pwsh ./.agents/scripts/powershell/audit-skills.ps1 +``` + +### 🔴 Tier 1 Non-Negotiables (AI must enforce) + +- **ADR-019 UUID** — `publicId` exposed directly; ห้าม `parseInt`/`Number`/`+` บน UUID; ห้าม `id ?? 
''` fallback; ห้ามใช้ `@Expose({ name: 'id' })` rename +- **ADR-009 Schema** — แก้ `lcbp3-v1.8.0-schema-02-tables.sql` โดยตรง + เพิ่ม delta ที่ `specs/03-Data-and-Storage/deltas/`; ห้าม TypeORM migrations +- **ADR-016 Security** — CASL + `Idempotency-Key` + ClamAV two-phase upload +- **ADR-018/020 AI Boundary** — Ollama on Admin Desktop only; human-in-the-loop validation + +--- + ## 🎯 Quality Standards ### Definition of Done (DoD) สำหรับ Spec Changes diff --git a/README.md b/README.md index 224ebe4..a1ba33c 100644 --- a/README.md +++ b/README.md @@ -325,9 +325,9 @@ lcbp3-dms/ ├── .vscode/ # VS Code settings and extensions ├── .husky/ # Git hooks │ -├── AGENTS.md # AI agent rules & project context -├── GEMINI.md # AI coding guidelines -├── CONTRIBUTING.md # Contribution guidelines +├── AGENTS.md # AI agent rules & project context (v1.8.9) [★ primary] +├── GEMINI.md # AI coding guidelines [legacy — kept for backward compat] +├── CONTRIBUTING.md # Contribution guidelines (+ AI-Assisted section) ├── CHANGELOG.md # Version history ├── README.md # This file ├── package.json # Root package.json (monorepo) @@ -739,6 +739,22 @@ docker-compose -f docker-compose.yml up -d - Development Process - Pull Request Process - Coding Standards +- **AI-Assisted Contributions** (AGENTS.md + `.agents/skills/` skill pack + Windsurf slash commands) + +### 🤖 For AI Agents (Windsurf Cascade, Codex CLI, opencode, Amp, Antigravity) + +ไฟล์กลางสำหรับ AI assistants: + +| Priority | File | Purpose | +| -------- | ------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------- | +| 1 | [`AGENTS.md`](./AGENTS.md) | Quick-reference rules (Tier 1/2/3 enforcement, ADR-019 March 2026 pattern, forbidden actions) | +| 2 | [`.agents/skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md) | Shared context appendix injected into every speckit-\* skill | +| 3 | 
[`.agents/skills/README.md`](./.agents/skills/README.md) | Skill-pack layout + slash-command invocation guide | +| 4 | `specs/06-Decision-Records/` | 22 ADRs (architectural decisions) | + +**Slash commands:** `/02-speckit.specify` → `/04-speckit.plan` → `/05-speckit.tasks` → `/07-speckit.implement` → `/10-speckit.reviewer` → `/12-speckit.security-audit` + +**Health checks:** `bash ./.agents/scripts/bash/audit-skills.sh` or `pwsh ./.agents/scripts/powershell/audit-skills.ps1` --- @@ -769,7 +785,18 @@ This project is **Internal Use Only** - ลิขสิทธิ์เป็น ## 🗺️ Roadmap -### ✅ Version 1.8.9 (Apr 2026) — Infrastructure Hardening +### ✅ Version 1.8.9 (Apr 2026) — Infrastructure Hardening + Agent Skill Pack Rebuild + +**Agent skill pack rebuilt (`.agents/skills/` @ v1.8.9) — 2026-04-22:** + +- ✅ 20 skills standardized (2 best-practices + 18 speckit-\*) — shared `_LCBP3-CONTEXT.md` appendix +- ✅ ADR-019 drift removed: `publicId` exposed directly (no `@Expose({ name: 'id' })` rename); `id ?? ''` fallback eliminated +- ✅ Dead references cleaned: `GEMINI.md` → `AGENTS.md`; `.specify/memory/` → `AGENTS.md`; `v1.7.0` → `v1.8.0` schema +- ✅ New rules: workflow-engine (ADR-001/002/021), file-two-phase-upload (ADR-016), ai-boundary (ADR-018/020), no-typeorm-migrations (ADR-009), i18n, two-phase-upload (frontend) +- ✅ `.windsurf/workflows/` path fixes (18 files) + 2 new wrappers (`12-speckit.security-audit`, `util-speckit.taskstoissues`) +- ✅ `specs/03-Data-and-Storage/deltas/` directory bootstrapped (ADR-009 incremental SQL) +- ✅ Regenerated `nestjs-best-practices/AGENTS.md` (188KB, 45 rules × 11 categories incl. 
LCBP3 project-specific) +- ✅ Helper scripts fixed (bash + pwsh): BASE_DIR, CRLF, color enum, version extraction **Docker Compose stacks fully hardened — 27 findings across 4 phases:** diff --git a/specs/02-architecture/02-03-network-design.md b/specs/02-architecture/02-03-network-design.md index 5272bf2..5805947 100644 --- a/specs/02-architecture/02-03-network-design.md +++ b/specs/02-architecture/02-03-network-design.md @@ -3,10 +3,10 @@ --- title: 'Network Design & Security' -version: 1.8.0 +version: 1.8.2 status: first-draft owner: Nattanin Peancharoen -last_updated: 2026-02-23 +last_updated: 2026-04-23 related: - specs/02-Architecture/00-01-system-context.md @@ -16,11 +16,11 @@ related: ## 1. 🌐 Network Segmentation (VLANs) และหลักการ Zero Trust -ระบบ LCBP3-DMS จัดแบ่งเครือข่ายออกเป็นเครือข่ายย่อย (VLANs) เพื่อการควบคุมการเข้าถึง (Access Control) ตามหลักการ Zero Trust โดยใช้อุปกรณ์ Network ของ Omada (ER7206 Router & SG2428P Core Switch) และ Switch ต่างๆ ในเครือข่าย +ระบบ LCBP3-DMS จัดแบ่งเครือข่ายออกเป็นเครือข่ายย่อย (VLANs) เพื่อการควบคุมการเข้าถึง (Access Control) ตามหลักการ Zero Trust โดยใช้อุปกรณ์ Network ของ Omada (ER7206 Router & SG3210X-M2 Core Switch) และ Switch ต่างๆ ในเครือข่าย | VLAN ID | Name | Purpose | Subnet | Gateway | Notes | | ------- | -------------- | ----------------------- | --------------- | ------------ | ---------------------------------------------------- | -| 10 | SERVER | Server & Storage | 192.168.10.0/24 | 192.168.10.1 | Servers (QNAP, ASUSTOR). Static IPs ONLY. | +| 10 | SERVER | Server & Storage | 192.168.10.0/24 | 192.168.10.1 | Servers (QNAP, ASUSTOR, Zyxel NAS326). Static IPs ONLY. | | 20 | MGMT (Default) | Management & Admin | 192.168.20.0/24 | 192.168.20.1 | Network devices (ER7206, OC200, Switches), Admin PC. | | 30 | USER | User Devices | 192.168.30.0/24 | 192.168.30.1 | Staff PC, Notebooks, Wi-Fi. | | 40 | CCTV | Surveillance | 192.168.40.0/24 | 192.168.40.1 | Cameras, NVR. Isolated. 
| @@ -81,36 +81,42 @@ flowchart TB ```mermaid graph TB subgraph Internet - WAN[("🌐 Internet
WAN")] + WAN[("Internet
WAN")] end subgraph Router["ER7206 Router"] - R[("🔲 ER7206
192.168.20.1")] + R[("ER7206
192.168.20.1")] end - subgraph CoreSwitch["SG2428P Core Switch"] - CS[("🔲 SG2428P
192.168.20.2")] + subgraph CoreSwitch["SG3210X-M2 Core Switch"] + CS[("SG3210X-M2
192.168.20.4")] end - subgraph ServerSwitch["AMPCOM 2.5G Switch"] - SS[("🔲 AMPCOM
192.168.20.3")] + subgraph DistSwitch["SG2428P Distribution Switch"] + DS[("SG2428P
192.168.20.2")] end subgraph Servers["VLAN 10 - Servers"] - QNAP[("💾 QNAP
192.168.10.8")] - ASUSTOR[("💾 ASUSTOR
192.168.10.9")] + QNAP[(" QNAP
192.168.10.8")] + ASUSTOR[(" ASUSTOR
192.168.10.9")] + Zyxel[(" Zyxel NAS326
192.168.10.111")] end subgraph AccessPoints["EAP610 x16"] - AP[("📶 WiFi APs")] + AP[(" WiFi APs")] end - WAN --> R - R -->|Port 3| CS - CS -->|LAG Port 3-4| SS - SS -->|Port 3-4 LACP| QNAP - SS -->|Port 5-6 LACP| ASUSTOR - CS -->|Port 5-20| AP + subgraph AdminPC["Admin Desktop"] + PC[(" Admin PC
192.168.20.100")] + end + + WAN -->|Port 2| R + R -->|SFP Port 1| CS + CS -->|SFP+ Port 9| DS + CS -->|Port 3-4 LACP| QNAP + CS -->|Port 5-6 LACP| ASUSTOR + CS -->|Port 8| PC + DS -->|Port 1-16| AP ``` ### 3.1 Switch Profiles & Interfaces @@ -123,13 +129,258 @@ graph TB - **06_AP_TRUNK:** EAP610 Access Points (Native: 20, Tagged: 30, 70) - **07_VOICE_ACCESS:** IP Phones (Native: 30, Tagged: 50, Untagged: 30) -### 3.2 NAS NIC Bonding Configuration +### 3.2 Detailed Port Configuration + +#### 3.2.1 TP-Link ER7206 (Router) +- **1× Gigabit SFP WAN/LAN port + 5× Gigabit RJ45 ports (1× WAN, 4× WAN/LAN)** + - SFP Port 1 WAN/LAN -> SG3210X-M2 Port 10 SFP+ + - Port 2 WAN port uplink Internet + +#### 3.2.2 TP-Link SG3210X-M2 (Core Switch) +- **8-Port 2.5Gbps + 2-Port 10G SFP+ Slots** + - Port 1&2 (Active LACP) -> Reserved + - Port 3&4 (Active LACP) -> QNAP 192.168.10.8 + - Port 5&6 (Active LACP) -> ASUSTOR 192.168.10.9 + - Port 7 Reserved + - Port 8 -> Admin Desktop (192.168.20.100) + - SFP+ Port 9 -> SG2428P (192.168.20.2) Port 28 + - SFP+ Port 10 uplink ER7206 (192.168.20.1) Port 1 + +#### 3.2.3 TP-Link SG2428P (Distribution Switch) +- **24× 10/100/1000 Mbps RJ45 Ports + 4× Gigabit SFP Slots** + - Port 1-16 -> EAP610 (16 Access Points) + - Port 17 Reserved for TP-07 (LAN port) + - Port 18 TP-08 (LAN port) + - Port 19 -> TL-SG1210P Port 9 (Voice Switch) + - Port 20 Reserved + - Port 21 TP-11 (LAN port) + - Port 22 Reserved + - Port 23 -> Printer + - Port 24 uplink OC200 + - SFP Port 25 Reserved + - SFP Port 26 -> TL-SL1226P SFP + - SFP Port 27 Reserved + - SFP Port 28 uplink SG3210X-M2 SFP+ + +#### 3.2.4 TP-Link TL-SL1226P (CCTV Switch) +- **24× PoE+ 10/100 Mbps RJ45 ports, 2× Gigabit RJ45 ports, and 2× combo Gigabit SFP** + - Port 1-6 -> CCTV (6 cameras) + - 1000 Mbps Port 25 -> NVR + - SFP Port 26 uplink SG2428P Port 26 + +#### 3.2.5 TP-Link TL-SG1210P (Voice Switch) +- **9 Port 10/100/1000Mbps RJ45 ports, 1 Gigabit SFP port** + - Port 1-8 -> IP Phone (TP-01 to TP-06, 
TP-09, TP-10) + - Port 9 uplink SG2428P Port 19 + - SFP Port 10 Reserved + +### 3.3 VLAN Assignment Table + +#### 3.3.1 SG3210X-M2 (Core Switch) + +| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile | +|------|------------|-----------|-------------|--------------|---------| +| 1-2 | Reserved (LACP) | Trunk | 20 | 10,20,30,40,50,60,70 | 01_CORE_TRUNK | +| 3-4 | QNAP (LACP) | Access | 10 | - | 03_SERVER_ACCESS | +| 5-6 | ASUSTOR (LACP) | Access | 10 | - | 03_SERVER_ACCESS | +| 7 | Reserved | - | - | - | - | +| 8 | Admin Desktop | Access | 20 | - | 02_MGMT_ONLY | +| 9 (SFP+) | SG2428P | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK | +| 10 (SFP+) | ER7206 | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK | + +#### 3.3.2 SG2428P (Distribution Switch) + +| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile | +|------|------------|-----------|-------------|--------------|---------| +| 1-16 | EAP610 APs | Trunk | 20 | 30,70 | 06_AP_TRUNK | +| 17 | TP-07 (LAN) | Access | 30 | - | 05_USER_ACCESS | +| 18 | TP-08 (LAN) | Access | 30 | - | 05_USER_ACCESS | +| 19 | TL-SG1210P | Trunk | 30 | 50 | 07_VOICE_ACCESS | +| 20 | Reserved | - | - | - | - | +| 21 | TP-11 (LAN) | Access | 30 | - | 05_USER_ACCESS | +| 22 | Reserved | - | - | - | - | +| 23 | Printer | Access | 30 | - | 05_USER_ACCESS | +| 24 | OC200 | Access | 20 | - | 02_MGMT_ONLY | +| 25 (SFP) | Reserved | - | - | - | - | +| 26 (SFP) | TL-SL1226P | Trunk | 20 | 40 | 04_CCTV_ACCESS | +| 27 (SFP) | Reserved | - | - | - | - | +| 28 (SFP) | SG3210X-M2 | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK | + +#### 3.3.3 TL-SL1226P (CCTV Switch) + +| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile | +|------|------------|-----------|-------------|--------------|---------| +| 1-6 | CCTV Cameras | Access | 40 | - | 04_CCTV_ACCESS | +| 7-24 | Reserved | - | - | - | - | +| 25 | NVR | Access | 40 | - | 04_CCTV_ACCESS | +| 26 | SG2428P | Trunk | 20 | 40 | 04_CCTV_ACCESS | + +#### 
3.3.4 TL-SG1210P (Voice Switch) + +| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile | +|------|------------|-----------|-------------|--------------|---------| +| 1-8 | IP Phone + PC Passthrough | Trunk | 30 (Data) | 50 (Voice) | 07_VOICE_ACCESS | +| 9 | SG2428P | Trunk | 30 | 50 | 07_VOICE_ACCESS | +| 10 (SFP) | Reserved | - | - | - | - | + +**Note:** IP Phone ports support PC passthrough - Native VLAN 30 for PC data, Tagged VLAN 50 for VoIP traffic. + +### 3.4 NAS NIC Bonding Configuration | Device | Bonding Mode | Member Ports | VLAN Mode | Tagged VLAN | IP Address | Gateway | Notes | | ------- | ------------------- | ------------ | --------- | ----------- | --------------- | ------------ | ---------------------- | | QNAP | IEEE 802.3ad (LACP) | Adapter 1, 2 | Untagged | 10 (SERVER) | 192.168.10.8/24 | 192.168.10.1 | Primary NAS for DMS | | ASUSTOR | IEEE 802.3ad (LACP) | Port 1, 2 | Untagged | 10 (SERVER) | 192.168.10.9/24 | 192.168.10.1 | Backup / Secondary NAS | +### 3.5 PoE Budget & Power Consumption + +#### 3.5.1 SG2428P (Distribution Switch) + +| Specification | Value | +|---------------|-------| +| Total PoE Budget | 370W | +| PoE Standard | IEEE 802.3at (PoE+) | +| PoE Ports | 1-16 (RJ45), 25-26 (SFP) | + +**Power Consumption Estimate:** + +| Device | Quantity | Power per Device | Total Power | Port Assignment | +|--------|----------|-----------------|-------------|----------------| +| EAP610 Access Point | 16 | ~12.95W | ~207W | Port 1-16 | +| TL-SL1226P Uplink | 1 | ~15W | ~15W | Port 26 (SFP) | +| **Total Used** | - | - | **~222W** | - | +| **Available** | - | - | **148W** | - | +| **Utilization** | - | - | **60%** | - | + +#### 3.5.2 TL-SL1226P (CCTV Switch) + +| Specification | Value | +|---------------|-------| +| Total PoE Budget | 195W | +| PoE Standard | IEEE 802.3at (PoE+) | +| PoE Ports | 1-24 (RJ45) | + +**Power Consumption Estimate:** + +| Device | Quantity | Power per Device | Total Power | Port Assignment | 
+|--------|----------|-----------------|-------------|----------------| +| CCTV Camera | 6 | ~8W | ~48W | Port 1-6 | +| NVR (Non-PoE) | 1 | 0W | 0W | Port 25 (1000Mbps) | +| **Total Used** | - | - | **48W** | - | +| **Available** | - | - | **147W** | - | +| **Utilization** | - | - | **25%** | - | + +> [!NOTE] +> PoE budget has sufficient headroom for future expansion. SG2428P can support additional ~12 APs, TL-SL1226P can support additional ~12 cameras. + +### 3.6 Cable Specifications + +| Link Type | Cable Category | Max Distance | Application | +|-----------|----------------|--------------|-------------| +| 10Gbps Uplinks (SFP+) | Cat6a / Cat7 | 100m | SG3210X-M2 ↔ SG2428P, ER7206 ↔ SG3210X-M2 | +| 2.5Gbps Server Links | Cat6 | 100m | SG3210X-M2 ↔ QNAP/ASUSTOR (LACP) | +| 1Gbps Standard Links | Cat5e / Cat6 | 100m | All other RJ45 connections | +| IP Phone Passthrough | Cat5e / Cat6 | 100m | IP Phone + PC connections | + +**Cable Color Coding:** +- **Blue:** Uplink/Trunk links (SFP+, LACP) +- **Green:** Server connections (VLAN 10) +- **Yellow:** Management connections (VLAN 20) +- **Red:** CCTV/Voice connections (VLAN 40, 50) +- **Orange:** User connections (VLAN 30) + +### 3.7 QoS (Quality of Service) Settings + +#### 3.7.1 Priority Levels (DSCP) + +| Priority | DSCP Value | Traffic Type | Application | +|----------|------------|--------------|-------------| +| Highest (7) | EF (46) | Voice (SIP/RTP) | IP Phones (VLAN 50) | +| High (6) | AF41 (34) | Video Surveillance | CCTV Cameras (VLAN 40) | +| Medium (5) | AF31 (26) | Critical Applications | DMS Backend, Database | +| Low (4) | AF21 (18) | Best Effort | Web browsing, Email | +| Lowest (0) | CS0 (0) | Background | File downloads, Updates | + +#### 3.7.2 QoS Configuration per Switch + +**SG3210X-M2 (Core Switch):** +- Enable QoS globally +- Trust DSCP on all trunk ports +- Prioritize Voice (VLAN 50) and Video (VLAN 40) traffic +- Rate limit Guest VLAN (70) to 10Mbps per client + +**SG2428P (Distribution 
Switch):** +- Enable QoS globally +- Trust DSCP on uplink ports (SFP+ 28, RJ45 19) +- Map VLAN 50 to Queue 7 (Highest) +- Map VLAN 40 to Queue 6 (High) +- Map VLAN 10 to Queue 5 (Medium) + +**TL-SL1226P (CCTV Switch):** +- Enable QoS globally +- Map all CCTV ports to Queue 6 (High) +- Ensure NVR traffic has priority + +**TL-SG1210P (Voice Switch):** +- Enable QoS globally +- Map VLAN 50 to Queue 7 (Highest) +- Map VLAN 30 to Queue 4 (Low - for PC data) +- Enable LLDP-MED for IP Phone power negotiation + +### 3.8 Redundancy Planning & Network Resilience + +#### 3.8.1 Critical Links Redundancy + +| Critical Path | Primary Link | Backup Link | Failover Time | Implementation Status | +|---------------|--------------|-------------|---------------|-----------------------| +| Internet Access | ER7206 WAN Port 2 | 4G/LTE Backup | < 30s | Planned (Q3 2026) | +| Core Switch Connectivity | SG3210X-M2 SFP+ Port 9-10 | SG3210X-M2 Port 1-2 (LACP) | < 1s | Ready (Ports Reserved) | +| Server Connectivity | QNAP LACP (Ports 3-4) | ASUSTOR LACP (Ports 5-6) | < 1s | Active | +| Distribution Layer | SG2428P SFP+ Port 28 | SG2428P Port 20 | < 5s | Planned | +| Controller Management | OC200 Port 24 | OC200 Wireless Fallback | < 10s | Active | + +#### 3.8.2 Single Points of Failure (SPOF) Analysis + +| Component | Risk Level | Mitigation Strategy | Target Resolution | +|-----------|------------|---------------------|-------------------| +| ER7206 Router | HIGH | Add secondary router (VRRP) | Q3 2026 | +| SG3210X-M2 Core Switch | MEDIUM | Utilize reserved LACP ports 1-2 | Immediate | +| QNAP Primary Storage | MEDIUM | ASUSTOR backup with real-time sync | Active | +| Internet Connection | HIGH | 4G/LTE failover router | Q3 2026 | +| Power Supply | MEDIUM | UPS + Generator maintenance | Ongoing | + +#### 3.8.3 Network Monitoring & Alerting + +| Monitor Item | Threshold | Alert Method | Escalation | +|--------------|-----------|--------------|------------| +| Link Utilization > 80% | 5 min | 
Email + Teams | Network Admin | +| Link Down | Immediate | SMS + Email | Network Admin | +| High Latency > 100ms | 2 min | Email | Network Admin | +| Packet Loss > 1% | 3 min | Email | Network Admin | +| VLAN Misconfiguration | Immediate | Email | Network Admin | + +#### 3.8.4 Disaster Recovery Procedures + +1. **Core Switch Failure:** + - Activate LACP ports 1-2 on SG3210X-M2 + - Re-route critical traffic through backup paths + - Restore within 15 minutes + +2. **Router Failure:** + - Manual failover to backup router + - Update DHCP gateway addresses + - Restore within 30 minutes + +3. **Internet Outage:** + - Activate 4G/LTE backup connection + - Update DNS records if needed + - Restore within 5 minutes + +4. **Power Outage:** + - UPS maintains critical infrastructure for 2 hours + - Generator activates after 5 minutes + - Full service maintained + ## 4. 🔥 Firewall Rules (ACLs) & Port Forwarding กฎของ Firewall จะถูกกำหนดบน Omada Controller และอุปกรณ์ Gateway (ER7206) ตามหลักการอนุญาตแค่สิ่งที่ต้องการ (Default Deny) @@ -138,11 +389,11 @@ graph TB **IP Groups:** -- `Server`: 192.168.10.8, 192.168.10.9, 192.168.10.111 +- `Server`: 192.168.10.8 (QNAP), 192.168.10.9 (ASUSTOR), 192.168.10.111 (Zyxel NAS326) - `Omada-Controller`: 192.168.20.250 - `DHCP-Gateways`: 192.168.30.1, 192.168.70.1 - `QNAP_Services`: 192.168.10.8 -- `Internal`: 192.168.10.0/24, 192.168.20.0/24, 192.168.30.0/24 +- `Internal`: 192.168.10.0/24, 192.168.20.0/24, 192.168.30.0/24, 192.168.40.0/24, 192.168.50.0/24 - `Blacklist`: (เพิ่ม IP ประสงค์ร้าย) **Port Groups:** diff --git a/specs/03-Data-and-Storage/deltas/.gitkeep b/specs/03-Data-and-Storage/deltas/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/specs/03-Data-and-Storage/deltas/README.md b/specs/03-Data-and-Storage/deltas/README.md new file mode 100644 index 0000000..1aad80f --- /dev/null +++ b/specs/03-Data-and-Storage/deltas/README.md @@ -0,0 +1,65 @@ +# Schema Deltas + +Incremental SQL scripts applied to existing 
environments **after** the canonical schema +(`../lcbp3-v1.8.0-schema-02-tables.sql`) has been updated. + +## Naming Convention + +``` +YYYY-MM-DD-descriptive-name.sql +``` + +Examples: + +- `2026-04-22-add-rfa-revision-column.sql` +- `2026-04-25-index-correspondence-created-at.sql` +- `2026-05-01-add-workflow-step-attachment-table.sql` + +## Rules (per ADR-009) + +1. **Never replace** the canonical `lcbp3-v1.8.x-schema-02-tables.sql` — update it first, then add the delta here. +2. **Idempotent where possible** — prefer `CREATE TABLE IF NOT EXISTS`, `ALTER TABLE … ADD COLUMN IF NOT EXISTS`, etc. +3. **No TypeORM migrations** — these `.sql` files are the only schema deployment mechanism. +4. **Data backfill** goes through **n8n workflows**, not this directory. +5. **Update Data Dictionary** (`../03-01-data-dictionary.md`) in the same PR that adds a delta. + +## Delta Template + +```sql +-- Delta: +-- Date: YYYY-MM-DD +-- Related ADR: ADR-XXX (if applicable) +-- Related Spec: specs/NN-NAME/spec.md (if applicable) +-- Applied in: v1.8.X → v1.8.Y + +-- ------------------------------------------------------------ +-- Schema changes +-- ------------------------------------------------------------ + +ALTER TABLE + ADD COLUMN ; + +-- ------------------------------------------------------------ +-- Indexes (if needed) +-- ------------------------------------------------------------ + +CREATE INDEX idx_
<table>_<column> ON <table> (<column>); + +-- ------------------------------------------------------------ +-- Verification query (optional) +-- ------------------------------------------------------------ + +-- SELECT COUNT(*) FROM <table>
WHERE IS NOT NULL; +``` + +## Rollback + +Every delta should have a reversible companion (`YYYY-MM-DD-descriptive-name.rollback.sql`) +where physically possible. Dropping `NOT NULL` columns with existing data is explicitly +irreversible — document in the delta header when rollback is impossible. + +## References + +- [ADR-009 Database Migration Strategy](../../06-Decision-Records/ADR-009-database-migration-strategy.md) +- [Canonical Schema](../lcbp3-v1.8.0-schema-02-tables.sql) +- [Data Dictionary](../03-01-data-dictionary.md) diff --git a/specs/04-Infrastructure-OPS/switch-configuration-guide.md b/specs/04-Infrastructure-OPS/switch-configuration-guide.md new file mode 100644 index 0000000..6059db7 --- /dev/null +++ b/specs/04-Infrastructure-OPS/switch-configuration-guide.md @@ -0,0 +1,858 @@ +# Switch Configuration Guide — Omada SDN V6 + +**Version:** 3.0 +**Last Updated:** 2026-04-24 +**Status:** Production +**Author:** Infrastructure Team +**Maintainer:** NAP-DMS DevOps +**Scope:** LCBP3 Network Infrastructure (SG3210X-M2 + SG2428P) + +--- + +## Table of Contents + +1. [Overview](#overview) +2. [VLAN Definitions](#vlan-definitions) +3. [Port Profiles](#port-profiles) +4. [VLAN Mapping](#vlan-mapping) +5. [Network Diagram](#network-diagram) +6. [Configuration Procedure](#configuration-procedure) +7. [Change Log](#change-log) +8. [Quick Reference](#quick-reference--edit-port-values) +9. [Pre-Deployment Checklist](#pre-deployment-checklist) +10. [Testing Guide](#testing-guide--vlan--lacp--stp) +11. [Security & Optimization](#security--optimization-recommended) +12. 
[Related Documents](#related-documents) + +--- + +## Overview + +เอกสารนี้กำหนด Port Profile templates และ VLAN mapping configuration สำหรับ LCBP3 network infrastructure โดยใช้ TP-Link Omada SDN V6 Controller (OC200) + +**Audience:** Network Administrator, DevOps Engineer +**Prerequisites:** Omada SDN Controller v6.x, สิทธิ์ Admin บน OC200 +**Related ADRs:** ADR-016 (Security), ADR-009 (Database Strategy — ถ้ามี Network DB) + +### Network Equipment + +| Device | Model | Role | +|--------|-------|------| +| Core Switch | SG3210X-M2 | 10G Core Switch | +| Access Switch | SG2428P | PoE Access Switch | +| NAS Storage | QNAP / ASUSTOR | Network Attached Storage | +| Unmanaged Switch 1 | TL-SG1210P | IP Phone + PC | +| Unmanaged Switch 2 | TL-SL1226P | CCTV | +| Wireless AP | EAP610 | Wi-Fi Access Points | +| Router | ER7206 | Edge Router | + +### Configuration Concepts + +**Port Profile** — Template defining port-level settings (STP Security, Loopback Control, Multicast Fast Leave, Flow Control, EEE, LLDP-MED, PoE). Port Profiles do NOT contain VLAN configuration. + +**Edit Port** — VLAN assignment step where Native Network (Untagged), Tagged Network, and Voice Network are configured, and a Port Profile is applied. 
+ +--- + +## 🧠 Key Concepts (Before Using This Config) + +- ใช้ **STP เท่านั้น (เลิก Loop Detection)** — Spanning Tree Protocol สำหรับ loop prevention +- **Harden Access Port ด้วย BPDU Guard** — ป้องกันการเสียบ switch โดยไม่ได้รับอนุญาต +- **กัน Rogue Switch ด้วย Root Guard** — ป้องกัน switch เถื่อนยึด root bridge +- **ทำ Trunk ให้ clean + predictable** — Native VLAN 999 สำหรับทุก trunk port +- **เผื่อ future VLAN expansion** — รองรับ VLAN เพิ่มเติมในอนาคต +- **VLAN 999 (was 99)** — เปลี่ยนจาก VLAN 99 เป็น 999 เพื่อความปลอดภัย + +--- + +## VLAN Definitions + +| VLAN ID | Name | Purpose | Subnet | Gateway | DHCP Range | +|---------|------|---------|--------|---------|-------------| +| 10 | NAS-ADMIN | NAS Storage & Admin Desktop | 192.168.10.0/24 | 192.168.10.1 | 192.168.10.50–199 | +| 20 | MGMT | Network Management (OC200) | 192.168.20.0/24 | 192.168.20.1 | 192.168.20.50–199 | +| 30 | USERS | User PCs, Printers, Staff WiFi | 192.168.30.0/24 | 192.168.30.1 | 192.168.30.50–199 | +| 40 | CCTV | CCTV Cameras, IoT Devices | 192.168.40.0/24 | 192.168.40.1 | 192.168.40.50–199 | +| 50 | VOICE | IP Phones | 192.168.50.0/24 | 192.168.50.1 | 192.168.50.50–199 | +| 70 | GUEST | Guest WiFi | 192.168.70.0/24 | 192.168.70.1 | 192.168.70.50–199 | +| 999 | NATIVE | Trunk Native VLAN (No DHCP) — Hardened | — | — | — | +| 60 | UNUSED | Reserved for future use | — | — | — | + +--- + +## Port Profiles + +### Profile 1 — 001-CORE-TRUNK-LACP 🔷 + +**Purpose:** LACP trunk links between Core and Access switches / Router + +**Applied To:** +- SG3210X-M2 Port 1–2 (to SG2428P Port 21–22) +- SG3210X-M2 Port 9 (to ER7206) +- SG2428P Port 21–22 (to SG3210X-M2 Port 1–2) + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Loop Protect: ENABLE +- Root Protect: DISABLE +- TC Guard: DISABLE +- BPDU Guard: DISABLE +- BPDU Filter: DISABLE + +General: +- Flow Control: ON +- EEE: OFF +- Port Isolation: OFF +``` + +📌 **ใช้กับ:** Core ↔ Access, Core ↔ Router + +--- + +### Profile 2 
— 002-NAS-LACP 🔷 + +**Purpose:** LACP links to NAS storage devices (QNAP / ASUSTOR) + +**Applied To:** +- SG3210X-M2 Port 3–4 (to QNAP) +- SG3210X-M2 Port 5–6 (to ASUSTOR) + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Loop Protect: ENABLE +- Root Protect: DISABLE +- BPDU Guard: DISABLE + +General: +- Flow Control: ON +- EEE: OFF +``` + +⚠️ **เหตุผล:** NAS บางรุ่นส่ง BPDU แปลก ๆ → ห้ามเปิด BPDU Guard + +--- + +### Profile 3 — 003-UNMANAGED-SWITCH 🔷⭐ (สำคัญมาก) + +**Purpose:** Downstream links to unmanaged switches — ป้องกัน Rogue Switch + +**Applied To:** +- SG2428P Port 25 (to TL-SL1226P — CCTV) +- SG2428P Port 26 (to TL-SG1210P — IP Phone + PC) + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Root Protect: ENABLE 🔥 +- Loop Protect: ENABLE +- BPDU Guard: DISABLE +- TC Guard: DISABLE + +General: +- Flow Control: ON +- EEE: OFF +``` + +📌 **ป้องกัน:** เสียบ switch เถื่อน → ยึด root ไม่ได้ + +--- + +### Profile 4 — 004-AP-TRUNK 🔷 + +**Purpose:** Trunk links to wireless access points (EAP610) + +**Applied To:** +- SG2428P Port 1–16 (to EAP610) + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Edge Port: ENABLE +- BPDU Guard: ENABLE 🔥 (optional แต่แนะนำ) +- Loop Protect: DISABLE + +General: +- Flow Control: ON +- EEE: OFF +``` + +📌 **หมายเหตุ:** AP ไม่ควรส่ง BPDU → เปิด guard ได้ + +--- + +### Profile 5 — 005-VOICE-ONLY 🔷 + +**Purpose:** Direct connections to IP phones + +**Applied To:** +- SG2428P Port 17–18 (to IP Phone) + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Edge Port: ENABLE +- BPDU Guard: ENABLE 🔥 + +General: +- LLDP-MED: ENABLE +- Flow Control: ON +``` + +--- + +### Profile 6 — 006-ACCESS-PC 🔷⭐ + +**Purpose:** Direct connections to PCs and printers — Hardened Access Port + +**Applied To:** +- SG2428P Port 23 (to Printer) +- SG3210X-M2 Port 8 (to Admin Desktop) +- General PC connections + +**Configuration:** + +```bash +Loopback Control: 
Spanning Tree + +STP: +- Edge Port: ENABLE +- BPDU Guard: ENABLE 🔥🔥🔥 (สำคัญสุด) + +General: +- Flow Control: ON +- EEE: OFF +``` + +📌 **ถ้ามีคนเสียบ switch:** → Port จะ shutdown ทันที + +--- + +### Profile 7 — 007-DEFAULT-MGMT 🔷 + +**Purpose:** Default configuration for management ports + +**Applied To:** +- Management ports +- Ports requiring no special configuration + +**Configuration:** + +```bash +Loopback Control: Spanning Tree + +STP: +- Edge Port: ENABLE +- BPDU Guard: ENABLE + +General: +- Default +``` + +--- + +## VLAN Mapping 🔶 + +### SG3210X-M2 (Core) Port Configuration + +| Port | Destination | Profile | Native (Untagged) | Tagged | Voice | +|------|-------------|---------|-------------------|--------|-------| +| 1-2 | SG2428P (LACP) | 001-CORE-TRUNK-LACP | 999 | 10,20,30,40,50,70 | Off | +| 3-4 | QNAP (LACP) | 002-NAS-LACP | 10 | 20 🔥 | Off | +| 5-6 | ASUSTOR (LACP) | 002-NAS-LACP | 10 | 20 🔥 | Off | +| 7 | Reserved (future expansion) | 007-DEFAULT-MGMT | 999 | None | Off | +| 8 | Admin Desktop | 006-ACCESS-PC | 10 | None | Off | +| 9 | ER7206 | 001-CORE-TRUNK-LACP | 999 | 10,20,30,40,50,70 | Off | +| 10 | Reserved (future expansion) | 007-DEFAULT-MGMT | 999 | None | Off | + +📌 **NAS (Port 3-6) ปรับใหม่:** เพิ่ม Tagged VLAN 20 สำหรับ MGMT redundancy + +--- + +### SG2428P (Access) Port Configuration + +| Port | Destination | Profile | Native (Untagged) | Tagged | Voice | +|------|-------------|---------|-------------------|--------|-------| +| 1-16 | EAP610 | 004-AP-TRUNK | 999 | 10,20,30,40,50,70 🔥 allow all | Off | +| 17-18 | IP Phone | 005-VOICE-ONLY | 50 | None | Enable (VLAN 50) | +| 19-20 | Reserved (future expansion) | 007-DEFAULT-MGMT | 999 | None | Off | +| 21-22 | SG3210X-M2 (LACP) | 001-CORE-TRUNK-LACP | 999 | 10,20,30,40,50,70 | Off | +| 23 | Printer | 006-ACCESS-PC | 30 | None | Off | +| 24 | OC200 | 007-DEFAULT-MGMT | 20 | None | Off | +| 25 | TL-SL1226P (CCTV) | 003-UNMANAGED-SWITCH | 40 | None | Off | +| 26 | TL-SG1210P (IP Phone + PC) | 
003-UNMANAGED-SWITCH | 30 | 50 | Enable (VLAN 50) | +| 27-28 | Reserved (future expansion) | 007-DEFAULT-MGMT | 999 | None | Off | + +📌 **AP Ports (1-16) ปรับใหม่:** Allow all VLANs สำหรับ future expansion + +--- + +## Network Diagram + +``` + ┌──────────────┐ + │ ER7206 │ + │ (Trunk 999) │ + └──────┬───────┘ + │ + ▼ + ┌──────────────────────────┐ + │ SG3210X-M2 (Core) │ + │ [Root Bridge 4096] │ + └──────────────────────────┘ + LACP 1-2 / | | | | \ Reserved + / | | | | \ + ▼ ▼ ▼ ▼ ▼ ▼ + SG2428P (Access) QNAP ASUSTOR Admin Reserved + [Priority 8192] (VLAN10+20) (VLAN10+20) (VLAN10) + (AP 1–16 Trunk) + +Uplink SG3210X‑M2 (1–2) ↔ SG2428P (21–22) + +WiFi Staff → VLAN 30 +WiFi Guest → VLAN 70 +CCTV → VLAN 40 +IP Phone → VLAN 50 +Printer → VLAN 30 +Admin Desktop → VLAN 10 +NAS → VLAN 10 (+20 MGMT) +OC200 → VLAN 20 +``` + +--- + +## Configuration Procedure + +### Step 1 — Create Port Profiles + +1. Navigate to Omada SDN Controller → Port Profiles +2. Create each profile listed in the Port Profiles section +3. Configure all settings as specified +4. **Do not configure VLANs in Port Profiles** (VLANs are configured in Edit Port) + +### Step 2 — Configure Port VLANs + +1. Navigate to Omada SDN Controller → Switches → Edit Port +2. For each port, configure: + - **Native Network (Untagged)** — The access VLAN for untagged traffic + - **Tagged Network** — VLANs allowed on the trunk (comma-separated) + - **Voice Network** — Voice VLAN (if applicable) + - **Profile** — Select the appropriate Port Profile from Step 1 +3. 
Apply configuration per the VLAN Mapping tables + +--- + +## Change Log + +| Version | Date | Changes | +|---------|------|---------| +| 3.0 | 2026-04-24 | **FINAL VERSION** — STP-only (no Loop Detection), BPDU Guard on access ports, Root Guard on unmanaged switch ports, VLAN 99→999, NAS with MGMT redundancy (VLAN 20 tagged), AP allow-all VLANs, Security Hardening section | +| 2.0 | 2026-04-24 | Updated port mappings (LACP 21-22), new VLAN scheme (30/40/50/70), consolidated CCTV/IoT to VLAN 40, added DHCP table, renamed PC-ONLY to ACCESS-PC | +| 1.0 | 2026-04-23 | Initial version with basic port profiles and VLAN mapping | + +--- + +## Quick Reference — Edit Port Values + +### SG3210X-M2 + +| Port | Native | Tagged | Profile | Voice | +|------|--------|--------|---------|-------| +| 1-2 | 999 | 10,20,30,40,50,70 | 001-CORE-TRUNK-LACP | Off | +| 3-4 | 10 | 20 | 002-NAS-LACP | Off | +| 5-6 | 10 | 20 | 002-NAS-LACP | Off | +| 7 | 999 | — | 007-DEFAULT-MGMT | Off | +| 8 | 10 | — | 006-ACCESS-PC | Off | +| 9 | 999 | 10,20,30,40,50,70 | 001-CORE-TRUNK-LACP | Off | +| 10 | 999 | — | 007-DEFAULT-MGMT | Off | + +### SG2428P + +| Port | Native | Tagged | Profile | Voice | +|------|--------|--------|---------|-------| +| 1-16 | 999 | 10,20,30,40,50,70 | 004-AP-TRUNK | Off | +| 17-18 | 50 | — | 005-VOICE-ONLY | 50 | +| 19-20 | 999 | — | 007-DEFAULT-MGMT | Off | +| 21-22 | 999 | 10,20,30,40,50,70 | 001-CORE-TRUNK-LACP | Off | +| 23 | 30 | — | 006-ACCESS-PC | Off | +| 24 | 20 | — | 007-DEFAULT-MGMT | Off | +| 25 | 40 | — | 003-UNMANAGED-SWITCH | Off | +| 26 | 30 | 50 | 003-UNMANAGED-SWITCH | 50 | +| 27-28 | 999 | — | 007-DEFAULT-MGMT | Off | + +--- + +## Pre-Deployment Checklist + +ก่อน Apply ค่า Configuration: +- [ ] สร้าง VLANs 10, 20, 30, 40, 50, 70, 999 ใน Omada Controller (VLAN 999 = Hardened Native) +- [ ] สร้าง Port Profiles 001–007 ครบถ้วน (STP Mode — ไม่ใช้ Loop Detection) +- [ ] ตรวจสอบ LACP Group Configuration (Port 1-2 ↔ Port 21-22) +- [ ] ตั้งค่า DHCP Server 
ตามตาราง VLAN Definitions +- [ ] ตรวจสอบว่า OC200 อยู่บน VLAN 20 และมี IP 192.168.20.x +- [ ] ตรวจสอบ Voice VLAN Enable บน Port 17-18 และ 26 +- [ ] กำหนด STP Priority: Core=4096, Access=8192 +- [ ] สำรอง Configuration ปัจจุบันก่อน Apply + +--- + +# Testing Guide — VLAN + LACP + STP + +การทดสอบทีละ Layer โดยไม่ต้องใช้เครื่องมือพิเศษ — ใช้แค่ PC + ping + OC200 UI + +--- + +## PART 1 — Testing VLAN (Step-by-Step) + +### Goal +- ตรวจสอบว่าแต่ละพอร์ตอยู่ VLAN ถูกต้อง +- ตรวจสอบว่า Tagged/Untagged ทำงาน +- ตรวจสอบว่า DHCP แจก IP ถูก subnet +- ตรวจสอบว่า WiFi → VLAN ถูกต้อง + +--- + +### STEP 1 — Test VLAN 10 (NAS-ADMIN) + +**Test Equipment:** +- Admin Desktop (Port 8 SG3210X-M2) +- QNAP / ASUSTOR + +**Procedure:** +1. Connect Admin Desktop → Port 8 +2. Open Command Prompt +3. Type: + ``` + ipconfig + ``` +4. Expected IP range: + ``` + 192.168.10.x + ``` + +**Ping Tests:** +``` +ping 192.168.10.1 ← Gateway +ping +ping +``` + +**Expected Result:** +- All pings successful +- Should NOT ping to VLAN 30/40/50/70 (if ACL configured) + +--- + +### STEP 2 — Test VLAN 30 (USERS) + +**Test Equipment:** +- General PC +- Printer +- Staff WiFi (SSID: Staff) + +**Procedure:** +1. Connect PC → Port 23 or Port 26 (via TL-SG1210P) +2. Type: + ``` + ipconfig + ``` +3. Expected IP: + ``` + 192.168.30.x + ``` + +**Ping Tests:** +``` +ping 192.168.30.1 +ping +``` + +**WiFi Staff Test:** +1. Connect to SSID Staff +2. Type: + ``` + ipconfig + ``` +3. Expected IP: 192.168.30.x + +--- + +### STEP 3 — Test VLAN 40 (CCTV/IoT) + +**Test Equipment:** +- CCTV Camera (via TL-SL1226P Port 25) + +**Procedure:** +1. Open OC200 → Clients +2. Camera must show as VLAN 40 +3. Test ping from Admin Desktop: + ``` + ping + ``` + +**Expected Result:** +- Ping successful +- DHCP must assign IP 192.168.40.x + +--- + +### STEP 4 — Test VLAN 50 (VOICE) + +**Test Equipment:** +- IP Phone (Port 17–18 SG2428P) + +**Procedure:** +1. IP Phone boots up +2. Expected IP: + ``` + 192.168.50.x + ``` +3. 
In OC200 → Clients, must see Voice VLAN 50 + +**LLDP-MED Test:** +In OC200 → Switch → Port 17–18, must see: +``` +LLDP-MED: Active +Voice VLAN: 50 +``` + +--- + +### STEP 5 — Test VLAN 70 (Guest WiFi) + +**Procedure:** +1. Connect to SSID Guest +2. Type: + ``` + ipconfig + ``` +3. Expected IP: + ``` + 192.168.70.x + ``` + +**Isolation Test:** +``` +ping 192.168.30.1 ← Must NOT pass +ping 192.168.10.1 ← Must NOT pass +``` + +--- + +## PART 2 — Testing LACP (Step-by-Step) + +### Goal +- ตรวจสอบว่า LACP ระหว่าง SG3210X-M2 ↔ SG2428P ทำงาน +- ตรวจสอบว่า QNAP/ASUSTOR LACP ทำงาน +- ตรวจสอบว่าไม่มี Mis-config + +--- + +### STEP 1 — Check LACP Status in OC200 + +**Path:** Insight → Switch → LAG Status + +Expected status: + +**SG3210X-M2:** +- LAG1 (Port 1–2) → **Up** +- LAG2 (Port 3–4) → **Up** +- LAG3 (Port 5–6) → **Up** + +**SG2428P:** +- LAG1 (Port 21–22) → **Up** + +--- + +### STEP 2 — Test Load Balancing + +**Procedure:** +1. Open QNAP → File Station +2. Copy large file (10–20GB) to Admin Desktop +3. Open Task Manager → Performance → Ethernet +4. Must see traffic on both links (Port 3–4 or 5–6) + +**Uplink Test:** +1. Run Speedtest between PC VLAN 30 → NAS VLAN 10 +2. Must achieve > 1Gbps (if 2Gbps LACP) + +--- + +### STEP 3 — Test Failover + +**Procedure:** +1. Disconnect cable from **Port 1** of SG3210X-M2 +2. LACP must remain **Up** (using Port 2) +3. Disconnect Port 2 → LACP must go Down + +Repeat test with QNAP/ASUSTOR + +--- + +## PART 3 — Testing STP (Step-by-Step) + +### Goal +- ตรวจสอบว่าไม่มี Loop +- ตรวจสอบว่า Root Bridge ถูกต้อง +- ตรวจสอบว่า STP Security ทำงาน + +--- + +### STEP 1 — Check Root Bridge + +**Path:** Devices → SG3210X-M2 → Ports → STP + +Expected: +``` +SG3210X-M2 = Root Bridge +``` + +If not, adjust Priority: +``` +SG3210X-M2 Priority = 4096 +SG2428P Priority = 8192 +``` + +--- + +### STEP 2 — Test Loop Detection + +**Safe Test Method:** +1. Go to TL-SG1210P (Port 26 SG2428P) +2. Create loop with LAN cable (Port 1 ↔ Port 2) +3. 
Check OC200 → Alerts + +Expected alert: +``` +Loop Detected on Port 26 +Port Shutdown (BPDU Protect) +``` + +Port must auto-shutdown + +--- + +### STEP 3 — Test STP Blocking + +**Procedure:** +1. Connect cable from SG2428P Port 19 → SG2428P Port 20 +2. Check OC200 → Switch → Ports + +Expected: +``` +STP State: Blocking +``` + +--- + +### STEP 4 — Test Topology Change (TC Guard) + +**Procedure:** +1. Power cycle AP (Port 1–16) +2. Check OC200 → Logs + +Expected: **NO** message: +``` +Topology Change Detected +``` + +Because TC Guard is enabled + +--- + +## PART 4 — Testing Checklist (SOP) + +### VLAN Tests +- [ ] VLAN 10 gets IP 192.168.10.x +- [ ] VLAN 30 gets IP 192.168.30.x +- [ ] VLAN 40 gets IP 192.168.40.x +- [ ] VLAN 50 gets IP 192.168.50.x +- [ ] VLAN 70 gets IP 192.168.70.x + +### WiFi Tests +- [ ] Staff WiFi → VLAN 30 +- [ ] Guest WiFi → VLAN 70 + +### Device Tests +- [ ] CCTV → VLAN 40 +- [ ] IP Phone → VLAN 50 +- [ ] Printer → VLAN 30 +- [ ] Admin Desktop → VLAN 10 +- [ ] NAS → VLAN 10 +- [ ] OC200 → VLAN 20 + +### LACP Tests +- [ ] LACP SG3210X-M2 ↔ SG2428P = Up +- [ ] LACP QNAP = Up +- [ ] LACP ASUSTOR = Up +- [ ] Load balancing works (2Gbps) +- [ ] Failover works (single link failure) + +### STP Tests +- [ ] Root Bridge = SG3210X-M2 (Priority 4096) +- [ ] BPDU Guard shutdown test (เสียบ switch ที่ port PC → port ต้อง shutdown) +- [ ] Root Guard works (003-UNMANAGED-SWITCH) +- [ ] STP Blocking works +- [ ] TC Guard works (no topology change on AP reboot) + +--- + +# 🔐 Security Hardening (ต้องทำเพิ่ม) + +Required security configurations for Enterprise-grade network protection. + +--- + +## DHCP Snooping 🔥 + +```bash +Global: ENABLE + +Trusted Ports: +- Uplink ไป Router (ER7206) +- Core Trunk (Port 1-2, 9) +``` + +**Path:** Settings → Wired Networks → Switch → DHCP Snooping + +1. Enable **DHCP Snooping** globally +2. Mark **Trusted Ports**: + - SG3210X-M2 Port 9 (to ER7206) + - SG3210X-M2 Port 1-2 (Core Trunk) + - SG2428P Port 21-22 (Uplink to Core) +3. 
**Untrusted:** ทุก access port (จะถูก block ถ้าส่ง DHCP Offer) + +--- + +## Storm Control (AP Ports) 🔥 + +```bash +Broadcast: 1% +Multicast: 2% +Unknown: 2% +``` + +**Path:** Settings → Wired Networks → Switch → Port Profile → 004-AP-TRUNK + +1. Navigate to **Bandwidth Control / Storm Control** +2. Configure: + - Broadcast: 1% (หรือ 1000 pps) + - Multicast: 2% (หรือ 2000 pps) + - Unknown Unicast: 2% (หรือ 2000 pps) +3. Save + +📌 **หมายเหตุ:** ใช้ percentage หรือ pps ตามความเหมาะสมกับ traffic + +--- + +## STP Priority (Root Bridge Election) 🔥 + +```bash +SG3210X-M2 (Core): 4096 +SG2428P (Access): 8192 +``` + +**Path:** Devices → Switch → Config → STP → Priority + +1. **SG3210X-M2:** Set Priority = **4096** (Root Bridge) +2. **SG2428P:** Set Priority = **8192** (Backup Root) +3. Save and verify: + ``` + OC200 → Topology → Root Bridge = SG3210X-M2 + ``` + +📌 **สำคัญ:** Core ต้องเป็น Root Bridge เสมอ + +--- + +## Jumbo Frame 🔥 + +```bash +MTU: 9000 +(ต้องตั้งทุก device ให้เท่ากัน) +``` + +### SG3210X-M2 +**Path:** Devices → SG3210X-M2 → Config → Switch Settings +``` +Jumbo Frame: Enable +MTU: 9000 +``` + +### QNAP +**Path:** Control Panel → Network & Virtual Switch → Interfaces +``` +MTU: 9000 +``` + +### ASUSTOR +**Path:** Settings → Network → Interface → Advanced +``` +Jumbo Frame: 9000 +``` + +⚠️ **คำเตือน:** ถ้าตั้งไม่เท่ากันทุก device → จะมีปัญหา fragmentation หรือ packet drop + +--- + +## 💥 Final Validation Checklist + +### Connectivity Tests +- [ ] LACP = UP ทุกเส้น (Core↔Access, NAS) +- [ ] Root Bridge = Core Switch (SG3210X-M2 Priority 4096) +- [ ] เสียบ switch ที่ port PC → port ต้อง shutdown (BPDU Guard) +- [ ] WiFi ได้ VLAN ถูกต้อง (Staff=30, Guest=70) +- [ ] NAS เข้าถึงได้ทั้ง VLAN 10 และ 20 +- [ ] Guest VLAN เข้า LAN ไม่ได้ (isolation) + +### Security Tests +- [ ] DHCP Snooping blocks rogue DHCP +- [ ] Storm Control limits broadcast +- [ ] BPDU Guard shuts down unauthorized switches +- [ ] Root Guard prevents rogue root bridge + +### Performance Tests +- 
[ ] Jumbo Frame works (MTU 9000 end-to-end) +- [ ] LACP load balancing (2Gbps aggregate) +- [ ] Failover works (single link failure) + +--- + +## Related Documents + +- Network Architecture Design — `specs/02-architecture/02-03-network-design.md` +- VLAN Scheme — See [VLAN Definitions](#vlan-definitions) section +- IP Addressing Scheme — See DHCP table in [VLAN Definitions](#vlan-definitions) section +- Security Guidelines — `specs/06-Decision-Records/ADR-016-security-authentication.md` +- Release Policy — `specs/04-Infrastructure-OPS/04-08-release-management-policy.md` (สำหรับ network changes) + +--- + +## Document Metadata + +| Property | Value | +|----------|-------| +| **Type** | Infrastructure Specification | +| **Language** | Thai (explanations), English (technical terms) | +| **Standards** | AGENTS.md v1.8.9 | +| **Review Cycle** | Per release or on equipment change | +| **Approval Required** | Yes — Infrastructure Lead + Security Review | + + +